diff --git a/.DS_Store b/.DS_Store
deleted file mode 100644
index d44a205b..00000000
Binary files a/.DS_Store and /dev/null differ
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
deleted file mode 100644
index 5adaab3f..00000000
--- a/.bumpversion.cfg
+++ /dev/null
@@ -1,12 +0,0 @@
-[bumpversion]
-current_version = 0.1.1
-commit = True
-tag = True
-
-[bumpversion:file:pyproject.toml]
-search = version = "{current_version}"
-replace = version = "{new_version}"
-
-[bumpversion:file:src/pyvite/__version__.py]
-search = __version__ = "{current_version}"
-replace = __version__ = "{new_version}"
diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..71bf2d86
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,7 @@
+.venv
+venv
+node_modules
+__pycache__
+*.pyc
+dist/
+tmp/
diff --git a/.env.docker.example b/.env.docker.example
new file mode 100644
index 00000000..9e7d8775
--- /dev/null
+++ b/.env.docker.example
@@ -0,0 +1,30 @@
+# App
+SECRET_KEY='secret-key'
+LITESTAR_DEBUG=true
+LITESTAR_HOST=0.0.0.0
+LITESTAR_PORT=8000
+APP_URL=http://localhost:${LITESTAR_PORT}
+
+LOG_LEVEL=10
+# Database
+DATABASE_ECHO=true
+DATABASE_ECHO_POOL=true
+DATABASE_POOL_DISABLE=false
+DATABASE_POOL_MAX_OVERFLOW=5
+DATABASE_POOL_SIZE=5
+DATABASE_POOL_TIMEOUT=30
+DATABASE_URL=postgresql+asyncpg://chapter:chapter@db:5432/chapter
+
+# Cache
+REDIS_URL=redis://cache:6379/0
+
+SAQ_USE_SERVER_LIFESPAN=False # don't use with docker.
+SAQ_WEB_ENABLED=True
+SAQ_BACKGROUND_WORKERS=1
+SAQ_CONCURRENCY=1
+
+VITE_HOST=localhost
+VITE_PORT=3006
+VITE_HOT_RELOAD=True
+VITE_DEV_MODE=True
+ALLOWED_CORS_ORIGINS=["localhost:3006","localhost:8000"]
diff --git a/.env.example b/.env.example
deleted file mode 100644
index 88f3a5cb..00000000
--- a/.env.example
+++ /dev/null
@@ -1,32 +0,0 @@
-# App
-PYSPA_BUILD_NUMBER=0
-PYSPA_DEBUG=true
-PYSPA_DEFAULT_PAGINATION_LIMIT=100
-PYSPA_ENVIRONMENT=local
-PYSPA_LOG_LEVEL=INFO
-PYSPA_NAME=starlite-pg-redis-docker
-
-# Cache
-PYSPA_REDIS_EXPIRATION=60
-PYSPA_REDIS_URL=redis://cache:6379/0
-
-# Database
-PYSPA_POSTGRES_ECHO=false
-PYSPA_POSTGRES_URL=postgresql+asyncpg://postgres:mysecretpassword@db:5432/example-pg-docker
-
-# Gunicorn
-PYSPA_GUNICORN_ACCESS_LOG=-
-PYSPA_GUNICORN_ERROR_LOG=-
-PYSPA_GUNICORN_HOST=0.0.0.0
-PYSPA_GUNICORN_KEEPALIVE=65
-PYSPA_GUNICORN_LOG_LEVEL=info
-PYSPA_GUNICORN_PORT=8000
-PYSPA_GUNICORN_RELOAD=true
-PYSPA_GUNICORN_THREADS=2
-PYSPA_GUNICORN_TIMEOUT=65
-PYSPA_GUNICORN_WORKERS=2
-PYSPA_GUNICORN_WORKER_CLASS=app.utils.restartable_worker.RestartableUvicornWorker
-
-# Sentry
-PYSPA_SENTRY_DSN=
-PYSPA_SENTRY_TRACES_SAMPLE_RATE=0.0001
diff --git a/.env.local.example b/.env.local.example
new file mode 100644
index 00000000..2eb2ea07
--- /dev/null
+++ b/.env.local.example
@@ -0,0 +1,29 @@
+# App
+SECRET_KEY='secret-key'
+LITESTAR_DEBUG=true
+LITESTAR_HOST=0.0.0.0
+LITESTAR_PORT=8089
+APP_URL=http://localhost:${LITESTAR_PORT}
+
+LOG_LEVEL=10
+# Database
+DATABASE_ECHO=true
+DATABASE_ECHO_POOL=true
+DATABASE_POOL_DISABLE=false
+DATABASE_POOL_MAX_OVERFLOW=5
+DATABASE_POOL_SIZE=5
+DATABASE_POOL_TIMEOUT=30
+DATABASE_URL=postgresql+asyncpg://chapter:chapter@localhost:5432/chapter
+
+REDIS_URL=redis://localhost:6379/0
+
+# Worker
+SAQ_USE_SERVER_LIFESPAN=True
+SAQ_WEB_ENABLED=True
+SAQ_BACKGROUND_WORKERS=1
+SAQ_CONCURRENCY=1
+
+VITE_HOST=localhost
+VITE_PORT=5174
+VITE_HOT_RELOAD=True
+VITE_DEV_MODE=False
diff --git a/.env.testing b/.env.testing
new file mode 100644
index 00000000..94fdea98
--- /dev/null
+++ b/.env.testing
@@ -0,0 +1,16 @@
+# App
+SECRET_KEY='secret-key'
+
+# Cache
+REDIS_URL=redis://localhost:6379/0
+
+SAQ_USE_SERVER_LIFESPAN=False # don't use with docker.
+SAQ_WEB_ENABLED=True
+SAQ_BACKGROUND_WORKERS=1
+SAQ_CONCURRENCY=1
+
+VITE_HOST=localhost
+VITE_PORT=3006
+VITE_HOT_RELOAD=True
+VITE_DEV_MODE=True
+VITE_USE_SERVER_LIFESPAN=False
diff --git a/.eslintrc.cjs b/.eslintrc.cjs
new file mode 100644
index 00000000..99ce592c
--- /dev/null
+++ b/.eslintrc.cjs
@@ -0,0 +1,54 @@
+module.exports = {
+  root: true,
+  env: { browser: true, es2020: true },
+  extends: [
+    "plugin:react/recommended",
+    "plugin:@typescript-eslint/recommended",
+    "prettier",
+    "plugin:prettier/recommended",
+    "plugin:import/recommended",
+    "plugin:react-hooks/recommended",
+  ],
+  ignorePatterns: ["dist", ".eslintrc.cjs"],
+  parser: "@typescript-eslint/parser",
+  parserOptions: {
+    ecmaFeatures: {
+      jsx: true,
+    },
+    ecmaVersion: 12,
+    sourceType: "module",
+  },
+  plugins: ["react-refresh", "react", "@typescript-eslint", "react-hooks"],
+  rules: {
+    "react-refresh/only-export-components": [
+      "warn",
+      { allowConstantExport: true },
+    ],
+    "no-use-before-define": "off",
+    "@typescript-eslint/no-use-before-define": ["error"],
+    "react/jsx-filename-extension": ["warn", { extensions: [".tsx"] }],
+    "import/extensions": [
+      "error",
+      "ignorePackages",
+      { ts: "never", tsx: "never" },
+    ],
+    "no-shadow": "off",
+    "@typescript-eslint/no-shadow": ["error"],
+    "@typescript-eslint/explicit-function-return-type": [
+      "error",
+      { allowExpressions: true },
+    ],
+    "@typescript-eslint/no-explicit-any": "off",
+    "max-len": ["warn", { code: 120, ignoreComments: true, ignoreUrls: true }],
+    "react-hooks/rules-of-hooks": "error",
+    "react-hooks/exhaustive-deps": "warn",
+    "import/prefer-default-export": "off",
+    "react/prop-types": "off",
+    "prettier/prettier": ["error", { endOfLine: "auto" }],
+  },
+  settings: {
+    "import/resolver": {
+      typescript: {},
+    },
+  },
+}
diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 390277b5..00000000
--- a/.flake8
+++ /dev/null
@@ -1,17 +0,0 @@
-[flake8]
-jobs = 4
-max-line-length = 80
-exclude = .git,__pycache__
-max-complexity = 10
-# D203 - blank line before docstring
-# E203 - broken whitespace check for ":", disagrees with black
-ignore = D203,E203, E501, C408, B008, B009, W503, SIM119, C417, B023
-per-file-ignores =
-    # imported but unused
-    __init__.py: F401
-type-checking-pydantic-enabled = true
-type-checking-fastapi-enabled = true
-classmethod-decorators =
-    classmethod
-    validator
-    root_validator
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 00000000..2794bbba
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,7 @@
+# Code owner settings for `litestar fullstack`
+# @maintainers should be assigned to all reviews.
+# Most specific assignment takes precedence though, so if you add a more specific thing than the `*` glob, you must also add @maintainers
+# For more info about code owners see https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-file-example
+
+# Global Assignment
+*   @cofin @litestar-org/maintainers @litestar-org/members
diff --git a/.github/workflows/aws-ecs-deploy-prod.yml b/.github/workflows/aws-ecs-deploy-prod.yml
new file mode 100644
index 00000000..2505540d
--- /dev/null
+++ b/.github/workflows/aws-ecs-deploy-prod.yml
@@ -0,0 +1,92 @@
+# This workflow will build and push a new container image to Amazon ECR,
+# and then will deploy a new task definition to Amazon ECS, when there is a push to the "master" branch.
+#
+# To use this workflow, you will need to complete the following set-up steps:
+#
+# 1. Create an ECR repository to store your images.
+#    For example: `aws ecr create-repository --repository-name my-ecr-repo --region us-east-2`.
+#    Replace the value of the `ECR_REPOSITORY` environment variable in the workflow below with your repository's name.
+#    Replace the value of the `AWS_REGION` environment variable in the workflow below with your repository's region.
+#
+# 2. Create an ECS task definition, an ECS cluster, and an ECS service.
+#    For example, follow the Getting Started guide on the ECS console:
+#      https://us-east-2.console.aws.amazon.com/ecs/home?region=us-east-2#/firstRun
+#    Replace the value of the `ECS_SERVICE` environment variable in the workflow below with the name you set for the Amazon ECS service.
+#    Replace the value of the `ECS_CLUSTER` environment variable in the workflow below with the name you set for the cluster.
+#
+# 3. Store your ECS task definition as a JSON file in your repository.
+#    The format should follow the output of `aws ecs register-task-definition --generate-cli-skeleton`.
+#    Replace the value of the `ECS_TASK_DEFINITION` environment variable in the workflow below with the path to the JSON file.
+#    Replace the value of the `CONTAINER_NAME` environment variable in the workflow below with the name of the container
+#    in the `containerDefinitions` section of the task definition.
+#    Note: Use `latest` tag instead of overwriting the image id into task definition container name. This is better
+#    than updating multiple container definitions in a task definition.
+#
+# 4. Store an IAM user access key in GitHub Actions secrets named `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`.
+#    See the documentation for each action used below for the recommended IAM policies for this IAM user,
+#    and best practices on handling the access key credentials.
+
+name: Deploy to Amazon ECS
+
+on:
+  push:
+    branches: ["main"]
+
+env:
+  AWS_REGION: eu-central-1 # set this to your preferred AWS region, e.g. us-west-1
+  ECR_REPOSITORY: chapter-app-repository-prod # set this to your Amazon ECR repository name
+  ECS_SERVICE: chapter-app-service-prod # set this to your Amazon ECS service name
+  ECS_CLUSTER: chapter-app-cluster-prod # set this to your Amazon ECS cluster name
+  ECS_TASK_DEFINITION:
+    deploy/aws/ecs-task-definition-prod.json # set this to the path to your Amazon ECS task definition
+    # file, e.g. .aws/task-definition.json
+    # containerDefinitions section of your task definition
+  DOCKERFILE: deploy/aws/Dockerfile.app # Container dockerfile
+  IMAGE_LATEST_TAG: latest # latest tag name
+
+permissions:
+  contents: read
+  id-token: write
+
+jobs:
+  deploy:
+    name: Deploy
+    runs-on: ubuntu-latest
+    environment: production
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Configure AWS credentials using OIDC
+        uses: aws-actions/configure-aws-credentials@v4
+        with:
+          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_DEPLOYER_ROLE_NAME }}
+          aws-region: ${{ env.AWS_REGION }}
+
+      - name: Login to Amazon ECR
+        id: login-ecr
+        uses: aws-actions/amazon-ecr-login@v2
+
+      - name: Build, tag, and push image to Amazon ECR
+        id: build-image
+        env:
+          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
+          IMAGE_TAG: ${{ github.sha }}
+        run: |
+          # Build a docker container and
+          # push it to ECR so that it can
+          # be deployed to ECS.
+          docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_LATEST_TAG -f $DOCKERFILE .
+          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG
+          echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" >> $GITHUB_OUTPUT
+          docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_LATEST_TAG
+          echo "image=$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_LATEST_TAG" >> $GITHUB_OUTPUT
+
+      - name: Deploy Amazon ECS task definition
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        with:
+          task-definition: ${{ env.ECS_TASK_DEFINITION }}
+          service: ${{ env.ECS_SERVICE }}
+          cluster: ${{ env.ECS_CLUSTER }}
+          wait-for-service-stability: true
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index c1460cc2..94d82d59 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -1,96 +1,118 @@
-name: ci
+name: Tests and Linting
+
 on:
   pull_request:
   push:
     branches:
       - main
+concurrency:
+  group: test-${{ github.head_ref }}
+  cancel-in-progress: true
+
+env:
+  PYTHONUNBUFFERED: "1"
+  FORCE_COLOR: "1"
 jobs:
   validate:
     runs-on: ubuntu-latest
     env:
       SETUPTOOLS_USE_DISTUTILS: stdlib
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v4
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
-      - uses: pre-commit/action@v3.0.0
+          python-version: "3.11"
+
+      - name: Install base libraries
+        run: pip install nodeenv cython setuptools pip  --upgrade --quiet --user
+
+      - uses: pre-commit/action@v3.0.1
+
   test:
     needs: validate
     runs-on: ubuntu-latest
     strategy:
       fail-fast: true
       matrix:
-        python-version: ["3.7", "3.8", "3.9", "3.10"]
+        python-version: ["3.11","3.12"]
     steps:
       - name: Check out repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+
       - name: Set up python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-      - name: Install Poetry
-        uses: snok/install-poetry@v1
-      - name: Load cached venv
-        id: cached-poetry-dependencies
-        uses: actions/cache@v3
+
+      - name: Create cache file
+        run: echo '${{ matrix.python-version }}' > ./matrix-file.txt
+
+      - uses: pdm-project/setup-pdm@v4
+        name: Set up PDM
         with:
-          path: .venv
-          key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}
+          python-version: ${{ matrix.python-version }}
+          allow-python-prereleases: false
+          cache: true
+          cache-dependency-path: |
+            ./pdm.lock
+            ./matrix-file.txt
+
+      - name: Install base libraries
+        run: pip install nodeenv cython setuptools pip --upgrade --quiet --user
+
       - name: Install dependencies
-        if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
-        run: poetry install --no-interaction --no-root --extras="brotli"
-      - name: Set pythonpath
-        run: echo "PYTHONPATH=$PWD" >> $GITHUB_ENV
-      - name: Test
-        run: poetry run pytest
-        if: matrix.python-version != '3.10'
+        run: pdm install -G:all
+
       - name: Test with Coverage
-        run: poetry run pytest --cov=. --cov-report=xml
-        if: matrix.python-version == '3.10'
-      - uses: actions/upload-artifact@v3
+        run: pdm run pytest tests --cov=app --cov-report=xml
+
+      - if: matrix.python-version == '3.11'
+        uses: actions/upload-artifact@v4
         with:
           name: coverage-xml
           path: coverage.xml
-        if: matrix.python-version == '3.10'
-  sonar:
-    needs: test
-    if: github.event.pull_request.head.repo.fork == false
+
+  build-docs:
+    needs:
+      - validate
+    if: github.event_name == 'pull_request'
     runs-on: ubuntu-latest
     steps:
       - name: Check out repository
-        uses: actions/checkout@v3
-      - name: Download Artifacts
-        uses: actions/download-artifact@v3
+        uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
         with:
-          name: coverage-xml
-      - name: Fix coverage file for sonarcloud
-        run: sed -i "s/home\/runner\/work\/starlite\/starlite/github\/workspace/g" coverage.xml
-      - name: SonarCloud Scan
-        uses: sonarsource/sonarcloud-github-action@master
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
-  snyk:
-    needs: test
-    if: github.event.pull_request.head.repo.fork == false
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@master
-      - name: Run Snyk Monitor
-        if: ${{ github.ref == 'refs/heads/main' }}
-        uses: snyk/actions/python-3.8@master
+          python-version: "3.11"
+
+      - uses: pdm-project/setup-pdm@v4
+        name: Set up PDM
         with:
-          command: monitor
+          python-version: "3.11"
+          allow-python-prereleases: false
+          cache: true
+
+      - name: Install dependencies
+        run: pdm install -G:all
+
+      - name: Build docs
+        run: pdm run make docs
+
+      - name: Save PR number
         env:
-          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
-      - name: Run Snyk Test
-        if: ${{ github.ref != 'refs/heads/main' }}
-        uses: snyk/actions/python-3.8@master
+          PR_NUMBER: ${{ github.event.number }}
+        run: echo $PR_NUMBER > .pr_number
+
+      - name: Upload artifact
+        uses: actions/upload-artifact@v4
         with:
-          command: test
-        env:
-          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
+          name: docs-preview
+          path: |
+            docs/_build/html
+            .pr_number
+
   codeql:
     needs: test
     runs-on: ubuntu-latest
@@ -98,16 +120,19 @@ jobs:
       security-events: write
     steps:
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2
+        uses: github/codeql-action/init@v3
         with:
           languages: python
       - name: Checkout repository
-        uses: actions/checkout@v3
-      - name: Load cached venv
-        id: cached-poetry-dependencies
-        uses: actions/cache@v3
+        uses: actions/checkout@v4
+      - name: Install base libraries
+        run: pip install nodeenv cython setuptools pip --upgrade --quiet --user
+      - uses: pdm-project/setup-pdm@v4
+        name: Set up PDM
         with:
-          path: .venv
-          key: venv-${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}
+          python-version: "3.11"
+          cache: true
+      - name: Install dependencies
+        run: pdm install -G:all
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2
+        uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/docs-preview.yaml b/.github/workflows/docs-preview.yaml
new file mode 100644
index 00000000..7dc5c630
--- /dev/null
+++ b/.github/workflows/docs-preview.yaml
@@ -0,0 +1,72 @@
+name: Deploy Documentation Preview
+
+on:
+  workflow_run:
+    workflows: [Tests and Linting]
+    types: [completed]
+
+jobs:
+  deploy:
+    if: ${{ github.event.workflow_run.conclusion == 'success' &&  github.event.workflow_run.event == 'pull_request' }}
+    runs-on: ubuntu-latest
+    permissions:
+      issues: write
+      pull-requests: write
+
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+
+      - name: Download artifact
+        uses: dawidd6/action-download-artifact@v6
+        with:
+          workflow_conclusion: success
+          run_id: ${{ github.event.workflow_run.id }}
+          path: docs-preview
+          name: docs-preview
+
+      - name: Set PR number
+        run: echo "PR_NUMBER=$(cat docs-preview/.pr_number)" >> $GITHUB_ENV
+
+      - name: Deploy docs preview
+        uses: JamesIves/github-pages-deploy-action@v4
+        with:
+          folder: docs-preview/docs/_build/html
+          token: ${{ secrets.DOCS_PREVIEW_DEPLOY_TOKEN }}
+          repository-name: litestar-org/fullstack-docs-preview
+          clean: false
+          target-folder: ${{ env.PR_NUMBER }}
+          branch: gh-pages
+
+      - uses: actions/github-script@v7
+        env:
+          PR_NUMBER: ${{ env.PR_NUMBER }}
+        with:
+          script: |
+            const issue_number = process.env.PR_NUMBER
+            const body = "Documentation preview will be available shortly at https://litestar-org.github.io/fullstack-docs-preview/" + issue_number
+
+            const opts = github.rest.issues.listComments.endpoint.merge({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              issue_number: issue_number,
+            });
+
+            const comments = await github.paginate(opts)
+
+            for (const comment of comments) {
+              if (comment.user.id === 41898282 && comment.body === body) {
+                await github.rest.issues.deleteComment({
+                  owner: context.repo.owner,
+                  repo: context.repo.repo,
+                  comment_id: comment.id
+                })
+              }
+            }
+
+            await github.rest.issues.createComment({
+                  owner: context.repo.owner,
+                  repo: context.repo.repo,
+                  issue_number: issue_number,
+                  body: body,
+            })
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index 5e4a6d7d..83c10c67 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -1,18 +1,46 @@
-name: docs
+name: Documentation Building
+
 on:
-  workflow_run:
-    workflows: ["ci"]
-    branches: [main]
-    types:
-      - completed
+  release:
+    types: [published]
+  push:
+    branches:
+      - main
+
 jobs:
   docs:
+    permissions:
+      contents: write
     runs-on: ubuntu-latest
-    if: ${{ github.event.workflow_run.conclusion == 'success' }}
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v4
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
-      - run: pip install "mkdocs-material"
-      - run: mkdocs gh-deploy --force
+          python-version: "3.11"
+
+      - uses: pdm-project/setup-pdm@v4
+        name: Set up PDM
+        with:
+          python-version: "3.11"
+          allow-python-prereleases: true
+          cache: true
+
+      - name: Install dependencies
+        run: pdm install -G:all
+
+      - name: Fetch gh pages
+        run: git fetch origin gh-pages --depth=1
+
+      - name: Build release docs
+        run: pdm run python scripts/build-docs.py docs-build
+        if: github.event_name == 'release'
+
+      - name: Build dev docs
+        run: pdm run python scripts/build-docs.py docs-build
+        if: github.event_name == 'push'
+
+      - name: Deploy
+        uses: JamesIves/github-pages-deploy-action@v4
+        with:
+          folder: docs-build
diff --git a/.github/workflows/pr-title.yaml b/.github/workflows/pr-title.yaml
new file mode 100644
index 00000000..f4f5d1c2
--- /dev/null
+++ b/.github/workflows/pr-title.yaml
@@ -0,0 +1,20 @@
+name: "Lint PR Title"
+
+on:
+  pull_request_target:
+    types:
+      - opened
+      - edited
+      - synchronize
+
+permissions:
+  pull-requests: read
+
+jobs:
+  main:
+    name: Validate PR title
+    runs-on: ubuntu-latest
+    steps:
+      - uses: amannn/action-semantic-pull-request@v5
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml
deleted file mode 100644
index 1a672cbd..00000000
--- a/.github/workflows/publish.yaml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: publish
-on:
-  release:
-    types: [published]
-jobs:
-  publish-release:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Check out repository
-        uses: actions/checkout@v3
-      - name: Set up python 3.10
-        uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-      - name: Install Poetry
-        uses: snok/install-poetry@v1
-      - name: Install dependencies
-        run: poetry install --no-interaction --no-root --no-dev
-      - name: publish
-        shell: bash
-        run: |
-          poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN }}
-          poetry publish --build --no-interaction
diff --git a/.gitignore b/.gitignore
index 90ac4fa4..f53feb24 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,8 +14,6 @@ dist/
 downloads/
 eggs/
 .eggs/
-lib/
-lib64/
 parts/
 sdist/
 var/
@@ -26,7 +24,8 @@ share/python-wheels/
 .installed.cfg
 *.egg
 MANIFEST
-
+poetry.toml
+.pdm-python
 # PyInstaller
 #  Usually these files are written by a python script from a template
 #  before PyInstaller builds the exe, so as to inject date/other infos into it.
@@ -50,7 +49,7 @@ coverage.xml
 *.py,cover
 .hypothesis/
 .pytest_cache/
-
+.python-version
 # Translations
 *.mo
 *.pot
@@ -91,9 +90,6 @@ ipython_config.py
 #   install all needed dependencies.
 #Pipfile.lock
 
-# poetry
-# poetry.lock
-
 # PEP 582; used by e.g. github.com/David-OConnor/pyflow
 __pypackages__/
 
@@ -105,13 +101,18 @@ celerybeat.pid
 *.sage.py
 
 # Environments
-.env
+.env*
+!.env.*.example
+!.env.testing
 .venv
 env/
 venv/
 ENV/
 env.bak/
 venv.bak/
+.venv
+media/
+!media/.gitkeep
 
 # Spyder project settings
 .spyderproject
@@ -131,5 +132,50 @@ dmypy.json
 # Pyre type checker
 .pyre/
 
-# VSCode
-.vscode
+# vscode
+# .vscode
+.venv
+
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+lerna-debug.log*
+
+node_modules
+dist
+dist-ssr
+*.local
+tsconfig.tsbuildinfo
+
+# Editor directories and files
+.vscode/*
+!.vscode/extensions.json
+.idea
+.DS_Store
+*.suo
+*.ntvs*
+*.njsproj
+*.sln
+*.sw?
+
+# temporary files
+tmp/
+temp/
+
+# built files from the web UI
+src/app/domain/web/public
+src/app/domain/web/public/hot
+.vite
+src/app/domain/web/static
+public/hot
+public/bundle
+.pdm-python
+
+db.duckdb
+local.duckdb
+
+requirements.txt
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b8808742..a5d9a2e9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,106 +1,49 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.6.0
     hooks:
       - id: check-ast
       - id: check-case-conflict
       - id: check-merge-conflict
+      - id: check-toml
       - id: debug-statements
       - id: end-of-file-fixer
-        exclude: "\\.idea/(.)*"
+      - id: mixed-line-ending
       - id: trailing-whitespace
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v2.34.0
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: v0.5.0
     hooks:
-      - id: pyupgrade
-        args: ["--py310-plus"]
-  - repo: https://github.com/pycqa/isort
-    rev: 5.10.1
-    hooks:
-      - id: isort
-  - repo: https://github.com/psf/black
-    rev: 22.6.0
-    hooks:
-      - id: black
-        args: [--config=./pyproject.toml]
-  - repo: https://github.com/codespell-project/codespell
-    rev: v2.1.0
-    hooks:
-      - id: codespell
-  - repo: https://github.com/asottile/blacken-docs
-    rev: v1.12.1
-    hooks:
-      - id: blacken-docs
+      - id: ruff
+        args:
+          - --fix
   - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v2.7.1"
+    rev: v4.0.0-alpha.8
     hooks:
       - id: prettier
-  - repo: https://github.com/pycqa/bandit
-    rev: 1.7.4
-    hooks:
-      - id: bandit
-        exclude: "test_*"
-        args: ["-iii", "-ll", "-s=B308,B703"]
-  - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.9.2
-    hooks:
-      - id: flake8
-        additional_dependencies:
-          [
-            "flake8-bugbear",
-            "flake8-comprehensions",
-            "flake8-mutable",
-            "flake8-print",
-            "flake8-simplify",
-            "flake8-type-checking",
-          ]
+        exclude: templates|migrations|scripts|docs|dist|.venv|public
   - repo: https://github.com/ariebovenberg/slotscheck
-    rev: v0.14.1
+    rev: v0.19.0
     hooks:
       - id: slotscheck
-        exclude: "^(?!starlite/)"
-        additional_dependencies:
-          [
-            openapi_schema_pydantic,
-            orjson,
-            pydantic,
-            pydantic_factories,
-            pyyaml,
-            starlette,
-            sqlalchemy,
-            requests,
-            jinja2,
-            mako,
-          ]
-  - repo: https://github.com/pycqa/pylint
-    rev: "v2.14.4"
-    hooks:
-      - id: pylint
-        exclude: "test_*"
-        args: ["--unsafe-load-any-extension=y"]
-        additional_dependencies:
-          [
-            openapi_schema_pydantic,
-            orjson,
-            pydantic,
-            pydantic_factories,
-            pyyaml,
-            starlette,
-            sqlalchemy,
-          ]
+        exclude: test_*|docs|migrations|scripts
+        entry: env PYTHONPATH=src slotscheck
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: "v0.961"
+    rev: v1.10.1
     hooks:
       - id: mypy
+        exclude: scripts/
         additional_dependencies:
-          [
-            orjson,
-            types-PyYAML,
-            types-requests,
-            openapi_schema_pydantic,
-            pydantic,
-            pydantic_factories,
-            starlette,
-            sqlalchemy,
-            types-freezegun,
-          ]
+          - passlib[argon2]
+          - asyncpg
+          - asyncpg-stubs
+          - litestar[jinja,jwt,redis,structlog]
+          - "advanced-alchemy[uuid]"
+          - litestar-granian
+          - litestar-vite>=0.1.21
+          - litestar-saq
+          - types-python-jose
+          - types-click
+          - types-redis
+          - types-passlib
+          - types-click
+          - types-PyYAML
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 00000000..cae1a354
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,14 @@
+templates
+scripts
+artwork
+deploy
+docs
+*.json
+.eslintrc.cjs
+postcss.config.cjs
+.github
+.venv
+media
+public
+dist
+.git
diff --git a/.prettierrc.json b/.prettierrc.json
new file mode 100644
index 00000000..c08d3078
--- /dev/null
+++ b/.prettierrc.json
@@ -0,0 +1,7 @@
+{
+  "trailingComma": "es5",
+  "tabWidth": 2,
+  "semi": false,
+  "singleQuote": false,
+  "endOfLine": "auto"
+}
diff --git a/.pylintrc b/.pylintrc
deleted file mode 100644
index fc02813a..00000000
--- a/.pylintrc
+++ /dev/null
@@ -1,441 +0,0 @@
-# This Pylint rcfile contains a best-effort configuration to uphold the
-# best-practices and style described in the Google Python style guide:
-#   https://google.github.io/styleguide/pyguide.html
-#
-# Its canonical open-source location is:
-#   https://google.github.io/styleguide/pylintrc
-
-[MASTER]
-
-# Files or directories to be skipped. They should be base names, not paths.
-ignore=third_party
-
-# Files or directories matching the regex patterns are skipped. The regex
-# matches against base names, not paths.
-ignore-patterns=
-
-# Pickle collected data for later comparisons.
-persistent=no
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-# Use multiple processes to speed up Pylint.
-jobs=4
-
-# Allow loading of arbitrary C extensions. Extensions are imported into the
-# active Python interpreter and may run arbitrary code.
-unsafe-load-any-extension=no
-
-
-[MESSAGES CONTROL]
-
-# Only show warnings with the listed confidence levels. Leave empty to show
-# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
-confidence=
-
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifier separated by comma (,) or put this option
-# multiple time (only on the command line, not in the configuration file where
-# it should appear only once). See also the "--disable" option for examples.
-enable=useless-suppression
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifiers separated by comma (,) or put this
-# option multiple times (only on the command line, not in the configuration
-# file where it should appear only once).You can also use "--disable=all" to
-# disable everything first and then reenable specific checks. For example, if
-# you want to run only the similarities checker, you can use "--disable=all
-# --enable=similarities". If you want to run only the classes checker, but have
-# no Warning level messages displayed, use"--disable=all --enable=classes
-# --disable=W"
-disable=abstract-method,
-        apply-builtin,
-        arguments-differ,
-        attribute-defined-outside-init,
-        backtick,
-        bad-continuation,     # added for black compatibility
-        bad-option-value,
-        bad-whitespace.       # added for black compatibility
-        basestring-builtin,
-        buffer-builtin,
-        c-extension-no-member,
-        consider-using-enumerate,
-        cmp-builtin,
-        cmp-method,
-        coerce-builtin,
-        coerce-method,
-        delslice-method,
-        div-method,
-        duplicate-code,
-        eq-without-hash,
-        execfile-builtin,
-        file-builtin,
-        filter-builtin-not-iterating,
-        fixme,
-        getslice-method,
-        global-statement,
-        hex-method,
-        idiv-method,
-        implicit-str-concat-in-sequence,
-        import-error,
-        import-self,
-        import-star-module-level,
-        inconsistent-return-statements,
-        input-builtin,
-        intern-builtin,
-        invalid-str-codec,
-        locally-disabled,
-        long-builtin,
-        long-suffix,
-        map-builtin-not-iterating,
-        misplaced-comparison-constant,
-        missing-module-docstring,
-        redefined-outer-name,
-        metaclass-assignment,
-        next-method-called,
-        next-method-defined,
-        no-absolute-import,
-        no-else-break,
-        no-else-continue,
-        no-else-raise,
-        no-else-return,
-        no-init,
-        no-member,
-        no-name-in-module,
-        no-self-use,
-        nonzero-method,
-        oct-method,
-        old-division,
-        old-ne-operator,
-        old-octal-literal,
-        old-raise-syntax,
-        parameter-unpacking,
-        print-statement,
-        raising-string,
-        range-builtin-not-iterating,
-        raw_input-builtin,
-        rdiv-method,
-        reduce-builtin,
-        relative-import,
-        reload-builtin,
-        round-builtin,
-        setslice-method,
-        signature-differs,
-        standarderror-builtin,
-        suppressed-message,
-        sys-max-int,
-        too-few-public-methods,
-        too-many-ancestors,
-        too-many-arguments,
-        too-many-boolean-expressions,
-        too-many-branches,
-        too-many-instance-attributes,
-        too-many-locals,
-        too-many-nested-blocks,
-        too-many-public-methods,
-        too-many-return-statements,
-        too-many-statements,
-        trailing-newlines,
-        unichr-builtin,
-        unicode-builtin,
-        unnecessary-pass,
-        unpacking-in-except,
-        useless-else-on-loop,
-        useless-object-inheritance,
-        useless-suppression,
-        using-cmp-argument,
-        wrong-import-order,
-        xrange-builtin,
-        zip-builtin-not-iterating,
-extension-pkg-whitelist = "pydantic,orjson,picologging"
-
-[REPORTS]
-
-# Set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html. You can also give a reporter class, eg
-# mypackage.mymodule.MyReporterClass.
-output-format=text
-
-
-# Tells whether to display a full report or only the messages
-reports=no
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note). You have access to the variables errors warning, statement which
-# respectively contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (RP0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Template used to display messages. This is a python new-style format string
-# used to format the message information. See doc for all details
-#msg-template=
-
-
-[BASIC]
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=_,i,e,fn,main,_
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=
-
-# Colon-delimited sets of names that determine each other's naming style when
-# the name regexes allow several styles.
-name-group=
-
-# Include a hint for the correct naming format with invalid-name
-include-naming-hint=no
-
-# List of decorators that produce properties, such as abc.abstractproperty. Add
-# to this list to register other decorators that produce valid properties.
-property-classes=abc.abstractproperty,cached_property.cached_property,cached_property.threaded_cached_property,cached_property.cached_property_with_ttl,cached_property.threaded_cached_property_with_ttl
-
-# Regular expression matching correct function names
-function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$
-
-# Regular expression matching correct variable names
-variable-rgx=^[a-z][a-z0-9_]*$
-
-# Regular expression matching correct constant names
-const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
-
-# Regular expression matching correct attribute names
-attr-rgx=^_{0,2}[a-z][a-z0-9_]*$
-
-# Regular expression matching correct argument names
-argument-rgx=^[a-z][a-z0-9_]*$
-
-# Regular expression matching correct class attribute names
-class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
-
-# Regular expression matching correct inline iteration names
-inlinevar-rgx=^[a-z][a-z0-9_]*$
-
-# Regular expression matching correct class names
-class-rgx=^_?[A-Z][a-zA-Z0-9]*$
-
-# Regular expression matching correct module names
-# Jacob added __version__
-module-rgx=^(_?[a-z][a-z0-9_]*|__init__|__version__)$
-
-# Regular expression matching correct method names
-method-rgx=(?x)^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase|tearDownTestCase|setupSelf|tearDownClass|setUpClass|(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$
-
-# Regular expression which should only match function or class names that do
-# not require a docstring.
-no-docstring-rgx=(__.*__|main|test.*|.*test|.*Test)$
-
-# Minimum line length for functions/classes that require docstrings, shorter
-# ones are exempt.
-docstring-min-length=10
-
-
-[TYPECHECK]
-
-# List of decorators that produce context managers, such as
-# contextlib.contextmanager. Add to this list to register other decorators that
-# produce valid context managers.
-contextmanager-decorators=contextlib.contextmanager,contextlib2.contextmanager
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# List of module names for which member attributes should not be checked
-# (useful for modules/projects where namespaces are manipulated during runtime
-# and thus existing member attributes cannot be deduced by static analysis. It
-# supports qualified module names, as well as Unix pattern matching.
-ignored-modules=
-
-# List of class names for which member attributes should not be checked (useful
-# for classes with dynamically set attributes). This supports the use of
-# qualified names.
-ignored-classes=optparse.Values,thread._local,_thread._local
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E1101 when accessed. Python regular
-# expressions are accepted.
-generated-members=
-
-
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=88
-
-# TODO(https://github.com/PyCQA/pylint/issues/3352): Direct pylint to exempt
-# lines made too long by directives to pytype.
-
-# Regexp for a line that is allowed to be longer than the limit.
-ignore-long-lines=(?x)(
-  ^\s*(\#\ )?<?https?://\S+>?$|
-  ^\s*(from\s+\S+\s+)?import\s+.+$)
-
-# Allow the body of an if to be on the same line as the test if there is no
-# else.
-single-line-if-stmt=yes
-
-# Maximum number of lines in a module
-max-module-lines=99999
-
-# Jacob changed to 4 spaces, conforming to PEP-8
-indent-string='    '
-
-# Number of spaces of indent required inside a hanging  or continued line.
-indent-after-paren=4
-
-# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
-expected-line-ending-format=
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=TODO
-
-
-[STRING]
-
-# This flag controls whether inconsistent-quotes generates a warning when the
-# character used as a quote delimiter is used inconsistently within a module.
-check-quote-consistency=yes
-
-
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# A regular expression matching the name of dummy variables (i.e. expectedly
-# not used).
-dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_)
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-# List of strings which can identify a callback function by name. A callback
-# name must start or end with one of those strings.
-callbacks=cb_,_cb
-
-# List of qualified module names which can have objects that can redefine
-# builtins.
-redefining-builtins-modules=six,six.moves,past.builtins,future.builtins,functools
-
-
-[LOGGING]
-
-# Logging modules to check that the string format arguments are in logging
-# function parameter format
-logging-modules=logging,absl.logging,tensorflow.io.logging,picologging
-
-
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-# Ignore imports when computing similarities.
-ignore-imports=no
-
-
-[SPELLING]
-
-# Spelling dictionary name. Available dictionaries: none. To make it working
-# install python-enchant package.
-spelling-dict=
-
-# List of comma separated words that should not be checked.
-spelling-ignore-words=
-
-# A path to a file that contains private dictionary; one word per line.
-spelling-private-dict-file=
-
-# Tells whether to store unknown words to indicated private dictionary in
-# --spelling-private-dict-file option instead of raising a message.
-spelling-store-unknown-words=no
-
-
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,
-                   TERMIOS,
-                   Bastion,
-                   rexec,
-                   sets
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report RP0402 must not be disabled)
-import-graph=
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report RP0402 must
-# not be disabled)
-int-import-graph=
-
-# Force import order to recognize a module as part of the standard
-# compatibility libraries.
-known-standard-library=
-
-# Force import order to recognize a module as part of a third party library.
-known-third-party=enchant, absl
-
-# Analyse import fallback blocks. This can be used to support both Python 2 and
-# 3 compatible code, which means that the block might have code that exists
-# only in one or another interpreter, leading to false positives when analysed.
-analyse-fallback-blocks=no
-
-
-[CLASSES]
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,
-                      __new__,
-                      setUp
-
-# List of member names, which should be excluded from the protected access
-# warning.
-exclude-protected=_asdict,
-                  _fields,
-                  _replace,
-                  _source,
-                  _make
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls,
-                            class_
-
-# List of valid names for the first argument in a metaclass class method.
-valid-metaclass-classmethod-first-arg=mcs
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when being caught. Defaults to
-# "Exception"
-overgeneral-exceptions=StandardError,
-                       Exception,
-                       BaseException
-
-[DESIGN]
-max-args=15
-max-attributes=20
-max-locals=30
-max-returns=15
-max-branches=15
-
-[VARIABLES]
-ignored-argument-names=args|kwargs|_|__
\ No newline at end of file
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 00000000..80a56511
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,10 @@
+{
+  "recommendations": [
+    "mikestead.dotenv",
+    "christian-kohler.path-intellisense",
+    "ms-python.vscode-pylance",
+    "ms-python.python",
+    "charliermarsh.ruff",
+    "ms-python.mypy-type-checker"
+  ]
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..cff8f155
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,81 @@
+{
+  "files.exclude": {
+    "**/._*": true,
+    "**/*.pyc": {
+      "when": "$(basename).py"
+    },
+    ".mypy_cache": true,
+    "**/__pycache__": true,
+    ".venv": false,
+    ".idea": true,
+    ".run": true,
+    ".pytest_cache": true,
+    ".hypothesis": true,
+    ".nova": true,
+    ".cache": true,
+    ".dist": true,
+    "**/.pytest_cache": true,
+    "site": true,
+    ".angular": true,
+    ".ruff_cache": true,
+    ".coverage": true,
+    "node_modules": false
+  },
+  "ruff.format.args": ["--config=${workspaceFolder}/pyproject.toml"],
+  "ruff.lint.run": "onType",
+  "ruff.lint.args": ["--config=${workspaceFolder}/pyproject.toml"],
+  "mypy-type-checker.importStrategy": "fromEnvironment",
+  "black-formatter.importStrategy": "fromEnvironment",
+  "pylint.importStrategy": "fromEnvironment",
+  "pylint.args": ["--rcfile=pylintrc"],
+  "python.autoComplete.extraPaths": ["${workspaceFolder}/src"],
+  "python.terminal.activateEnvInCurrentTerminal": true,
+  "python.terminal.executeInFileDir": true,
+  "python.testing.pytestEnabled": true,
+  "autoDocstring.guessTypes": false,
+  "python.analysis.autoImportCompletions": true,
+  "python.analysis.autoFormatStrings": true,
+  "python.analysis.extraPaths": ["${workspaceFolder}/src"],
+  "editor.formatOnSave": true,
+  "notebook.formatOnSave.enabled": true,
+  "black-formatter.args": ["--line-length=120"],
+  "evenBetterToml.formatter.reorderKeys": true,
+  "evenBetterToml.formatter.trailingNewline": true,
+  "evenBetterToml.formatter.columnWidth": 120,
+  "evenBetterToml.formatter.arrayAutoCollapse": true,
+  "python.globalModuleInstallation": false,
+  "python.testing.unittestEnabled": false,
+  "python.testing.autoTestDiscoverOnSaveEnabled": true,
+  "editor.codeActionsOnSave": {
+    "source.fixAll.ruff": "explicit",
+    "source.organizeImports.ruff": "explicit"
+  },
+  "[python]": {
+    "editor.formatOnSave": true,
+    "editor.formatOnSaveMode": "file",
+    "editor.insertSpaces": true,
+    "editor.tabSize": 4,
+    "editor.trimAutoWhitespace": true,
+    "editor.defaultFormatter": "charliermarsh.ruff",
+    "editor.codeActionsOnSave": {
+      "source.fixAll": "explicit",
+      "source.organizeImports": "explicit"
+    }
+  },
+  "python.analysis.fixAll": [
+    "source.unusedImports",
+    "source.convertImportFormat"
+  ],
+  "sqltools.disableReleaseNotifications": true,
+  "sqltools.disableNodeDetectNotifications": true,
+  "python.testing.unittestArgs": [
+    "-v",
+    "-s",
+    "./tests",
+    "-p",
+    "test_*.py"
+  ],
+  "python.testing.pytestArgs": [
+    "tests"
+  ]
+}
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index cc4d7ae7..00000000
--- a/Dockerfile
+++ /dev/null
@@ -1,90 +0,0 @@
-# Dockerfile
-ARG PYTHON_BUILDER_IMAGE=3.10-slim
-ARG NODE_BUILDER_IMAGE=18-slim
-
-## Store the commit versiom into the image for usage later
-FROM alpine/git AS git
-ADD . /app
-WORKDIR /app
-# I use this file to provide the git commit
-# in the footer without having git present
-# in my production image
-RUN git rev-parse HEAD | tee /version
-
-## Node image for JS applications
-FROM node:${NODE_BUILDER_IMAGE} as node-base
-RUN apt-get update \
-    && apt-get upgrade -y \
-    && apt-get autoremove -y
-# todo: add build commands here
-
-
-## Build venv
-FROM python:${PYTHON_BUILDER_IMAGE} as python-base
-ENV PIP_DEFAULT_TIMEOUT=100 \
-    PIP_DISABLE_PIP_VERSION_CHECK=1 \
-    PIP_NO_CACHE_DIR=1 \
-    PYTHONDONTWRITEBYTECODE=1 \
-    PYTHONUNBUFFERED=1 \
-    PYTHONFAULTHANDLER=1 \
-    PYTHONHASHSEED=random
-RUN apt-get update \
-    && apt-get upgrade -y \
-    && apt-get autoremove -y \
-    && apt-get clean -y \
-    && rm -rf /root/.cache \
-    && rm -rf /var/apt/lists/* \
-    && rm -rf /var/cache/apt/*
-RUN pip install --upgrade pip  \
-    pip install wheel setuptools
-
-
-FROM python-base AS build-stage
-ARG POETRY_INSTALL_ARGS="--no-dev"
-ENV POETRY_HOME="/opt/poetry" \
-    POETRY_NO_INTERACTION=1 \
-    POETRY_VIRTUALENVS_CREATE=0 \
-    POETRY_VIRTUALENVS_IN_PROJECT=1 \
-    POETRY_CACHE_DIR='/var/cache/pypoetry' \
-    POETRY_VERSION='1.1.4' \
-    POETRY_INSTALL_ARGS="${POETRY_INSTALL_ARGS}" 
-RUN apt-get install -y --no-install-recommends curl git build-essential \
-    && apt-get autoremove -y
-
-RUN curl -sSL https://install.python-poetry.org | python - \
-    && ln -s /opt/poetry/bin/poetry /usr/local/bin/poetry
-
-WORKDIR /app
-COPY pyproject.toml poetry.lock ./
-RUN python -m venv --copies /app/venv
-RUN . /app/venv/bin/activate \
-    && poetry install $POETRY_INSTALL_ARGS
-
-
-## Beginning of runtime image
-FROM python:${PYTHON_BUILDER_IMAGE} as run-image
-COPY --from=build-stage /app/venv /app/venv/
-ENV PATH /app/venv/bin:$PATH
-WORKDIR /app
-COPY LICENSE pyproject.toml README.md ./
-COPY alembic.ini ./
-COPY scripts ./scripts/
-COPY alembic ./alembic/
-# These are the two folders that change the most.
-COPY opdba /app/
-COPY --from=git /version /app/.version
-
-# switch to a non-root user for security
-RUN addgroup --system --gid 1001 "app-user" \
-    && adduser --no-create-home --system --uid 1001 "app-user" \
-    && chown -R "app-user":"app-user" /app
-COPY --chown="app-user":"app-user" --from=build-stage /app/venv /app/venv/
-COPY --chown="app-user":"app-user" *.md  LICENSE /app/ 
-COPY --chown="app-user":"app-user" sample /app/sample
-
-# These are the two folders that change the most.
-COPY --chown="app-user":"app-user" db_assessment /app/db_assessment
-
-USER "app-user"
-ENTRYPOINT [ "gunicorn","--bind", "0.0.0.0:8080","--timeout", "0", "--workers","1", "db_assessment.api:app"]
-EXPOSE 8080
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index d6456956..00000000
--- a/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/LICENSE.md b/LICENSE.md
new file mode 100644
index 00000000..e7347d30
--- /dev/null
+++ b/LICENSE.md
@@ -0,0 +1,3 @@
+Copyright (c) 2024
+
+All Rights Reserved.
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..df6ff64e
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,163 @@
+SHELL := /bin/bash
+# =============================================================================
+# Variables
+# =============================================================================
+
+.DEFAULT_GOAL:=help
+.ONESHELL:
+USING_PDM		          	=	$(shell grep "tool.pdm" pyproject.toml && echo "yes")
+ENV_PREFIX		        	=.venv/bin/
+VENV_EXISTS           		=	$(shell python3 -c "if __import__('pathlib').Path('.venv/bin/activate').exists(): print('yes')")
+NODE_MODULES_EXISTS			=	$(shell python3 -c "if __import__('pathlib').Path('node_modules').exists(): print('yes')")
+SRC_DIR               		=src
+BUILD_DIR             		=dist
+PDM_OPTS 		          	?=
+PDM 			            ?= 	pdm $(PDM_OPTS)
+
+.EXPORT_ALL_VARIABLES:
+
+ifndef VERBOSE
+.SILENT:
+endif
+
+
+.PHONY: help
+help: 		   										## Display this help text for Makefile
+	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n  make \033[36m<target>\033[0m\n"} /^[a-zA-Z0-9_-]+:.*?##/ { printf "  \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
+
+
+.PHONY: upgrade
+upgrade:       										## Upgrade all dependencies to the latest stable versions
+	@echo "=> Updating all dependencies"
+	@if [ "$(USING_PDM)" ]; then $(PDM) update; fi
+	@echo "=> Python Dependencies Updated"
+	@$(ENV_PREFIX)pre-commit autoupdate
+	@echo "=> Updated Pre-commit"
+
+.PHONY: uninstall
+uninstall:
+	@echo "=> Uninstalling PDM"
+ifeq ($(OS),Windows_NT)
+	@echo "=> Removing PDM from %APPDATA%\Python\Scripts"
+	@if exist "%APPDATA%\Python\Scripts\pdm" (del "%APPDATA%\Python\Scripts\pdm")
+else
+	@echo "=> Removing PDM from ~/.local/bin"
+	@rm -f ~/.local/bin/pdm
+endif
+	@echo "=> PDM removal complete"
+	@echo "=> Uninstallation complete!"
+
+# =============================================================================
+# Developer Utils
+# =============================================================================
+install-pdm: 										## Install latest version of PDM
+	@curl -sSLO https://pdm.fming.dev/install-pdm.py && \
+	curl -sSL https://pdm.fming.dev/install-pdm.py.sha256 | shasum -a 256 -c - && \
+	python3 install-pdm.py
+
+install:											## Install the project and all dependencies into a local virtual environment
+	@if ! $(PDM) --version > /dev/null; then echo '=> Installing PDM'; $(MAKE) install-pdm; fi
+	@if [ "$(VENV_EXISTS)" ]; then echo "=> Removing existing virtual environment"; fi
+	if [ "$(VENV_EXISTS)" ]; then $(MAKE) destroy-venv; fi
+	if [ "$(VENV_EXISTS)" ]; then $(MAKE) clean; fi
+	@if [ "$(USING_PDM)" ]; then $(PDM) config venv.in_project true && python3 -m venv --copies .venv && . $(ENV_PREFIX)/activate && $(ENV_PREFIX)/pip install --quiet -U wheel setuptools cython pip mypy nodeenv; fi
+	@if [ "$(USING_PDM)" ]; then $(PDM) install -G:all; fi
+	@echo "=> Install complete! Note: If you want to re-install re-run 'make install'"
+
+
+clean: 												## Cleanup temporary build artifacts
+	@echo "=> Cleaning working directory"
+	@rm -rf .pytest_cache .ruff_cache .hypothesis build/ -rf dist/ .eggs/ .coverage coverage.xml coverage.json htmlcov/ .mypy_cache
+	@find . -name '*.egg-info' -exec rm -rf {} +
+	@find . -name '*.egg' -exec rm -f {} +
+	@find . -name '*.pyc' -exec rm -f {} +
+	@find . -name '*.pyo' -exec rm -f {} +
+	@find . -name '*~' -exec rm -f {} +
+	@find . -name '__pycache__' -exec rm -rf {} +
+	@find . -name '.pytest_cache' -exec rm -rf {} +
+	@find . -name '.ipynb_checkpoints' -exec rm -rf {} +
+
+destroy-venv: 											## Destroy the virtual environment
+	@echo "=> Cleaning Python virtual environment"
+	@rm -rf .venv
+
+destroy-node_modules: 											## Destroy the node environment
+	@echo "=> Cleaning Node modules"
+	@rm -rf node_modules
+
+tidy: clean destroy-venv destroy-node_modules ## Clean up everything
+
+migrations:       ## Generate database migrations
+	@echo "ATTENTION: This operation will create a new database migration for any defined models changes."
+	@while [ -z "$$MIGRATION_MESSAGE" ]; do read -r -p "Migration message: " MIGRATION_MESSAGE; done ;
+	@$(ENV_PREFIX)app database make-migrations --autogenerate -m "$${MIGRATION_MESSAGE}"
+
+.PHONY: migrate
+migrate:          ## Apply all pending database migrations
+	@echo "ATTENTION: Will apply all database migrations."
+	@$(ENV_PREFIX)app database upgrade
+
+.PHONY: build
+build:
+	@echo "=> Building package..."
+	@if [ "$(USING_PDM)" ]; then pdm build; fi
+	@echo "=> Package build complete..."
+
+.PHONY: refresh-lockfiles
+refresh-lockfiles:                                 ## Sync lockfiles with requirements files.
+	@pdm update --update-reuse --group :all
+
+.PHONY: lock
+lock:                                             ## Rebuild lockfiles from scratch, updating all dependencies
+	@pdm update --update-eager --group :all
+
+# =============================================================================
+# Tests, Linting, Coverage
+# =============================================================================
+.PHONY: lint
+lint: 												## Runs pre-commit hooks; includes ruff linting, codespell, black
+	@echo "=> Running pre-commit process"
+	@$(ENV_PREFIX)pre-commit run --all-files
+	@echo "=> Pre-commit complete"
+
+.PHONY: format
+format: 												## Runs code formatting utilities
+	@echo "=> Running code formatters"
+	@$(ENV_PREFIX)ruff . --fix
+	@echo "=> Formatting complete"
+
+.PHONY: coverage
+coverage:  											## Run the tests and generate coverage report
+	@echo "=> Running tests with coverage"
+	@$(ENV_PREFIX)pytest tests --cov=app
+	@$(ENV_PREFIX)coverage html
+	@$(ENV_PREFIX)coverage xml
+	@echo "=> Coverage report generated"
+
+.PHONY: test
+test:  												## Run the tests
+	@echo "=> Running test cases"
+	@$(ENV_PREFIX)pytest tests
+	@echo "=> Tests complete"
+
+# =============================================================================
+# Docs
+# =============================================================================
+.PHONY: docs-install
+docs-install: 										## Install docs dependencies
+	@echo "=> Installing documentation dependencies"
+	@$(PDM) install -dG:docs
+	@echo "=> Installed documentation dependencies"
+
+docs-clean: 										## Remove the existing built docs
+	@echo "=> Cleaning documentation build assets"
+	@rm -rf docs/_build
+	@echo "=> Removed existing documentation build assets"
+
+docs-serve: docs-clean 								## Serve the docs locally
+	@echo "=> Serving documentation"
+	$(PDM_RUN_BIN) sphinx-autobuild docs docs/_build/ -j auto --watch src --watch docs --watch tests --watch CONTRIBUTING.rst --port 8002
+
+docs: docs-clean 									## Remove the existing built docs and rebuild them
+	@echo "=> Building documentation"
+	@$(PDM_RUN_BIN) sphinx-build -M html docs docs/_build/ -E -a -j auto --keep-going
diff --git a/README.md b/README.md
index 32e92d3d..3da25aca 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,259 @@
-# starlite-plugin-example
+# Chapter
 
-Example repository that can be used a base for building Starlite plugins.
+This is the chapter app.
 
-<!-- markdownlint-restore -->
-<!-- prettier-ignore-end -->
+## Quick Start
 
-<!-- ALL-CONTRIBUTORS-LIST:END -->
+To quickly get a development environment running, run the following:
 
-This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
+```shell
+make install
+. .venv/bin/activate
+```
+
+### Local Development
+
+```bash
+cp .env.local.example .env
+pdm run start-infra # this starts a database and redis instance only
+# this will start the SAQ worker, Vite development process, and Litestar
+pdm run app run
+
+# to stop the database and redis, run
+pdm run stop-infra
+```
+
+### Docker
+
+```bash
+docker compose up
+```
+
+### Details
+
+#### Command Examples
+
+## App Commands
+
+```bash
+❯ app
+
+ Usage: app [OPTIONS] COMMAND [ARGS]...
+
+ Litestar CLI.
+
+╭─ Options ────────────────────────────────────────────────────────────────────╮
+│ --app          TEXT       Module path to a Litestar application (TEXT)       │
+│ --app-dir      DIRECTORY  Look for APP in the specified directory, by adding │
+│                           this to the PYTHONPATH. Defaults to the current    │
+│                           working directory.                                 │
+│                           (DIRECTORY)                                        │
+│ --help     -h             Show this message and exit.                        │
+╰──────────────────────────────────────────────────────────────────────────────╯
+Using Litestar app from env: 'app.asgi:app'
+Loading environment configuration from .env
+╭─ Commands ───────────────────────────────────────────────────────────────────╮
+│ assets       Manage Vite Tasks.                                              │
+│ database     Manage SQLAlchemy database components.                          │
+│ info         Show information about the detected Litestar app.               │
+│ routes       Display information about the application's routes.             │
+│ run          Run a Litestar app.                                             │
+│ schema       Manage server-side OpenAPI schemas.                             │
+│ sessions     Manage server-side sessions.                                    │
+│ users        Manage application users and roles.                             │
+│ version      Show the currently installed Litestar version.                  │
+│ workers      Manage background task workers.                                 │
+╰──────────────────────────────────────────────────────────────────────────────╯
+
+```
+
+## Database Commands
+
+Alembic integration is built directly into the CLI under the `database` command.
+
+```bash
+❯ app database
+Using Litestar app from env: 'app.asgi:create_app'
+
+ Usage: app database [OPTIONS] COMMAND [ARGS]...
+
+ Manage SQLAlchemy database components.
+
+╭─ Options ────────────────────────────────────────────────────────────────────╮
+│ --help  -h    Show this message and exit.                                    │
+╰──────────────────────────────────────────────────────────────────────────────╯
+╭─ Commands ───────────────────────────────────────────────────────────────────╮
+│ downgrade              Downgrade database to a specific revision.            │
+│ init                   Initialize migrations for the project.                │
+│ make-migrations        Create a new migration revision.                      │
+│ merge-migrations       Merge multiple revisions into a single new revision.  │
+│ show-current-revision  Shows the current revision for the database.          │
+│ stamp-migration        Mark (Stamp) a specific revision as current without   │
+│                        applying the migrations.                              │
+│ upgrade                Upgrade database to a specific revision.              │
+╰──────────────────────────────────────────────────────────────────────────────╯
+
+```
+
+### Upgrading the Database
+
+```bash
+❯ app database upgrade
+Using Litestar app from env: 'app.asgi:create_app'
+Starting database upgrade process ───────────────────────────────────────────────
+Are you sure you you want migrate the database to the "head" revision? [y/n]: y
+2023-10-01T19:44:13.536101Z [debug    ] Using selector: EpollSelector
+2023-10-01T19:44:13.623437Z [info     ] Context impl PostgresqlImpl.
+2023-10-01T19:44:13.623617Z [info     ] Will assume transactional DDL.
+2023-10-01T19:44:13.667920Z [info     ] Running upgrade  -> c3a9a11cc35d, init
+2023-10-01T19:44:13.774932Z [debug    ] new branch insert c3a9a11cc35d
+2023-10-01T19:44:13.783804Z [info     ] Pool disposed. Pool size: 5  Connections
+ in pool: 0 Current Overflow: -5 Current Checked out connections: 0
+2023-10-01T19:44:13.784013Z [info     ] Pool recreating
+```
+
+## Worker Commands
+
+```bash
+❯ app workers
+Using Litestar app from env: 'app.asgi:create_app'
+
+ Usage: app workers [OPTIONS] COMMAND [ARGS]...
+
+ Manage application background workers.
+
+╭─ Options ────────────────────────────────────────────────────────────────────╮
+│ --help  -h    Show this message and exit.                                    │
+╰──────────────────────────────────────────────────────────────────────────────╯
+╭─ Commands ───────────────────────────────────────────────────────────────────╮
+│ run       Starts the background workers.                                     │
+╰──────────────────────────────────────────────────────────────────────────────╯
+
+```
+
+## Run Commands
+
+To run the application through Granian (HTTP1 or HTTP2) using the standard Litestar CLI, you can use the following:
+
+```bash
+❯ app run --help
+Using Litestar app from env: 'app.asgi:app'
+Loading environment configuration from .env
+
+ Usage: app run [OPTIONS]
+
+ Run a Litestar app.
+ The app can be either passed as a module path in the form of <module
+ name>.<submodule>:<app instance or factory>, set as an environment variable
+ LITESTAR_APP with the same format or automatically discovered from one of
+ these canonical paths: app.py, asgi.py, application.py or app/__init__.py.
+ When auto-discovering application factories, functions with the name
+ ``create_app`` are considered, or functions that are annotated as returning a
+ ``Litestar`` instance.
+
+╭─ Options ────────────────────────────────────────────────────────────────────╮
+│ --port                   -p  INTEGER                 Serve under this port   │
+│                                                      (INTEGER)               │
+│                                                      [default: 8000]         │
+│ --wc,--web-concurrency…  -W  INTEGER RANGE           The number of processes │
+│                              [1<=x<=7]               to start.               │
+│                                                      (INTEGER RANGE)         │
+│                                                      [default: 1; 1<=x<=7]   │
+│ --threads                    INTEGER RANGE [x>=1]    The number of threads.  │
+│                                                      (INTEGER RANGE)         │
+│                                                      [default: 1; x>=1]      │
+│ --blocking-threads           INTEGER RANGE [x>=1]    The number of blocking  │
+│                                                      threads.                │
+│                                                      (INTEGER RANGE)         │
+│                                                      [default: 1; x>=1]      │
+│ --threading-mode             THREADMODES             Threading mode to use.  │
+│                                                      (THREADMODES)           │
+│ --http                       HTTPMODES               HTTP Version to use     │
+│                                                      (HTTP or HTTP2)         │
+│                                                      (HTTPMODES)             │
+│ --opt                                                Enable additional event │
+│                                                      loop optimizations      │
+│ --backlog                    INTEGER RANGE [x>=128]  Maximum number of       │
+│                                                      connections to hold in  │
+│                                                      backlog.                │
+│                                                      (INTEGER RANGE)         │
+│                                                      [default: 1024; x>=128] │
+│ --host                   -H  TEXT                    Server under this host  │
+│                                                      (TEXT)                  │
+│                                                      [default: 127.0.0.1]    │
+│ --ssl-keyfile                FILE                    SSL key file (FILE)     │
+│ --ssl-certificate            FILE                    SSL certificate file    │
+│                                                      (FILE)                  │
+│ --create-self-signed-c…                              If certificate and key  │
+│                                                      are not found at        │
+│                                                      specified locations,    │
+│                                                      create a self-signed    │
+│                                                      certificate and a key   │
+│ --http1-buffer-size          INTEGER RANGE           Set the maximum buffer  │
+│                              [x>=8192]               size for HTTP/1         │
+│                                                      connections             │
+│                                                      (INTEGER RANGE)         │
+│                                                      [default: 417792;       │
+│                                                      x>=8192]                │
+│ --http1-keep-alive/--n…                              Enables or disables     │
+│                                                      HTTP/1 keep-alive       │
+│                                                      [default:               │
+│                                                      http1-keep-alive]       │
+│ --http1-pipeline-flush…                              Aggregates HTTP/1       │
+│                                                      flushes to better       │
+│                                                      support pipelined       │
+│                                                      responses               │
+│                                                      (experimental)          │
+│ --http2-adaptive-windo…                              Sets whether to use an  │
+│                                                      adaptive flow control   │
+│                                                      for HTTP2               │
+│ --http2-initial-connec…      INTEGER                 Sets the max            │
+│                                                      connection-level flow   │
+│                                                      control for HTTP2       │
+│                                                      (INTEGER)               │
+│ --http2-initial-stream…      INTEGER                 Sets the                │
+│                                                      `SETTINGS_INITIAL_WIND… │
+│                                                      option for HTTP2        │
+│                                                      stream-level flow       │
+│                                                      control                 │
+│                                                      (INTEGER)               │
+│ --http2-keep-alive-int…      OPTIONAL                Sets an interval for    │
+│                                                      HTTP2 Ping frames       │
+│                                                      should be sent to keep  │
+│                                                      a connection alive      │
+│                                                      (OPTIONAL)              │
+│ --http2-keep-alive-tim…      INTEGER                 Sets a timeout for      │
+│                                                      receiving an            │
+│                                                      acknowledgement of the  │
+│                                                      HTTP2 keep-alive ping   │
+│                                                      (INTEGER)               │
+│ --http2-max-concurrent…      INTEGER                 Sets the                │
+│                                                      SETTINGS_MAX_CONCURREN… │
+│                                                      option for HTTP2        │
+│                                                      connections             │
+│                                                      (INTEGER)               │
+│ --http2-max-frame-size       INTEGER                 Sets the maximum frame  │
+│                                                      size to use for HTTP2   │
+│                                                      (INTEGER)               │
+│ --http2-max-headers-si…      INTEGER                 Sets the max size of    │
+│                                                      received header frames  │
+│                                                      (INTEGER)               │
+│ --http2-max-send-buffe…      INTEGER                 Set the maximum write   │
+│                                                      buffer size for each    │
+│                                                      HTTP/2 stream           │
+│                                                      (INTEGER)               │
+│ --url-path-prefix            TEXT                    URL path prefix the app │
+│                                                      is mounted on           │
+│                                                      (TEXT)                  │
+│ --debug                  -d                          Run app in debug mode   │
+│ --pdb,--use-pdb          -P                          Drop into PDB on an     │
+│                                                      exception               │
+│ --respawn-failed-worke…                              Enable workers respawn  │
+│                                                      on unexpected exit      │
+│ --reload                 -r                          Reload server on        │
+│                                                      changes                 │
+│ --help                   -h                          Show this message and   │
+│                                                      exit.                   │
+╰──────────────────────────────────────────────────────────────────────────────╯
+
+```
diff --git a/deploy/aws/Dockerfile.app b/deploy/aws/Dockerfile.app
new file mode 100644
index 00000000..7f8b4457
--- /dev/null
+++ b/deploy/aws/Dockerfile.app
@@ -0,0 +1,86 @@
+ARG PYTHON_BUILDER_IMAGE=3.12-slim-bookworm
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- Python base -------------------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+FROM python:${PYTHON_BUILDER_IMAGE} as python-base
+ENV PIP_DEFAULT_TIMEOUT=100 \
+  PIP_DISABLE_PIP_VERSION_CHECK=1 \
+  PIP_NO_CACHE_DIR=1 \
+  PIP_ROOT_USER_ACTION=ignore \
+  PYTHONDONTWRITEBYTECODE=1 \
+  PYTHONUNBUFFERED=1 \
+  PYTHONFAULTHANDLER=1 \
+  PYTHONHASHSEED=random \
+  LANG=C.UTF-8 \
+  LC_ALL=C.UTF-8
+RUN apt-get update \
+  && apt-get upgrade -y \
+  && apt-get install -y --no-install-recommends git tini curl \
+  && apt-get install -y dnsutils \
+  && apt-get autoremove -y \
+  && apt-get clean -y \
+  && rm -rf /root/.cache \
+  && rm -rf /var/apt/lists/* \
+  && rm -rf /var/cache/apt/* \
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false\
+  && mkdir -p /workspace/app \
+  && pip install --quiet -U pip wheel setuptools virtualenv
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- Python build base -------------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+FROM python-base AS build-base
+ARG PDM_INSTALL_ARGS=""
+ENV PDM_INSTALL_ARGS="${PDM_INSTALL_ARGS}" \
+  GRPC_PYTHON_BUILD_WITH_CYTHON=1 \
+  PATH="/workspace/app/.venv/bin:/usr/local/bin:$PATH"
+## -------------------------- add build packages ----------------------------------- ##
+RUN apt-get install -y --no-install-recommends build-essential curl \
+  && apt-get autoremove -y \
+  && apt-get clean -y \
+  && rm -rf /root/.cache \
+  && rm -rf /var/apt/lists/* \
+  && rm -rf /var/cache/apt/* \
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false
+
+## -------------------------- install application ----------------------------------- ##
+WORKDIR /workspace/app
+COPY pyproject.toml pdm.lock README.md .pre-commit-config.yaml LICENSE.md Makefile \
+  ./
+COPY scripts ./scripts/
+RUN python -m venv --copies /workspace/app/.venv \
+  && /workspace/app/.venv/bin/pip install --quiet pdm nodeenv cython  \
+  && pdm install ${PDM_INSTALL_ARGS} --no-self  \
+  && pdm export ${PDM_INSTALL_ARGS} --without-hashes --prod --output=requirements.txt
+COPY src ./src/
+RUN pdm build
+
+
+## ---------------------------------------------------------------------------------- ##
+## -------------------------------- runtime build ----------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- use base image  ---------------------------------------- ##
+
+FROM python-base as run-image
+ARG ENV_SECRETS="runtime-secrets"
+ARG LITESTAR_APP="app.asgi:app"
+ENV ENV_SECRETS="${ENV_SECRETS}" \
+  LITESTAR_APP="${LITESTAR_APP}"
+
+RUN addgroup --system --gid 65532 nonroot \
+  && adduser --no-create-home --system --uid 65532 nonroot \
+  && chown -R nonroot:nonroot /workspace
+## -------------------------- install application ----------------------------------- ##
+COPY --from=build-base --chown=65532:65532 /workspace/app/requirements.txt /tmp/requirements.txt
+COPY --from=build-base --chown=65532:65532 /workspace/app/dist /tmp/
+WORKDIR /workspace/app
+RUN pip install --quiet --disable-pip-version-check --no-deps --requirement=/tmp/requirements.txt
+RUN pip install --quiet --disable-pip-version-check --no-deps /tmp/*.whl
+
+USER nonroot
+STOPSIGNAL SIGINT
+EXPOSE 8000
+ENTRYPOINT [ "tini", "--" ]
+CMD [ "litestar", "run", "--host", "0.0.0.0" ]
+VOLUME /workspace/app
diff --git a/CHANGELOG.md b/deploy/aws/README.md
similarity index 100%
rename from CHANGELOG.md
rename to deploy/aws/README.md
diff --git a/deploy/aws/ecs-task-definition-prod.json b/deploy/aws/ecs-task-definition-prod.json
new file mode 100644
index 00000000..ae0115b2
--- /dev/null
+++ b/deploy/aws/ecs-task-definition-prod.json
@@ -0,0 +1,289 @@
+{
+  "family": "chapter-app-family-prod",
+  "containerDefinitions": [
+    {
+      "name": "app",
+      "image": "050206582437.dkr.ecr.eu-central-1.amazonaws.com/chapter-app-repository-prod:latest",
+      "cpu": "256",
+      "memory": "512",
+      "essential": true,
+      "command": ["litestar", "run", "--host", "0.0.0.0"],
+      "dependsOn": [
+        {
+          "condition": "SUCCESS",
+          "containerName": "db-migrator"
+        },
+        {
+          "condition": "SUCCESS",
+          "containerName": "worker"
+        }
+      ],
+      "portMappings": [
+        {
+          "containerPort": 8000,
+          "hostPort": 8000,
+          "protocol": "tcp"
+        }
+      ],
+      "environment": [
+        {
+          "name": "LITESTAR_APP",
+          "value": "app.asgi:create_app"
+        },
+        {
+          "name": "APP_ENVIRONMENT",
+          "value": "docker"
+        },
+        {
+          "name": "OPENAPI_CONTACT_EMAIL",
+          "value": "dev@nectar.run"
+        },
+        {
+          "name": "OPENAPI_CONTACT_NAME",
+          "value": "Devs"
+        },
+        {
+          "name": "OPENAPI_TITLE",
+          "value": "Chapter API"
+        },
+        {
+          "name": "DB_MIGRATION_DDL_VERSION_TABLE",
+          "value": "ddl_version"
+        },
+        { "name": "SAQ_USE_SERVER_LIFESPAN", "value": "false" }
+      ],
+      "secrets": [
+        {
+          "name": "APP_SECRET_KEY",
+          "valueFrom": "arn:aws:ssm:eu-central-1:050206582437:parameter/chapter-app/prod/app/app-secret-key"
+        },
+        {
+          "name": "SLACK_ALERTS_URL",
+          "valueFrom": "arn:aws:ssm:eu-central-1:050206582437:parameter/chapter-app/prod/app/slack-alerts-url"
+        },
+        {
+          "name": "REDIS_URL",
+          "valueFrom": "arn:aws:ssm:eu-central-1:050206582437:parameter/chapter-app/prod/app/redis-url"
+        },
+        {
+          "name": "DB_PASSWORD",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:password::"
+        },
+        {
+          "name": "DB_HOST",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:host::"
+        },
+        {
+          "name": "DB_PORT",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:port::"
+        },
+        {
+          "name": "DB_USER",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:username::"
+        },
+        {
+          "name": "DB_NAME",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:dbname::"
+        }
+      ],
+      "healthCheck": {
+        "command": [
+          "CMD-SHELL",
+          "curl -f http://localhost:8000/health || exit 1"
+        ],
+        "interval": 30,
+        "timeout": 5,
+        "retries": 3,
+        "startPeriod": 60
+      },
+      "logConfiguration": {
+        "logDriver": "awslogs",
+        "options": {
+          "awslogs-create-group": "true",
+          "awslogs-group": "/ecs/chapter-app-app-prod",
+          "awslogs-region": "eu-central-1",
+          "awslogs-stream-prefix": "ecs"
+        }
+      }
+    },
+    {
+      "name": "db-migrator",
+      "image": "050206582437.dkr.ecr.eu-central-1.amazonaws.com/chapter-app-repository-prod:latest",
+      "cpu": "256",
+      "memory": "512",
+      "essential": false,
+      "command": ["litestar", "database", "upgrade", "--no-prompt"],
+      "environment": [
+        {
+          "name": "LITESTAR_APP",
+          "value": "app.asgi:create_app"
+        },
+        {
+          "name": "APP_ENVIRONMENT",
+          "value": "docker"
+        },
+        {
+          "name": "OPENAPI_CONTACT_EMAIL",
+          "value": "dev@nectar.run"
+        },
+        {
+          "name": "OPENAPI_CONTACT_NAME",
+          "value": "Devs"
+        },
+        {
+          "name": "OPENAPI_TITLE",
+          "value": "Chapter API"
+        },
+        {
+          "name": "DB_MIGRATION_DDL_VERSION_TABLE",
+          "value": "ddl_version"
+        },
+        { "name": "SAQ_USE_SERVER_LIFESPAN", "value": "false" }
+      ],
+      "secrets": [
+        {
+          "name": "APP_SECRET_KEY",
+          "valueFrom": "arn:aws:ssm:eu-central-1:050206582437:parameter/chapter-app/prod/app/app-secret-key"
+        },
+        {
+          "name": "SLACK_ALERTS_URL",
+          "valueFrom": "arn:aws:ssm:eu-central-1:050206582437:parameter/chapter-app/prod/app/slack-alerts-url"
+        },
+        {
+          "name": "REDIS_URL",
+          "valueFrom": "arn:aws:ssm:eu-central-1:050206582437:parameter/chapter-app/prod/app/redis-url"
+        },
+        {
+          "name": "DB_PASSWORD",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:password::"
+        },
+        {
+          "name": "DB_HOST",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:host::"
+        },
+        {
+          "name": "DB_PORT",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:port::"
+        },
+        {
+          "name": "DB_USER",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:username::"
+        },
+        {
+          "name": "DB_NAME",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:dbname::"
+        }
+      ],
+      "healthCheck": {
+        "command": ["CMD-SHELL", "exit 0"],
+        "interval": 30,
+        "timeout": 5,
+        "retries": 3,
+        "startPeriod": 60
+      },
+      "logConfiguration": {
+        "logDriver": "awslogs",
+        "options": {
+          "awslogs-create-group": "true",
+          "awslogs-group": "/ecs/chapter-app-db-migrator-prod",
+          "awslogs-region": "eu-central-1",
+          "awslogs-stream-prefix": "ecs"
+        }
+      }
+    },
+    {
+      "name": "worker",
+      "image": "050206582437.dkr.ecr.eu-central-1.amazonaws.com/chapter-app-repository-prod:latest",
+      "cpu": "256",
+      "memory": "512",
+      "essential": false,
+      "command": ["litestar", "workers", "run"],
+      "environment": [
+        {
+          "name": "LITESTAR_APP",
+          "value": "app.asgi:create_app"
+        },
+        {
+          "name": "APP_ENVIRONMENT",
+          "value": "docker"
+        },
+        {
+          "name": "OPENAPI_CONTACT_EMAIL",
+          "value": "dev@nectar.run"
+        },
+        {
+          "name": "OPENAPI_CONTACT_NAME",
+          "value": "Devs"
+        },
+        {
+          "name": "OPENAPI_TITLE",
+          "value": "Chapter API"
+        },
+        {
+          "name": "DB_MIGRATION_DDL_VERSION_TABLE",
+          "value": "ddl_version"
+        },
+        { "name": "SAQ_USE_SERVER_LIFESPAN", "value": "false" }
+      ],
+      "secrets": [
+        {
+          "name": "APP_SECRET_KEY",
+          "valueFrom": "arn:aws:ssm:eu-central-1:050206582437:parameter/chapter-app/prod/app/app-secret-key"
+        },
+        {
+          "name": "SLACK_ALERTS_URL",
+          "valueFrom": "arn:aws:ssm:eu-central-1:050206582437:parameter/chapter-app/prod/app/slack-alerts-url"
+        },
+        {
+          "name": "REDIS_URL",
+          "valueFrom": "arn:aws:ssm:eu-central-1:050206582437:parameter/chapter-app/prod/app/redis-url"
+        },
+        {
+          "name": "DB_PASSWORD",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:password::"
+        },
+        {
+          "name": "DB_HOST",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:host::"
+        },
+        {
+          "name": "DB_PORT",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:port::"
+        },
+        {
+          "name": "DB_USER",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:username::"
+        },
+        {
+          "name": "DB_NAME",
+          "valueFrom": "arn:aws:secretsmanager:eu-central-1:050206582437:secret:chapter-app-db-password-prod-IM9fy3:dbname::"
+        }
+      ],
+      "healthCheck": {
+        "command": ["CMD-SHELL", "exit 0"],
+        "interval": 30,
+        "timeout": 5,
+        "retries": 3,
+        "startPeriod": 60
+      },
+      "logConfiguration": {
+        "logDriver": "awslogs",
+        "options": {
+          "awslogs-create-group": "true",
+          "awslogs-group": "/ecs/chapter-app-worker-prod",
+          "awslogs-region": "eu-central-1",
+          "awslogs-stream-prefix": "ecs"
+        }
+      }
+    }
+  ],
+  "runtimePlatform": {
+    "operatingSystemFamily": "LINUX"
+  },
+  "networkMode": "awsvpc",
+  "requiresCompatibilities": ["FARGATE"],
+  "cpu": "1024",
+  "memory": "2048",
+  "executionRoleArn": "arn:aws:iam::050206582437:role/chapter-app-ecs-task-execution-role-prod",
+  "taskRoleArn": "arn:aws:iam::050206582437:role/chapter-app-ecs-task-role-prod"
+}
diff --git a/deploy/aws/tf/main.tf b/deploy/aws/tf/main.tf
new file mode 100644
index 00000000..9deb50db
--- /dev/null
+++ b/deploy/aws/tf/main.tf
@@ -0,0 +1,123 @@
+# tf/main.tf
+# 
+data "aws_caller_identity" "current" {}
+
+locals {
+  ecs_task_definition_json = file("../ecs-task-definition-${var.environment}.json")
+}
+
+terraform {
+  backend "s3" {
+    bucket         = "${var.app_name}-tf-backend"     # Replace with your S3 bucket name
+    key            = "${var.environment}/state/terraform.tfstate" # Replace with your desired state file path
+    region         = var.region                       # Replace with your AWS region
+    dynamodb_table = "${var.app_name}-tf-lock-table-${var.environment}"  # Optional: for state locking
+    encrypt        = true                             # Encrypt the state file at rest
+  }
+}
+
+provider "aws" {
+  region = var.region
+}
+
+module "vpc" {
+  source      = "./modules/vpc"
+  app_name    = var.app_name
+  environment = var.environment
+}
+
+module "vpc_endpoints" {
+  source             = "./modules/vpc_endpoints"
+  app_name           = var.app_name
+  environment        = var.environment
+  region             = var.region
+  vpc_id             = module.vpc.vpc_id
+  private_subnets    = module.vpc.private_subnets
+  security_group_ids = [module.vpc.rds_security_group_id]
+  public_route_table_ids = module.vpc.public_route_table_ids
+}
+
+module "alb" {
+  source          = "./modules/alb"
+  app_name        = var.app_name
+  environment     = var.environment
+  domain_name     = var.domain_name
+  vpc_id          = module.vpc.vpc_id
+  public_subnets  = module.vpc.public_subnets
+  security_groups = [module.vpc.alb_security_group_id]
+}
+
+module "ecr" {
+  source                   = "./modules/ecr"
+  app_name                 = var.app_name
+  environment              = var.environment
+} 
+
+module "ecs" {
+  source                   = "./modules/ecs"
+  app_name                 = var.app_name
+  environment              = var.environment
+  vpc_id                   = module.vpc.vpc_id
+  private_subnets          = module.vpc.private_subnets
+  ecs_task_execution_role  = module.iam.ecs_execution_role_arn
+  ecs_task_role            = module.iam.ecs_task_role_arn
+  alb_target_group_arn     = module.alb.target_group_arn
+  security_groups          = [module.vpc.ecs_security_group_id]
+  ecs_task_definition_json = local.ecs_task_definition_json
+}
+
+module "rds" {
+  source             = "./modules/rds"
+  app_name           = var.app_name
+  environment        = var.environment
+  vpc_id             = module.vpc.vpc_id
+  private_subnets    = module.vpc.private_subnets
+  security_group_ids = [module.vpc.rds_security_group_id]
+  app_db_user        = "app_user"
+  app_db_name        = "${var.app_name}-db"
+}
+
+module "jumpbox" {
+  source             = "./modules/jumpbox"
+  app_name           = var.app_name
+  environment        = var.environment
+  vpc_id             = module.vpc.vpc_id
+  public_subnet_id   = module.vpc.public_subnets[0]
+  security_group_ids = [module.vpc.ec2_jumpbox_security_group_id]
+  ec2_key_pair_name  = var.ec2_key_pair_name
+}
+
+module "s3" {
+  source   = "./modules/s3"
+  app_name = var.app_name
+  environment = var.environment
+}
+
+module "redis" {
+  source             = "./modules/redis"
+  app_name           = var.app_name
+  environment        = var.environment
+  vpc_id             = module.vpc.vpc_id
+  private_subnets    = module.vpc.private_subnets
+  security_group_ids = [module.vpc.redis_security_group_id]
+}
+
+module "cloudwatch" {
+  source      = "./modules/cloudwatch"
+  app_name    = var.app_name
+  environment = var.environment
+}
+
+module "iam" {
+  source                    = "./modules/iam"
+  app_name                  = var.app_name
+  environment               = var.environment
+  region                    = var.region
+  aws_account_id            = data.aws_caller_identity.current.account_id
+  app_bucket_name           = module.s3.app_bucket_name
+  rds_db_id                 = module.rds.rds_db_id
+  rds_db_secret_arn         = module.rds.rds_db_secret_arn
+  cloudwatch_log_group_name = module.cloudwatch.cloudwatch_log_group_name
+  github_repo               = var.github_repo
+  github_branch             = var.github_branch
+}
diff --git a/deploy/aws/tf/modules/alb/main.tf b/deploy/aws/tf/modules/alb/main.tf
new file mode 100644
index 00000000..81a07406
--- /dev/null
+++ b/deploy/aws/tf/modules/alb/main.tf
@@ -0,0 +1,54 @@
+# tf/modules/alb/main.tf
+
+resource "aws_lb" "app_alb" {
+  name               = "${var.app_name}-app-alb-${var.environment}"
+  internal           = false
+  load_balancer_type = "application"
+  security_groups    = var.security_groups
+  subnets            = var.public_subnets
+
+  tags = {
+    Name = "app-alb"
+    Environment = var.environment
+  }
+
+  depends_on = [var.vpc_id]
+}
+
+resource "aws_lb_target_group" "app_tg" {
+  name        = "${var.app_name}-tg-${var.environment}"
+  port        = 80
+  protocol    = "HTTP"
+  target_type = "ip"
+  vpc_id      = var.vpc_id
+
+  health_check {
+    path                = "/health"
+    protocol            = "HTTP"
+    interval            = 30
+    timeout             = 5
+    healthy_threshold   = 5
+    unhealthy_threshold = 2
+  }
+
+  depends_on = [var.vpc_id]
+}
+
+data "aws_acm_certificate" "domain_cert" {
+  domain   = var.domain_name
+  statuses = ["ISSUED"]
+}
+
+resource "aws_lb_listener" "https" {
+  load_balancer_arn = aws_lb.app_alb.arn
+  port              = "443"
+  protocol          = "HTTPS"
+  certificate_arn   = data.aws_acm_certificate.domain_cert.arn
+
+  default_action {
+    type             = "forward"
+    target_group_arn = aws_lb_target_group.app_tg.arn
+  }
+
+  depends_on = [aws_lb_target_group.app_tg]
+}
diff --git a/deploy/aws/tf/modules/alb/outputs.tf b/deploy/aws/tf/modules/alb/outputs.tf
new file mode 100644
index 00000000..194d3ab4
--- /dev/null
+++ b/deploy/aws/tf/modules/alb/outputs.tf
@@ -0,0 +1,11 @@
+# tf/modules/alb/outputs.tf
+
+output "alb_dns_name" {
+  description = "DNS name of the ALB"
+  value       = aws_lb.app_alb.dns_name
+}
+
+output "target_group_arn" {
+  description = "ARN of the ALB target group"
+  value       = aws_lb_target_group.app_tg.arn
+}
diff --git a/deploy/aws/tf/modules/alb/variables.tf b/deploy/aws/tf/modules/alb/variables.tf
new file mode 100644
index 00000000..090a4bde
--- /dev/null
+++ b/deploy/aws/tf/modules/alb/variables.tf
@@ -0,0 +1,31 @@
+# tf/modules/alb/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
+
+variable "domain_name" {
+  description = "App domain name, used to reference the right certificate"
+  type        = string
+}
+
+variable "vpc_id" {
+  description = "VPC ID where ALB will be deployed"
+  type        = string
+}
+
+variable "public_subnets" {
+  description = "Public subnet IDs for ALB"
+  type        = list(string)
+}
+
+variable "security_groups" {
+  description = "Security groups for ALB"
+  type        = list(string)
+}
diff --git a/deploy/aws/tf/modules/cloudwatch/main.tf b/deploy/aws/tf/modules/cloudwatch/main.tf
new file mode 100644
index 00000000..f55bfb3c
--- /dev/null
+++ b/deploy/aws/tf/modules/cloudwatch/main.tf
@@ -0,0 +1,31 @@
+# tf/modules/cloudwatch/main.tf
+
+resource "aws_cloudwatch_log_group" "app_log_group" {
+  name              = "/ecs/${var.app_name}-app-${var.environment}"
+  retention_in_days = 7
+
+  tags = {
+    Name = "${var.app_name}-app-log-group-${var.environment}"
+    Env  = var.environment
+  }
+}
+
+resource "aws_cloudwatch_log_group" "db_migrator_log_group" {
+  name              = "/ecs/${var.app_name}-db-migrator-${var.environment}"
+  retention_in_days = 7
+
+  tags = {
+    Name = "${var.app_name}-db-migrator-log-group-${var.environment}"
+    Env  = var.environment
+  }
+}
+
+resource "aws_cloudwatch_log_group" "worker_log_group" {
+  name              = "/ecs/${var.app_name}-worker-${var.environment}"
+  retention_in_days = 7
+
+  tags = {
+    Name = "${var.app_name}-worker-log-group-${var.environment}"
+    Env  = var.environment
+  }
+}
diff --git a/deploy/aws/tf/modules/cloudwatch/outputs.tf b/deploy/aws/tf/modules/cloudwatch/outputs.tf
new file mode 100644
index 00000000..fd15301b
--- /dev/null
+++ b/deploy/aws/tf/modules/cloudwatch/outputs.tf
@@ -0,0 +1,6 @@
+# tf/modules/cloudwatch/outputs.tf
+
+output "cloudwatch_log_group_name" {
+  description = "Name of the CloudWatch Log Group"
+  value       = aws_cloudwatch_log_group.app_log_group.name
+}
diff --git a/deploy/aws/tf/modules/cloudwatch/variables.tf b/deploy/aws/tf/modules/cloudwatch/variables.tf
new file mode 100644
index 00000000..e4f30d4a
--- /dev/null
+++ b/deploy/aws/tf/modules/cloudwatch/variables.tf
@@ -0,0 +1,12 @@
+# tf/modules/cloudwatch/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
+
diff --git a/deploy/aws/tf/modules/ecr/main.tf b/deploy/aws/tf/modules/ecr/main.tf
new file mode 100644
index 00000000..fdacbaf0
--- /dev/null
+++ b/deploy/aws/tf/modules/ecr/main.tf
@@ -0,0 +1,14 @@
+# tf/modules/ecr/main.tf
+
+resource "aws_ecr_repository" "app_repository" {
+  name = "${var.app_name}-repository-${var.environment}"
+
+  image_scanning_configuration {
+    scan_on_push = true
+  }
+
+  tags = {
+    Name = "${var.app_name}-repository-${var.environment}"
+    Environment = var.environment
+  }
+}
diff --git a/deploy/aws/tf/modules/ecr/outputs.tf b/deploy/aws/tf/modules/ecr/outputs.tf
new file mode 100644
index 00000000..44da6def
--- /dev/null
+++ b/deploy/aws/tf/modules/ecr/outputs.tf
@@ -0,0 +1,6 @@
+# tf/modules/ecr/outputs.tf
+
+output "repository_url" {
+  description = "URL of the ECR repository"
+  value       = aws_ecr_repository.app_repository.repository_url
+}
diff --git a/deploy/aws/tf/modules/ecr/variables.tf b/deploy/aws/tf/modules/ecr/variables.tf
new file mode 100644
index 00000000..2d31bb35
--- /dev/null
+++ b/deploy/aws/tf/modules/ecr/variables.tf
@@ -0,0 +1,11 @@
+# tf/modules/ecr/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "The environment name (e.g., dev, staging, prod)"
+  type        = string
+}
diff --git a/deploy/aws/tf/modules/ecs/main.tf b/deploy/aws/tf/modules/ecs/main.tf
new file mode 100644
index 00000000..45d5a1e2
--- /dev/null
+++ b/deploy/aws/tf/modules/ecs/main.tf
@@ -0,0 +1,34 @@
+# tf/modules/ecs/main.tf
+
+resource "aws_ecs_cluster" "app_cluster" {
+  name = "${var.app_name}-cluster-${var.environment}"
+}
+
+resource "aws_ecs_task_definition" "app_task" {
+  family                = jsondecode(var.ecs_task_definition_json).family
+  network_mode          = jsondecode(var.ecs_task_definition_json).networkMode
+  requires_compatibilities = jsondecode(var.ecs_task_definition_json).requiresCompatibilities
+  cpu                   = jsondecode(var.ecs_task_definition_json).cpu
+  memory                = jsondecode(var.ecs_task_definition_json).memory
+  execution_role_arn    = var.ecs_task_execution_role
+  task_role_arn         = var.ecs_task_role
+  container_definitions = jsonencode(jsondecode(var.ecs_task_definition_json).containerDefinitions)
+}
+
+resource "aws_ecs_service" "app_service" {
+  name            = "${var.app_name}-service-${var.environment}"
+  cluster         = aws_ecs_cluster.app_cluster.id
+  task_definition = aws_ecs_task_definition.app_task.arn
+  desired_count   = 1
+  launch_type     = "FARGATE"
+  network_configuration {
+    subnets         = var.private_subnets
+    security_groups = var.security_groups
+    assign_public_ip = false
+  }
+  load_balancer {
+    target_group_arn = var.alb_target_group_arn
+    container_name   = "app"
+    container_port   = 8000
+  }
+}
diff --git a/deploy/aws/tf/modules/ecs/outputs.tf b/deploy/aws/tf/modules/ecs/outputs.tf
new file mode 100644
index 00000000..9f37a452
--- /dev/null
+++ b/deploy/aws/tf/modules/ecs/outputs.tf
@@ -0,0 +1,6 @@
+# tf/modules/ecs/outputs.tf
+
+output "ecs_cluster_name" {
+  description = "Name of the ECS cluster"
+  value       = aws_ecs_cluster.app_cluster.name
+}
diff --git a/deploy/aws/tf/modules/ecs/variables.tf b/deploy/aws/tf/modules/ecs/variables.tf
new file mode 100644
index 00000000..d6362bc5
--- /dev/null
+++ b/deploy/aws/tf/modules/ecs/variables.tf
@@ -0,0 +1,46 @@
+# tf/modules/ecs/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
+
+variable "vpc_id" {
+  description = "VPC ID"
+  type        = string
+}
+
+variable "private_subnets" {
+  description = "Private subnet IDs for ECS tasks"
+  type        = list(string)
+}
+
+variable "ecs_task_execution_role" {
+  description = "IAM role for ECS task execution"
+  type        = string
+}
+
+variable "ecs_task_role" {
+  description = "IAM role for ECS task"
+  type        = string
+}
+
+variable "alb_target_group_arn" {
+  description = "ALB target group ARN"
+  type        = string
+}
+
+variable "security_groups" {
+  description = "Security groups for ECS tasks"
+  type        = list(string)
+}
+
+variable "ecs_task_definition_json" {
+  description = "ECS task definition json"
+  type        = string
+}
diff --git a/deploy/aws/tf/modules/iam/main.tf b/deploy/aws/tf/modules/iam/main.tf
new file mode 100644
index 00000000..c4ca72cd
--- /dev/null
+++ b/deploy/aws/tf/modules/iam/main.tf
@@ -0,0 +1,171 @@
+# tf/modules/iam/main.tf
+ 
+resource "aws_iam_openid_connect_provider" "github_actions" {
+  url                   = "https://token.actions.githubusercontent.com"
+  client_id_list        = ["sts.amazonaws.com"]
+  thumbprint_list       = ["6938fd4d98bab03faadb97b34396831e3780aea1"]
+
+  tags = {
+    Name = "GitHub Actions OIDC Provider"
+  }
+}
+
+resource "aws_iam_role" "ecs_task_execution_role" {
+  name = "${var.app_name}-ecs-task-execution-role-${var.environment}"
+
+  assume_role_policy = jsonencode({
+    Version = "2012-10-17",
+    Statement = [{
+      Action    = "sts:AssumeRole",
+      Effect    = "Allow",
+      Principal = {
+        Service = "ecs-tasks.amazonaws.com"
+      }
+    }]
+  })
+
+  managed_policy_arns = [
+    "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy",
+    "arn:aws:iam::aws:policy/service-role/AmazonEC2ContainerServiceRole"
+  ]
+
+  inline_policy {
+    name = "${var.app_name}-ecs-task-outbound-access-policy-${var.environment}"
+    policy = jsonencode({
+      Version = "2012-10-17",
+      Statement = [
+        {
+          Effect   = "Allow",
+          Action   = ["ssm:GetParameters", "ssm:GetParameter"],
+          Resource = "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter/${var.app_name}/${var.environment}/*"
+        },
+        {
+          Effect   = "Allow",
+          Action   = ["secretsmanager:GetSecretValue"],
+          Resource = var.rds_db_secret_arn
+        },
+      ]
+    })
+  }
+
+}
+
+resource "aws_iam_role" "ecs_task_role" {
+  name = "${var.app_name}-ecs-task-role-${var.environment}"
+
+  assume_role_policy = jsonencode({
+    Version = "2012-10-17",
+    Statement = [{
+      Action    = "sts:AssumeRole",
+      Effect    = "Allow",
+      Principal = {
+        Service = "ecs-tasks.amazonaws.com"
+      }
+    }]
+  })
+
+  managed_policy_arns = [
+    "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy",
+  ]
+
+  inline_policy {
+    name = "${var.app_name}-ecs-task-outbound-access-policy-${var.environment}"
+    policy = jsonencode({
+      Version = "2012-10-17",
+      Statement = [
+        {
+          Effect   = "Allow",
+          Action   = ["s3:ListBucket", "s3:GetObject"],
+          Resource = "arn:aws:s3:::${var.app_bucket_name}/*"
+        },
+        {
+          Effect   = "Allow",
+          Action   = ["rds:Connect"],
+          Resource = "arn:aws:rds:${var.region}:${var.aws_account_id}:db:${var.rds_db_id}"
+        },
+        {
+          Effect   = "Allow",
+          Action   = ["elasticache:DescribeCacheClusters"],
+          Resource = "*"
+        },
+        {
+          Effect   = "Allow",
+          Action   = ["logs:CreateLogGroup", "logs:CreateLogStream", "logs:PutLogEvents"],
+          Resource = "arn:aws:logs:${var.region}:${var.aws_account_id}:log-group:/ecs/${var.app_name}*:*"
+        }
+      ]
+    })
+  }
+}
+
+# Define the IAM Role with OIDC trust policy
+resource "aws_iam_role" "github_actions_role" {
+  name = "${var.app_name}-github-actions-role-${var.environment}"
+
+  assume_role_policy = jsonencode({
+    Version = "2012-10-17",
+    Statement = [
+      {
+        Effect = "Allow",
+        Principal = {
+          Federated = "arn:aws:iam::${var.aws_account_id}:oidc-provider/token.actions.githubusercontent.com"
+        },
+        Action = "sts:AssumeRoleWithWebIdentity",
+        Condition = {
+          StringLike = {
+            "token.actions.githubusercontent.com:sub" : "repo:${var.github_repo}:*"
+          },
+          "ForAllValues:StringEquals": {
+              "token.actions.githubusercontent.com:aud": "sts.amazonaws.com",
+              "token.actions.githubusercontent.com:iss": "https://token.actions.githubusercontent.com"
+          }
+        }
+      }
+    ]
+  })
+}
+
+# Define IAM Policy for ECS and ECR permissions
+resource "aws_iam_policy" "github_actions_policy" {
+  name = "${var.app_name}-github-actions-policy-${var.environment}"
+
+  policy = jsonencode({
+    Version = "2012-10-17",
+    Statement = [
+      {
+        Effect = "Allow",
+        Action = [
+          "ecr:GetAuthorizationToken",
+          "ecr:GetDownloadUrlForLayer",
+          "ecr:BatchGetImage",
+          "ecr:CompleteLayerUpload",
+          "ecr:UploadLayerPart",
+          "ecr:InitiateLayerUpload",
+          "ecr:PutImage",
+          "ecr:BatchCheckLayerAvailability",
+          "ecs:UpdateService",
+          "ecs:DescribeServices",
+          "ecs:DescribeTaskDefinition",
+          "ecs:RegisterTaskDefinition"
+        ],
+        Resource = "*"
+      },
+      {
+        Effect = "Allow",
+        Action = [
+          "iam:PassRole"
+        ],
+        Resource = [
+          "${aws_iam_role.ecs_task_execution_role.arn}",
+          "${aws_iam_role.ecs_task_role.arn}"
+        ]
+      }
+    ]
+  })
+}
+
+# Attach the policy to the role
+resource "aws_iam_role_policy_attachment" "attach_github_actions_policy" {
+  role       = aws_iam_role.github_actions_role.name
+  policy_arn = aws_iam_policy.github_actions_policy.arn
+}
diff --git a/deploy/aws/tf/modules/iam/outputs.tf b/deploy/aws/tf/modules/iam/outputs.tf
new file mode 100644
index 00000000..f0bbd5c6
--- /dev/null
+++ b/deploy/aws/tf/modules/iam/outputs.tf
@@ -0,0 +1,16 @@
+# tf/modules/iam/outputs.tf
+
+output "ecs_execution_role_arn" {
+  description = "ARN of the ECS task execution role"
+  value       = aws_iam_role.ecs_task_execution_role.arn
+}
+
+output "ecs_task_role_arn" {
+  description = "ARN of the ECS task role"
+  value       = aws_iam_role.ecs_task_role.arn
+}
+
+output "github_actions_role_arn" {
+  description = "The ARN of the IAM Role for GitHub Actions"
+  value       = aws_iam_role.github_actions_role.arn
+}
diff --git a/deploy/aws/tf/modules/iam/variables.tf b/deploy/aws/tf/modules/iam/variables.tf
new file mode 100644
index 00000000..f94609ac
--- /dev/null
+++ b/deploy/aws/tf/modules/iam/variables.tf
@@ -0,0 +1,52 @@
+# tf/modules/iam/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
+
+variable "region" {
+  description = "AWS region"
+  type        = string
+}
+
+variable "aws_account_id" {
+  description = "AWS Account ID"
+  type        = string
+}
+
+variable "app_bucket_name" {
+  description = "App's s3 bucket"
+  type        = string
+}
+
+variable "rds_db_id" {
+  description = "RDS database ID"
+  type        = string
+}
+
+variable "rds_db_secret_arn" {
+  description = "RDS database secret arn"
+  type        = string
+}
+
+variable "cloudwatch_log_group_name" {
+  description = "Cloudwatch log group name"
+  type        = string
+}
+
+variable "github_repo" {
+  description = "GitHub repository in the format 'owner/repo'"
+  type        = string
+}
+
+variable "github_branch" {
+  description = "GitHub branch to allow access"
+  type        = string
+  default     = "main"
+}
diff --git a/deploy/aws/tf/modules/jumpbox/main.tf b/deploy/aws/tf/modules/jumpbox/main.tf
new file mode 100644
index 00000000..c90d0555
--- /dev/null
+++ b/deploy/aws/tf/modules/jumpbox/main.tf
@@ -0,0 +1,39 @@
+# tf/modules/jumpbox/main.tf
+
+data "aws_ami" "aws_linux" {
+  most_recent = true
+  owners      = ["137112412989"] # Amazon
+
+  filter {
+    name   = "name"
+    values = ["al2023-ami-2023.5.20240805.0-kernel-6.1-x86_64"]
+  }
+}
+
+resource "aws_instance" "ec2_jumpbox" {
+  ami                    = data.aws_ami.aws_linux.id
+  instance_type          = "t3.micro"
+  subnet_id              = var.public_subnet_id
+  vpc_security_group_ids = var.security_group_ids
+  key_name               = var.ec2_key_pair_name
+  associate_public_ip_address = true
+
+  tags = {
+    Name = "${var.app_name}-ec2-jumpbox-${var.environment}"
+    Environment = var.environment
+  }
+
+  depends_on = [var.vpc_id, var.public_subnet_id]
+}
+
+# Allocate an Elastic IP
+resource "aws_eip" "ec2_jumpbox_eip" {
+  depends_on = [aws_instance.ec2_jumpbox]
+}
+
+# Associate the Elastic IP with the EC2 instance
+resource "aws_eip_association" "ec2_jumpbox_eip_assoc" {
+  instance_id   = aws_instance.ec2_jumpbox.id
+  allocation_id = aws_eip.ec2_jumpbox_eip.id
+  depends_on = [aws_instance.ec2_jumpbox, aws_eip.ec2_jumpbox_eip]
+}
diff --git a/deploy/aws/tf/modules/jumpbox/outputs.tf b/deploy/aws/tf/modules/jumpbox/outputs.tf
new file mode 100644
index 00000000..a5591ca7
--- /dev/null
+++ b/deploy/aws/tf/modules/jumpbox/outputs.tf
@@ -0,0 +1,6 @@
+# tf/modules/jumpbox/outputs.tf
+
+output "jumpbox_id" {
+  description = "ID of the Jumpbox EC2 instance"
+  value       = aws_instance.ec2_jumpbox.id
+}
diff --git a/deploy/aws/tf/modules/jumpbox/variables.tf b/deploy/aws/tf/modules/jumpbox/variables.tf
new file mode 100644
index 00000000..8711d5d1
--- /dev/null
+++ b/deploy/aws/tf/modules/jumpbox/variables.tf
@@ -0,0 +1,31 @@
+# tf/modules/jumpbox/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
+
+variable "vpc_id" {
+  description = "VPC ID"
+  type        = string
+}
+
+variable "public_subnet_id" {
+  description = "Public subnet ID for the jumpbox"
+  type        = string
+}
+
+variable "security_group_ids" {
+  description = "Security group IDs for the jumpbox"
+  type        = list(string)
+}
+
+variable "ec2_key_pair_name" {
+  description = "SSH key name for the EC2 instance"
+  type        = string
+}
diff --git a/deploy/aws/tf/modules/rds/main.tf b/deploy/aws/tf/modules/rds/main.tf
new file mode 100644
index 00000000..7f3a3bea
--- /dev/null
+++ b/deploy/aws/tf/modules/rds/main.tf
@@ -0,0 +1,75 @@
+# tf/modules/rds/main.tf
+
+resource "aws_db_subnet_group" "app_db_subnet_group" {
+  name       = "${var.app_name}-db-subnet-group-${var.environment}"
+  subnet_ids = var.private_subnets
+
+  tags = {
+    Name = "${var.app_name}-db-subnet-group-${var.environment}"
+    Environment = var.environment
+  }
+
+  depends_on = [var.vpc_id]
+}
+
+resource "random_password" "app_db_password" {
+  length  = 16
+  special = true
+}
+
+resource "aws_secretsmanager_secret" "app_db_password" {
+  name        = "${var.app_name}-db-password-${var.environment}"
+  description = "The RDS database password"
+}
+
+resource "aws_secretsmanager_secret_version" "app_db_password_version" {
+  secret_id     = aws_secretsmanager_secret.app_db_password.id
+  secret_string = jsonencode({
+    username = var.app_db_user
+    password = random_password.app_db_password.result
+    port     = 5432
+    dbname   = var.app_db_name
+  })
+  depends_on = [aws_secretsmanager_secret.app_db_password, random_password.app_db_password]
+}
+
+data "aws_secretsmanager_secret_version" "app_db_password_version_data" {
+  secret_id = aws_secretsmanager_secret.app_db_password.id
+  depends_on = [aws_secretsmanager_secret.app_db_password, aws_secretsmanager_secret_version.app_db_password_version]
+}
+
+resource "aws_db_instance" "app_db" {
+  identifier              = "${var.app_name}-db-${var.environment}"
+  engine                  = "postgres"
+  instance_class          = "db.t4g.micro"
+  allocated_storage       = 20
+  storage_type            = "gp2"
+  username                = var.app_db_user
+  password                = jsondecode(data.aws_secretsmanager_secret_version.app_db_password_version_data.secret_string).password
+  db_subnet_group_name    = aws_db_subnet_group.app_db_subnet_group.name
+  vpc_security_group_ids  = var.security_group_ids
+
+  skip_final_snapshot     = true
+
+  backup_retention_period = 7  # Retain backups for 7 days
+  backup_window            = "05:00-06:00"  # Define a backup window (optional)
+  maintenance_window       = "Sun:07:00-Sun:13:00"  # Define a maintenance window (optional)
+
+  tags = {
+    Name = "${var.app_name}-db-${var.environment}"
+    Environment = var.environment
+  }
+
+  provisioner "local-exec" {
+    command = <<EOT
+      PGPASSWORD="${jsondecode(data.aws_secretsmanager_secret_version.app_db_password_version_data.secret_string).password}" psql -h ${self.address} -U ${var.app_db_user} -c "CREATE DATABASE ${var.app_db_name};"
+    EOT
+    environment = {
+      PGPASSWORD = jsondecode(data.aws_secretsmanager_secret_version.app_db_password_version_data.secret_string).password
+    }
+  }
+
+  depends_on = [var.vpc_id, aws_secretsmanager_secret.app_db_password]
+}
+
+# TODO: Append rds host to secretsmanager
diff --git a/deploy/aws/tf/modules/rds/outputs.tf b/deploy/aws/tf/modules/rds/outputs.tf
new file mode 100644
index 00000000..00eccb72
--- /dev/null
+++ b/deploy/aws/tf/modules/rds/outputs.tf
@@ -0,0 +1,16 @@
+# tf/modules/rds/outputs.tf
+
+output "rds_db_id" {
+  description = "Identifier of the RDS instance"
+  value       = aws_db_instance.app_db.identifier
+}
+
+output "rds_endpoint" {
+  description = "Endpoint of the RDS instance"
+  value       = aws_db_instance.app_db.endpoint
+}
+
+output "rds_db_secret_arn" {
+  description = "RDS database secret arn"
+  value       = aws_secretsmanager_secret.app_db_password.arn
+}
diff --git a/deploy/aws/tf/modules/rds/variables.tf b/deploy/aws/tf/modules/rds/variables.tf
new file mode 100644
index 00000000..d0cad838
--- /dev/null
+++ b/deploy/aws/tf/modules/rds/variables.tf
@@ -0,0 +1,36 @@
+# tf/modules/rds/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
+
+variable "vpc_id" {
+  description = "VPC ID"
+  type        = string
+}
+
+variable "private_subnets" {
+  description = "Private subnet IDs for RDS"
+  type        = list(string)
+}
+
+variable "security_group_ids" {
+  description = "Security group IDs for RDS"
+  type        = list(string)
+}
+
+variable "app_db_user" {
+  description = "App database user"
+  type        = string
+}
+
+variable "app_db_name" {
+  description = "App database name"
+  type        = string
+}
diff --git a/deploy/aws/tf/modules/redis/main.tf b/deploy/aws/tf/modules/redis/main.tf
new file mode 100644
index 00000000..6c5d1354
--- /dev/null
+++ b/deploy/aws/tf/modules/redis/main.tf
@@ -0,0 +1,28 @@
+# tf/modules/redis/main.tf
+
+resource "aws_elasticache_subnet_group" "redis_subnet_group" {
+  name       = "${var.app_name}-redis-subnet-group-${var.environment}"
+  subnet_ids = var.private_subnets
+
+  tags = {
+    Name = "${var.app_name}-redis-subnet-group-${var.environment}"
+  }
+
+  # NOTE(review): depends_on cannot list variables; dependency on the VPC is implicit via var.private_subnets
+}
+
+resource "aws_elasticache_cluster" "redis_cluster" {
+  cluster_id           = "${var.app_name}-redis-cluster-${var.environment}"
+  engine               = "redis"
+  node_type            = "cache.t3.micro"
+  num_cache_nodes      = 1
+  subnet_group_name    = aws_elasticache_subnet_group.redis_subnet_group.name
+  security_group_ids = var.security_group_ids
+
+  tags = {
+    Name = "${var.app_name}-redis-cluster-${var.environment}"
+    Environment = var.environment
+  }
+
+  depends_on = [aws_elasticache_subnet_group.redis_subnet_group] # depends_on cannot reference variables
+}
diff --git a/deploy/aws/tf/modules/redis/outputs.tf b/deploy/aws/tf/modules/redis/outputs.tf
new file mode 100644
index 00000000..5f30cfbc
--- /dev/null
+++ b/deploy/aws/tf/modules/redis/outputs.tf
@@ -0,0 +1,6 @@
+# tf/modules/redis/outputs.tf
+
+output "redis_endpoint" {
+  description = "Endpoint of the Redis cluster"
+  value       = aws_elasticache_cluster.redis_cluster.cache_nodes[0].address
+}
diff --git a/deploy/aws/tf/modules/redis/variables.tf b/deploy/aws/tf/modules/redis/variables.tf
new file mode 100644
index 00000000..233f032b
--- /dev/null
+++ b/deploy/aws/tf/modules/redis/variables.tf
@@ -0,0 +1,26 @@
+# tf/modules/redis/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
+
+variable "vpc_id" {
+  description = "VPC ID"
+  type        = string
+}
+
+variable "private_subnets" {
+  description = "Private subnet IDs for Redis"
+  type        = list(string)
+}
+
+variable "security_group_ids" {
+  description = "Security group IDs for Redis"
+  type        = list(string)
+}
diff --git a/deploy/aws/tf/modules/s3/main.tf b/deploy/aws/tf/modules/s3/main.tf
new file mode 100644
index 00000000..4b0f187f
--- /dev/null
+++ b/deploy/aws/tf/modules/s3/main.tf
@@ -0,0 +1,10 @@
+# tf/modules/s3/main.tf
+
+resource "aws_s3_bucket" "app_bucket" {
+  bucket = "${var.app_name}-bucket-${var.environment}"
+
+  tags = {
+    Name        = "${var.app_name}-bucket-${var.environment}"
+    Environment = var.environment
+  }
+}
diff --git a/deploy/aws/tf/modules/s3/outputs.tf b/deploy/aws/tf/modules/s3/outputs.tf
new file mode 100644
index 00000000..4b046d68
--- /dev/null
+++ b/deploy/aws/tf/modules/s3/outputs.tf
@@ -0,0 +1,6 @@
+# tf/modules/s3/outputs.tf
+
+output "app_bucket_name" {
+  description = "Name of the S3 bucket"
+  value       = aws_s3_bucket.app_bucket.bucket
+}
diff --git a/deploy/aws/tf/modules/s3/variables.tf b/deploy/aws/tf/modules/s3/variables.tf
new file mode 100644
index 00000000..df7f90f8
--- /dev/null
+++ b/deploy/aws/tf/modules/s3/variables.tf
@@ -0,0 +1,11 @@
+# tf/modules/s3/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
diff --git a/deploy/aws/tf/modules/vpc/main.tf b/deploy/aws/tf/modules/vpc/main.tf
new file mode 100644
index 00000000..f1220f13
--- /dev/null
+++ b/deploy/aws/tf/modules/vpc/main.tf
@@ -0,0 +1,146 @@
+# tf/modules/vpc/main.tf
+
+module "vpc" {
+  # TODO: Find equivalent module in opentofu registry
+  source = "terraform-aws-modules/vpc/aws"
+  
+  name = "${var.app_name}-vpc-${var.environment}"
+  cidr = var.cidr_block
+
+  azs             = ["eu-central-1a", "eu-central-1b"]
+  private_subnets = ["10.0.1.0/24", "10.0.2.0/24"]
+  public_subnets  = ["10.0.101.0/24", "10.0.102.0/24"]
+
+  enable_nat_gateway = true
+  single_nat_gateway = true
+
+  tags = {
+    OpenTofu   = "true",
+    Environment = var.environment
+  }
+}
+
+resource "aws_security_group" "alb_sg" {
+  vpc_id = module.vpc.vpc_id
+
+  ingress {
+    from_port   = 443
+    to_port     = 443
+    protocol    = "tcp"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  egress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  tags = {
+    Name = "alb-sg"
+    Environment = var.environment
+  }
+
+  depends_on = [module.vpc] # depends_on must reference the whole module, not an output attribute
+}
+
+resource "aws_security_group" "ecs_sg" {
+  vpc_id = module.vpc.vpc_id
+
+  ingress {
+    from_port   = 8000
+    to_port     = 8000
+    protocol    = "tcp"
+    security_groups = [aws_security_group.alb_sg.id]
+  }
+
+  egress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  tags = {
+    Name = "ecs-sg"
+    Environment = var.environment
+  }
+
+  depends_on = [module.vpc] # depends_on must reference the whole module, not an output attribute
+}
+
+resource "aws_security_group" "ec2_jumpbox_sg" {
+  vpc_id = module.vpc.vpc_id
+
+  ingress {
+    from_port   = 22
+    to_port     = 22
+    protocol    = "tcp"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  egress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  tags = {
+    Name = "jumpbox-sg"
+    Environment = var.environment
+  }
+
+  depends_on = [module.vpc] # depends_on must reference the whole module, not an output attribute
+}
+
+resource "aws_security_group" "rds_sg" {
+  vpc_id = module.vpc.vpc_id
+
+  ingress {
+    from_port   = 5432
+    to_port     = 5432
+    protocol    = "tcp"
+    security_groups = [aws_security_group.ecs_sg.id, aws_security_group.ec2_jumpbox_sg.id]
+  }
+
+  egress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  tags = {
+    Name = "rds-sg"
+    Environment = var.environment
+  }
+
+  depends_on = [module.vpc] # depends_on must reference the whole module, not an output attribute
+}
+
+resource "aws_security_group" "redis_sg" {
+  vpc_id = module.vpc.vpc_id
+
+  ingress {
+    from_port   = 6379
+    to_port     = 6379
+    protocol    = "tcp"
+    security_groups = [aws_security_group.ecs_sg.id]
+  }
+
+  egress {
+    from_port   = 0
+    to_port     = 0
+    protocol    = "-1"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
+  tags = {
+    Name = "redis-sg"
+    Environment = var.environment
+  }
+
+  depends_on = [module.vpc] # depends_on must reference the whole module, not an output attribute
+}
diff --git a/deploy/aws/tf/modules/vpc/outputs.tf b/deploy/aws/tf/modules/vpc/outputs.tf
new file mode 100644
index 00000000..0ae838b8
--- /dev/null
+++ b/deploy/aws/tf/modules/vpc/outputs.tf
@@ -0,0 +1,46 @@
+# tf/modules/vpc/outputs.tf
+
+output "vpc_id" {
+  description = "VPC ID"
+  value       = module.vpc.vpc_id
+}
+
+output "private_subnets" {
+  description = "Private subnet IDs"
+  value       = module.vpc.private_subnets
+}
+
+output "public_subnets" {
+  description = "Public subnet IDs"
+  value       = module.vpc.public_subnets
+}
+
+output "public_route_table_ids" {
+  description = "Public route table IDs"
+  value       = module.vpc.public_route_table_ids
+}
+
+output "alb_security_group_id" {
+  description = "ALB security group ID"
+  value       = aws_security_group.alb_sg.id
+}
+
+output "ecs_security_group_id" {
+  description = "ECS security group ID"
+  value       = aws_security_group.ecs_sg.id
+}
+
+output "rds_security_group_id" {
+  description = "RDS security group ID"
+  value       = aws_security_group.rds_sg.id
+}
+
+output "ec2_jumpbox_security_group_id" {
+  description = "Jumpbox security group ID"
+  value       = aws_security_group.ec2_jumpbox_sg.id
+}
+
+output "redis_security_group_id" {
+  description = "Redis security group ID"
+  value       = aws_security_group.redis_sg.id
+}
diff --git a/deploy/aws/tf/modules/vpc/variables.tf b/deploy/aws/tf/modules/vpc/variables.tf
new file mode 100644
index 00000000..e3314e96
--- /dev/null
+++ b/deploy/aws/tf/modules/vpc/variables.tf
@@ -0,0 +1,17 @@
+# tf/modules/vpc/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
+
+variable "cidr_block" {
+  description = "CIDR block for the VPC"
+  type        = string
+  default     = "10.0.0.0/16"
+}
diff --git a/deploy/aws/tf/modules/vpc_endpoints/main.tf b/deploy/aws/tf/modules/vpc_endpoints/main.tf
new file mode 100644
index 00000000..5a60379e
--- /dev/null
+++ b/deploy/aws/tf/modules/vpc_endpoints/main.tf
@@ -0,0 +1,90 @@
+# tf/modules/vpc_endpoints/main.tf
+
+resource "aws_vpc_endpoint" "s3" {
+  vpc_id             = var.vpc_id
+  service_name       = "com.amazonaws.${var.region}.s3"
+  vpc_endpoint_type  = "Gateway"
+  route_table_ids    = var.public_route_table_ids
+
+  tags = {
+    Name        = "s3-vpc-endpoint"
+    Environment = var.environment
+  }
+
+  # NOTE(review): depends_on cannot list variables; the vpc_id argument above already creates the dependency
+}
+
+resource "aws_vpc_endpoint" "cw_logs" {
+  vpc_id             = var.vpc_id
+  service_name       = "com.amazonaws.${var.region}.logs"
+  vpc_endpoint_type  = "Interface"
+  subnet_ids         = var.private_subnets
+  security_group_ids = var.security_group_ids
+
+  tags = {
+    Name        = "cw-logs-vpc-endpoint"
+    Environment = var.environment
+  }
+
+  # NOTE(review): depends_on cannot list variables; the vpc_id argument above already creates the dependency
+}
+
+resource "aws_vpc_endpoint" "ssm" {
+  vpc_id             = var.vpc_id
+  service_name       = "com.amazonaws.${var.region}.ssm"
+  vpc_endpoint_type  = "Interface"
+  subnet_ids         = var.private_subnets
+  security_group_ids = var.security_group_ids
+
+  tags = {
+    Name        = "ssm-vpc-endpoint"
+    Environment = var.environment
+  }
+
+  # NOTE(review): depends_on cannot list variables; the vpc_id argument above already creates the dependency
+}
+
+resource "aws_vpc_endpoint" "secretsmanager" {
+  vpc_id             = var.vpc_id
+  service_name       = "com.amazonaws.${var.region}.secretsmanager"
+  vpc_endpoint_type  = "Interface"
+  subnet_ids         = var.private_subnets
+  security_group_ids = var.security_group_ids
+
+  tags = {
+    Name        = "secretsmanager-vpc-endpoint"
+    Environment = var.environment
+  }
+
+  # NOTE(review): depends_on cannot list variables; the vpc_id argument above already creates the dependency
+}
+
+resource "aws_vpc_endpoint" "rds" {
+  vpc_id             = var.vpc_id
+  service_name       = "com.amazonaws.${var.region}.rds"
+  vpc_endpoint_type  = "Interface"
+  subnet_ids         = var.private_subnets
+  security_group_ids = var.security_group_ids
+
+  tags = {
+    Name        = "rds-vpc-endpoint"
+    Environment = var.environment
+  }
+
+  # NOTE(review): depends_on cannot list variables; the vpc_id argument above already creates the dependency
+}
+
+resource "aws_vpc_endpoint" "redis" {
+  vpc_id             = var.vpc_id
+  service_name       = "com.amazonaws.${var.region}.elasticache"
+  vpc_endpoint_type  = "Interface"
+  subnet_ids         = var.private_subnets
+  security_group_ids = var.security_group_ids
+
+  tags = {
+    Name        = "redis-vpc-endpoint"
+    Environment = var.environment
+  }
+
+  # NOTE(review): depends_on cannot list variables; the vpc_id argument above already creates the dependency
+}
diff --git a/deploy/aws/tf/modules/vpc_endpoints/outputs.tf b/deploy/aws/tf/modules/vpc_endpoints/outputs.tf
new file mode 100644
index 00000000..ac5fef43
--- /dev/null
+++ b/deploy/aws/tf/modules/vpc_endpoints/outputs.tf
@@ -0,0 +1,13 @@
+# tf/modules/vpc_endpoints/outputs.tf
+
+output "vpc_endpoint_ids" {
+  description = "IDs of the VPC endpoints"
+  value       = [
+    aws_vpc_endpoint.s3.id,
+    aws_vpc_endpoint.cw_logs.id,
+    aws_vpc_endpoint.ssm.id,
+    aws_vpc_endpoint.secretsmanager.id,
+    aws_vpc_endpoint.rds.id,
+    aws_vpc_endpoint.redis.id
+  ]
+}
diff --git a/deploy/aws/tf/modules/vpc_endpoints/variables.tf b/deploy/aws/tf/modules/vpc_endpoints/variables.tf
new file mode 100644
index 00000000..d1683ddf
--- /dev/null
+++ b/deploy/aws/tf/modules/vpc_endpoints/variables.tf
@@ -0,0 +1,36 @@
+# terraform/modules/vpc_endpoints/variables.tf
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+}
+
+variable "region" {
+  description = "AWS region"
+  type        = string
+}
+
+variable "vpc_id" {
+  description = "VPC ID"
+  type        = string
+}
+
+variable "private_subnets" {
+  description = "Private subnet IDs for VPC endpoints"
+  type        = list(string)
+}
+
+variable "security_group_ids" {
+  description = "Security group IDs for VPC endpoints"
+  type        = list(string)
+}
+
+variable "public_route_table_ids" {
+  description = "Public route table IDs"
+  type        = list(string)
+}
diff --git a/deploy/aws/tf/outputs.tf b/deploy/aws/tf/outputs.tf
new file mode 100644
index 00000000..0c0b9d8f
--- /dev/null
+++ b/deploy/aws/tf/outputs.tf
@@ -0,0 +1,16 @@
+# tf/outputs.tf
+
+output "alb_dns_name" {
+  description = "DNS name of the ALB"
+  value       = module.alb.alb_dns_name
+}
+
+output "rds_endpoint" {
+  description = "RDS endpoint"
+  value       = module.rds.rds_endpoint
+}
+
+output "redis_endpoint" {
+  description = "Redis endpoint"
+  value       = module.redis.redis_endpoint
+}
diff --git a/deploy/aws/tf/variables.tf b/deploy/aws/tf/variables.tf
new file mode 100644
index 00000000..145e0ed3
--- /dev/null
+++ b/deploy/aws/tf/variables.tf
@@ -0,0 +1,43 @@
+# tf/variables.tf
+
+variable "region" {
+  description = "AWS Region"
+  type        = string
+  default     = "eu-central-1"
+}
+
+variable "environment" {
+  description = "Environment name (e.g., dev, prod)"
+  type        = string
+  default     = "prod"
+}
+
+variable "app_name" {
+  description = "App name"
+  type        = string
+  default     = "chapter-app"
+}
+
+variable "domain_name" {
+  description = "App domain name, used to reference the right certificate"
+  type        = string
+  default     = "*.chapter.show"
+}
+
+variable "ec2_key_pair_name" {
+  description = "EC2 key-pair name for the jumpbox"
+  type        = string
+  default     = "nectar-shri"
+}
+
+variable "github_repo" {
+  description = "GitHub repository in the format 'owner/repo'"
+  type        = string
+  default     = "nectar-run/chapter"
+}
+
+variable "github_branch" {
+  description = "GitHub branch to allow access"
+  type        = string
+  default     = "main"
+}
diff --git a/deploy/docker/dev/Dockerfile b/deploy/docker/dev/Dockerfile
new file mode 100644
index 00000000..0145b128
--- /dev/null
+++ b/deploy/docker/dev/Dockerfile
@@ -0,0 +1,88 @@
+ARG PYTHON_BUILDER_IMAGE=3.11-slim-bookworm
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- Python base -------------------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+FROM python:${PYTHON_BUILDER_IMAGE} as python-base
+ENV PIP_DEFAULT_TIMEOUT=100 \
+  PIP_DISABLE_PIP_VERSION_CHECK=1 \
+  PIP_NO_CACHE_DIR=1 \
+  PIP_ROOT_USER_ACTION=ignore \
+  PYTHONDONTWRITEBYTECODE=1 \
+  PYTHONUNBUFFERED=1 \
+  PYTHONFAULTHANDLER=1 \
+  PYTHONHASHSEED=random \
+  LANG=C.UTF-8 \
+  LC_ALL=C.UTF-8
+RUN apt-get update \
+  && apt-get upgrade -y \
+  && apt-get install -y --no-install-recommends git tini \
+  && apt-get autoremove -y \
+  && apt-get clean -y \
+  && rm -rf /root/.cache \
+  && rm -rf /var/lib/apt/lists/* \
+  && rm -rf /var/cache/apt/* \
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false\
+  && mkdir -p /workspace/app \
+  && pip install --quiet -U pip wheel setuptools virtualenv
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- Python build base -------------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+FROM python-base AS build-base
+ARG PDM_INSTALL_ARGS="-G:all"
+ENV PDM_INSTALL_ARGS="${PDM_INSTALL_ARGS}" \
+  GRPC_PYTHON_BUILD_WITH_CYTHON=1 \
+  PATH="/workspace/app/.venv/bin:/usr/local/bin:$PATH"
+## -------------------------- add build packages ----------------------------------- ##
+RUN apt-get install -y --no-install-recommends build-essential curl \
+  && apt-get autoremove -y \
+  && apt-get clean -y \
+  && rm -rf /root/.cache \
+  && rm -rf /var/lib/apt/lists/* \
+  && rm -rf /var/cache/apt/* \
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false
+
+## -------------------------- install application ----------------------------------- ##
+WORKDIR /workspace/app
+COPY pyproject.toml pdm.lock README.md .pre-commit-config.yaml LICENSE.md Makefile \
+  ./
+COPY scripts ./scripts/
+COPY public ./public/
+COPY resources ./resources/
+RUN python -m venv --copies /workspace/app/.venv \
+  && /workspace/app/.venv/bin/pip install cython pdm nodeenv  \
+  && pdm install ${PDM_INSTALL_ARGS} --no-self
+COPY src ./src/
+
+## ---------------------------------------------------------------------------------- ##
+## -------------------------------- development build ------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- use builder base --------------------------------------- ##
+FROM build-base as dev-image
+ARG ENV_SECRETS="runtime-secrets"
+ARG LITESTAR_APP="app.asgi:app"
+## --------------------------- standardize execution env ----------------------------- ##
+ENV PATH="/workspace/app/.venv/bin:$PATH" \
+  VIRTUAL_ENV="/workspace/app/.venv" \
+  ENV_SECRETS="${ENV_SECRETS}"  \
+  PIP_DEFAULT_TIMEOUT=100 \
+  PIP_DISABLE_PIP_VERSION_CHECK=1 \
+  PIP_NO_CACHE_DIR=1 \
+  PYTHONDONTWRITEBYTECODE=1 \
+  PYTHONUNBUFFERED=1 \
+  PYTHONFAULTHANDLER=1 \
+  PYTHONHASHSEED=random \
+  LANG=C.UTF-8 \
+  LC_ALL=C.UTF-8 \
+  LITESTAR_APP="${LITESTAR_APP}"
+WORKDIR /workspace/app
+COPY docs/ docs/
+COPY tests/ tests/
+COPY src src/
+RUN pdm install $PDM_INSTALL_ARGS
+STOPSIGNAL SIGINT
+EXPOSE 8000
+ENTRYPOINT ["tini","--" ]
+CMD [ "litestar","run","--host","0.0.0.0"]
+VOLUME /workspace/app
diff --git a/deploy/docker/run/Dockerfile b/deploy/docker/run/Dockerfile
new file mode 100644
index 00000000..a1bf7961
--- /dev/null
+++ b/deploy/docker/run/Dockerfile
@@ -0,0 +1,85 @@
+ARG PYTHON_BUILDER_IMAGE=3.12-slim-bookworm
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- Python base -------------------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+FROM python:${PYTHON_BUILDER_IMAGE} as python-base
+ENV PIP_DEFAULT_TIMEOUT=100 \
+  PIP_DISABLE_PIP_VERSION_CHECK=1 \
+  PIP_NO_CACHE_DIR=1 \
+  PIP_ROOT_USER_ACTION=ignore \
+  PYTHONDONTWRITEBYTECODE=1 \
+  PYTHONUNBUFFERED=1 \
+  PYTHONFAULTHANDLER=1 \
+  PYTHONHASHSEED=random \
+  LANG=C.UTF-8 \
+  LC_ALL=C.UTF-8
+RUN apt-get update \
+  && apt-get upgrade -y \
+  && apt-get install -y --no-install-recommends git tini \
+  && apt-get autoremove -y \
+  && apt-get clean -y \
+  && rm -rf /root/.cache \
+  && rm -rf /var/lib/apt/lists/* \
+  && rm -rf /var/cache/apt/* \
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false\
+  && mkdir -p /workspace/app \
+  && pip install --quiet -U pip wheel setuptools virtualenv
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- Python build base -------------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+FROM python-base AS build-base
+ARG PDM_INSTALL_ARGS=""
+ENV PDM_INSTALL_ARGS="${PDM_INSTALL_ARGS}" \
+  GRPC_PYTHON_BUILD_WITH_CYTHON=1 \
+  PATH="/workspace/app/.venv/bin:/usr/local/bin:$PATH"
+## -------------------------- add build packages ----------------------------------- ##
+RUN apt-get install -y --no-install-recommends build-essential curl \
+  && apt-get autoremove -y \
+  && apt-get clean -y \
+  && rm -rf /root/.cache \
+  && rm -rf /var/lib/apt/lists/* \
+  && rm -rf /var/cache/apt/* \
+  && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false
+
+## -------------------------- install application ----------------------------------- ##
+WORKDIR /workspace/app
+COPY pyproject.toml pdm.lock README.md .pre-commit-config.yaml LICENSE.md Makefile \
+  ./
+COPY scripts ./scripts/
+RUN python -m venv --copies /workspace/app/.venv \
+  && /workspace/app/.venv/bin/pip install --quiet pdm nodeenv cython  \
+  && pdm install ${PDM_INSTALL_ARGS} --no-self  \
+  && pdm export ${PDM_INSTALL_ARGS} --without-hashes --prod --output=requirements.txt
+COPY src ./src/
+RUN pdm build
+
+
+## ---------------------------------------------------------------------------------- ##
+## -------------------------------- runtime build ----------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- use base image  ---------------------------------------- ##
+
+FROM python-base as run-image
+ARG ENV_SECRETS="runtime-secrets"
+ARG LITESTAR_APP="app.asgi:app"
+ENV ENV_SECRETS="${ENV_SECRETS}" \
+  LITESTAR_APP="${LITESTAR_APP}"
+
+RUN addgroup --system --gid 65532 nonroot \
+  && adduser --no-create-home --system --uid 65532 nonroot \
+  && chown -R nonroot:nonroot /workspace
+## -------------------------- install application ----------------------------------- ##
+COPY --from=build-base --chown=65532:65532 /workspace/app/requirements.txt /tmp/requirements.txt
+COPY --from=build-base --chown=65532:65532 /workspace/app/dist /tmp/
+WORKDIR /workspace/app
+RUN pip install --quiet --disable-pip-version-check --no-deps --requirement=/tmp/requirements.txt
+RUN pip install --quiet --disable-pip-version-check --no-deps /tmp/*.whl
+
+USER nonroot
+STOPSIGNAL SIGINT
+EXPOSE 8000
+ENTRYPOINT ["tini","--" ]
+CMD [ "litestar","run","--host","0.0.0.0"]
+VOLUME /workspace/app
diff --git a/deploy/docker/run/Dockerfile.distroless b/deploy/docker/run/Dockerfile.distroless
new file mode 100644
index 00000000..5f2c39b9
--- /dev/null
+++ b/deploy/docker/run/Dockerfile.distroless
@@ -0,0 +1,144 @@
+ARG PYTHON_BUILDER_IMAGE=3.11-slim-bullseye
+ARG PYTHON_RUN_IMAGE=gcr.io/distroless/cc:nonroot
+
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- Python base -------------------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+FROM python:${PYTHON_BUILDER_IMAGE} as python-base
+ARG PDM_INSTALL_ARGS=""
+ENV PDM_INSTALL_ARGS="${PDM_INSTALL_ARGS}" \
+    GRPC_PYTHON_BUILD_WITH_CYTHON=1 \
+    PATH="/workspace/app/.venv/bin:/usr/local/bin:$PATH" \
+    PIP_DEFAULT_TIMEOUT=100 \
+    PIP_DISABLE_PIP_VERSION_CHECK=1 \
+    PIP_NO_CACHE_DIR=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    PYTHONUNBUFFERED=1 \
+    PYTHONFAULTHANDLER=1 \
+    PYTHONHASHSEED=random  \
+    LANG=C.UTF-8 \
+    LC_ALL=C.UTF-8
+## -------------------------- add common compiled libraries --------------------------- ##
+RUN apt-get update \
+    && apt-get upgrade -y \
+    && apt-get install -y --no-install-recommends git tini \
+    && apt-get autoremove -y \
+    && apt-get clean -y \
+    && rm -rf /root/.cache \
+    && rm -rf /var/lib/apt/lists/* \
+    && rm -rf /var/cache/apt/* \
+    && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
+    && mkdir -p /workspace/app \
+    ## -------------------------- upgrade default packages -------------------------------- ##
+    && pip install --quiet --upgrade pip wheel setuptools cython virtualenv mypy
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- Python build base -------------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+FROM python-base AS build-base
+ARG PDM_INSTALL_ARGS=""
+ENV PDM_INSTALL_ARGS="${PDM_INSTALL_ARGS}" \
+    GRPC_PYTHON_BUILD_WITH_CYTHON=1 \
+    PATH="/workspace/app/.venv/bin:/usr/local/bin:$PATH"
+## -------------------------- add development packages ------------------------------ ##
+RUN apt-get install -y --no-install-recommends build-essential curl \
+    && apt-get autoremove -y \
+    && apt-get clean -y \
+    && rm -rf /root/.cache \
+    && rm -rf /var/lib/apt/lists/* \
+    && rm -rf /var/cache/apt/* \
+    && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false
+## -------------------------- install application ----------------------------------- ##
+WORKDIR /workspace/app
+COPY pyproject.toml pdm.lock README.md .pre-commit-config.yaml LICENSE.md Makefile \
+    package.json package-lock.json vite.config.ts tsconfig.json tsconfig.node.json \
+    tailwind.config.cjs postcss.config.cjs components.json \
+    ./
+RUN python -m venv --copies /workspace/app/.venv \
+    && /workspace/app/.venv/bin/pip install --quiet pdm nodeenv cython mypy
+COPY scripts ./scripts/
+COPY public ./public/
+COPY resources ./resources/
+RUN pdm install ${PDM_INSTALL_ARGS} --no-self  \
+    && pdm export ${PDM_INSTALL_ARGS} --without-hashes --prod --output=requirements.txt
+COPY src ./src/
+
+RUN pdm build
+VOLUME /workspace/app
+## ---------------------------------------------------------------------------------- ##
+## -------------------------------- runtime build ----------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- use base image  ---------------------------------------- ##
+
+FROM python-base as run-base
+ARG ENV_SECRETS="runtime-secrets"
+ENV ENV_SECRETS="${ENV_SECRETS}"
+RUN addgroup --system --gid 65532 nonroot \
+    && adduser --no-create-home --system --uid 65532 nonroot \
+    && chown -R nonroot:nonroot /workspace \
+    && python -m venv --copies /workspace/app/.venv
+## -------------------------- install application ----------------------------------- ##
+COPY --from=build-base --chown=65532:65532 /workspace/app/requirements.txt /tmp/requirements.txt
+COPY --from=build-base --chown=65532:65532 /workspace/app/dist /tmp/
+WORKDIR /workspace/app
+RUN  /workspace/app/.venv/bin/pip install --quiet --disable-pip-version-check --no-deps --requirement=/tmp/requirements.txt
+RUN /workspace/app/.venv/bin/pip install --quiet --disable-pip-version-check --no-deps /tmp/*.whl
+
+
+
+## ---------------------------------------------------------------------------------- ##
+## ------------------------- distroless runtime build ------------------------------- ##
+## ---------------------------------------------------------------------------------- ##
+
+## ------------------------- use distroless `cc` image  ----------------------------- ##
+FROM ${PYTHON_RUN_IMAGE} as run-image
+ARG ENV_SECRETS="runtime-secrets"
+ARG CHIPSET_ARCH=x86_64-linux-gnu
+ARG LITESTAR_APP="app.asgi:app"
+ENV PATH="/workspace/app/.venv/bin:/bin:/usr/local/bin:$PATH" \
+    ENV_SECRETS="${ENV_SECRETS}" \
+    CHIPSET_ARCH="${CHIPSET_ARCH}" \
+    PIP_DEFAULT_TIMEOUT=100 \
+    PIP_DISABLE_PIP_VERSION_CHECK=1 \
+    PIP_NO_CACHE_DIR=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    PYTHONUNBUFFERED=1 \
+    PYTHONFAULTHANDLER=1 \
+    PYTHONHASHSEED=random \
+    LANG=C.UTF-8 \
+    LC_ALL=C.UTF-8 \
+    LITESTAR_APP="${LITESTAR_APP}"
+## ------------------------- copy python itself from builder -------------------------- ##
+
+# this carries more risk than installing it fully, but makes the image a lot smaller
+COPY --from=run-base /usr/local/lib/ /usr/local/lib/
+COPY --from=run-base  /usr/local/bin/python /usr/local/bin/python
+COPY --from=run-base /etc/ld.so.cache /etc/ld.so.cache
+
+## -------------------------- add common compiled libraries --------------------------- ##
+
+# add tini
+COPY --from=run-base /usr/bin/tini-static /usr/local/bin/tini
+
+# If seeing ImportErrors, check if in the python-base already and copy as below
+
+# required by lots of packages - e.g. six, numpy, wsgi
+COPY --from=run-base /lib/${CHIPSET_ARCH}/libz.so.1 /lib/${CHIPSET_ARCH}/
+COPY --from=run-base /lib/${CHIPSET_ARCH}/libbz2.so.1.0 /lib/${CHIPSET_ARCH}/
+
+# required by google-cloud/grpcio
+COPY --from=run-base /usr/lib/${CHIPSET_ARCH}/libffi* /usr/lib/${CHIPSET_ARCH}/
+COPY --from=run-base /lib/${CHIPSET_ARCH}/libexpat* /lib/${CHIPSET_ARCH}/
+## -------------------------- install application ----------------------------------- ##
+WORKDIR /workspace/app
+COPY --from=run-base --chown=65532:65532 /workspace/app/.venv /workspace/app/.venv
+
+## --------------------------- standardize execution env ----------------------------- ##
+
+
+STOPSIGNAL SIGINT
+EXPOSE 8000
+ENTRYPOINT ["tini","--" ]
+CMD [ "litestar","run","--host","0.0.0.0"]
+VOLUME /workspace/app
diff --git a/docker-compose.infra.yml b/docker-compose.infra.yml
new file mode 100644
index 00000000..78b12032
--- /dev/null
+++ b/docker-compose.infra.yml
@@ -0,0 +1,52 @@
+services:
+  cache:
+    image: valkey/valkey:latest
+    ports:
+      - "16379:6379"
+    hostname: cache
+    command: redis-server --appendonly yes
+    volumes:
+      - cache-data:/data
+    environment:
+      ALLOW_EMPTY_PASSWORD: "yes"
+    restart: unless-stopped
+    logging:
+      options:
+        max-size: 10m
+        max-file: "3"
+    healthcheck:
+      test:
+        - CMD
+        - redis-cli
+        - ping
+      interval: 1s
+      timeout: 3s
+      retries: 30
+  db:
+    image: postgres:latest
+    ports:
+      - "15432:5432"
+    hostname: db
+    environment:
+      POSTGRES_PASSWORD: "app" # NOTE(review): .env.docker.example uses chapter:chapter@db/chapter — confirm credentials match
+      POSTGRES_USER: "app"
+      POSTGRES_DB: "app"
+    volumes:
+      - db-data:/var/lib/postgresql/data
+    restart: unless-stopped
+    logging:
+      options:
+        max-size: 10m
+        max-file: "3"
+    healthcheck:
+      test:
+        - CMD
+        - pg_isready
+        - -U
+        - app
+      interval: 2s
+      timeout: 3s
+      retries: 40
+volumes:
+  db-data: {}
+  cache-data: {}
diff --git a/docker-compose.override.yml b/docker-compose.override.yml
new file mode 100644
index 00000000..54e789d8
--- /dev/null
+++ b/docker-compose.override.yml
@@ -0,0 +1,58 @@
+x-development-volumes: &development-volumes
+  volumes:
+    - ./docs:/workspace/app/docs/
+    - ./tests:/workspace/app/tests/
+    - ./src:/workspace/app/src/
+    - ./Makefile:/workspace/app/Makefile
+    - ./pyproject.toml:/workspace/app/pyproject.toml
+    - ./pdm.lock:/workspace/app/pdm.lock
+    - ./resources:/workspace/app/resources
+    - ./public:/workspace/app/public
+    - ./.env.docker.example:/workspace/app/.env
+
+services:
+  localmail:
+    image: mailhog/mailhog:v1.0.0
+    container_name: localmail
+    ports:
+      - "8025:8025"
+  app:
+    build:
+      context: .
+      dockerfile: deploy/docker/dev/Dockerfile
+    ports:
+      - "8000:8000"
+      - "3006:3006"
+    image: app:latest-dev
+    tty: true
+    environment:
+      SAQ_USE_SERVER_LIFESPAN: "false"
+    command: litestar run --reload --host 0.0.0.0 --port 8000
+    restart: always
+    <<: *development-volumes
+  worker:
+    image: app:latest-dev
+    command: litestar workers run
+    tty: true
+    restart: always
+    <<: *development-volumes
+    depends_on:
+      db:
+        condition: service_healthy
+      cache:
+        condition: service_healthy
+
+    env_file:
+      - .env.docker.example
+  migrator:
+    image: app:latest-dev
+    command: litestar database upgrade --no-prompt
+    restart: "no"
+    <<: *development-volumes
+    env_file:
+      - .env.docker.example
+    depends_on:
+      db:
+        condition: service_healthy
+      cache:
+        condition: service_healthy
diff --git a/docker-compose.yml b/docker-compose.yml
index ed5aebc7..49d4b820 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,39 +1,91 @@
-version: "3.9"
 services:
+  cache:
+    image: valkey/valkey:latest
+    ports:
+      - "16379:6379"
+    hostname: cache
+    command: redis-server --appendonly yes
+    volumes:
+      - cache-data:/data
+    environment:
+      ALLOW_EMPTY_PASSWORD: "yes"
+    restart: unless-stopped
+    logging:
+      options:
+        max-size: 10m
+        max-file: "3"
+    healthcheck:
+      test:
+        - CMD
+        - redis-cli
+        - ping
+      interval: 1s
+      timeout: 3s
+      retries: 30
   db:
     image: postgres:latest
-    volumes:
-      - db:/var/lib/postrgresql/data/
     ports:
-      - "5432:5432"
+      - "15432:5432"
+    hostname: db
     environment:
-      POSTGRES_PASSWORD: optimus-prime
-      POSTGRES_DB: optimus-prime
-
+      POSTGRES_PASSWORD: "app"
+      POSTGRES_USER: "app"
+      POSTGRES_DB: "app"
+    volumes:
+      - db-data:/var/lib/postgresql/data
+    restart: unless-stopped
+    logging:
+      options:
+        max-size: 10m
+        max-file: "3"
+    healthcheck:
+      test:
+        - CMD
+        - pg_isready
+        - -U
+        - app
+      interval: 2s
+      timeout: 3s
+      retries: 40
   app:
     build:
       context: .
-      dockerfile: Dockerfile
-      args:
-        INSTALL_ARGS: ""
-    command: scripts/entry
+      dockerfile: deploy/docker/run/Dockerfile
+    image: app:latest
+    restart: always
     depends_on:
-      - db
-      - cache
+      db:
+        condition: service_healthy
+      cache:
+        condition: service_healthy
     ports:
       - "8000:8000"
+    environment:
+      SAQ_USE_SERVER_LIFESPAN: "false"
     env_file:
-      - .env
-    volumes:
-      - ./src/opdba:/workspace/opdba
-      # - ./scripts:/code/scripts
-      - ./tests:/workspace/tests
-
-  mailhog:
-    image: mailhog/mailhog:v1.0.0
-    container_name: advisor-mailhog
-    ports:
-      - "8025:8025"
+      - .env.docker.example
+  worker:
+    image: app:latest
+    command: litestar workers run
+    restart: always
+    depends_on:
+      db:
+        condition: service_healthy
+      cache:
+        condition: service_healthy
+    env_file:
+      - .env.docker.example
+  migrator:
+    image: app:latest
+    restart: "no"
+    command: litestar database upgrade --no-prompt
+    env_file:
+      - .env.docker.example
+    depends_on:
+      db:
+        condition: service_healthy
+      cache:
+        condition: service_healthy
 volumes:
-  db: {}
-  cache: {}
+  db-data: {}
+  cache-data: {}
diff --git a/docs/_static/badge.png b/docs/_static/badge.png
new file mode 100644
index 00000000..16d02b09
Binary files /dev/null and b/docs/_static/badge.png differ
diff --git a/docs/_static/badge.svg b/docs/_static/badge.svg
new file mode 100644
index 00000000..e83862fe
--- /dev/null
+++ b/docs/_static/badge.svg
@@ -0,0 +1,31 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="500" zoomAndPan="magnify" viewBox="0 0 375 374.999991" height="500"
+     preserveAspectRatio="xMidYMid meet" version="1.0">
+    <defs>
+        <clipPath id="9eb7762d41">
+            <path d="M 15.933594 105 L 328 105 L 328 259 L 15.933594 259 Z M 15.933594 105 " clip-rule="nonzero"/>
+        </clipPath>
+        <clipPath id="183d3cc178">
+            <path d="M 142 78.769531 L 359.433594 78.769531 L 359.433594 296.269531 L 142 296.269531 Z M 142 78.769531 "
+                  clip-rule="nonzero"/>
+        </clipPath>
+    </defs>
+    <g clip-path="url(#9eb7762d41)">
+        <path fill="#edb641"
+              d="M 147.625 240.3125 C 161.5 233.984375 173.554688 227.011719 183.425781 220.550781 C 202.304688 208.203125 226.4375 185.242188 227.761719 183.410156 L 218.917969 177.503906 L 211.257812 172.386719 L 235.503906 171.441406 L 243.296875 171.136719 L 245.414062 163.640625 L 252.007812 140.304688 L 260.402344 163.054688 L 263.097656 170.363281 L 270.890625 170.058594 L 295.136719 169.113281 L 276.078125 184.117188 L 269.953125 188.9375 L 272.652344 196.25 L 281.046875 218.996094 L 260.871094 205.523438 L 254.390625 201.195312 L 248.265625 206.015625 L 229.207031 221.023438 L 232.480469 209.425781 L 235.796875 197.691406 L 236.207031 196.234375 C 213.003906 213.585938 180.546875 230.304688 161.140625 236.488281 C 156.6875 237.90625 152.183594 239.179688 147.625 240.3125 Z M 101.992188 258.078125 C 136.382812 256.734375 177.355469 248 217.675781 222.363281 L 209.90625 249.867188 L 254.910156 214.4375 L 302.539062 246.246094 L 282.71875 192.539062 L 327.71875 157.109375 L 270.46875 159.34375 L 250.648438 105.636719 L 235.085938 160.726562 L 177.835938 162.964844 L 210.980469 185.097656 C 189.164062 204.921875 134.445312 247.195312 61.957031 250.03125 C 47.300781 250.601562 31.914062 249.558594 15.933594 246.394531 C 15.933594 246.394531 52.011719 260.035156 101.992188 258.078125 "
+              fill-opacity="1" fill-rule="nonzero"/>
+    </g>
+    <g clip-path="url(#183d3cc178)">
+        <path fill="#edb641"
+              d="M 250.789062 78.96875 C 190.78125 78.96875 142.140625 127.570312 142.140625 187.519531 C 142.140625 198.875 143.886719 209.816406 147.121094 220.101562 C 151.847656 217.75 156.363281 215.316406 160.660156 212.84375 C 158.394531 204.789062 157.183594 196.296875 157.183594 187.519531 C 157.183594 135.871094 199.089844 93.996094 250.789062 93.996094 C 302.484375 93.996094 344.390625 135.871094 344.390625 187.519531 C 344.390625 239.171875 302.484375 281.042969 250.789062 281.042969 C 222.75 281.042969 197.597656 268.722656 180.441406 249.210938 C 175.453125 251.152344 170.402344 252.917969 165.289062 254.511719 C 185.183594 279.816406 216.082031 296.070312 250.789062 296.070312 C 310.792969 296.070312 359.433594 247.472656 359.433594 187.519531 C 359.433594 127.570312 310.792969 78.96875 250.789062 78.96875 "
+              fill-opacity="1" fill-rule="nonzero"/>
+    </g>
+    <path fill="#edb641"
+          d="M 92.292969 173.023438 L 98.289062 191.460938 L 117.691406 191.460938 L 101.992188 202.855469 L 107.988281 221.292969 L 92.292969 209.898438 L 76.59375 221.292969 L 82.589844 202.855469 L 66.894531 191.460938 L 86.296875 191.460938 L 92.292969 173.023438 "
+          fill-opacity="1" fill-rule="nonzero"/>
+    <path fill="#edb641"
+          d="M 120.214844 112.25 L 125.390625 128.167969 L 142.140625 128.167969 L 128.589844 138 L 133.765625 153.917969 L 120.214844 144.082031 L 106.664062 153.917969 L 111.839844 138 L 98.289062 128.167969 L 115.039062 128.167969 L 120.214844 112.25 "
+          fill-opacity="1" fill-rule="nonzero"/>
+    <path fill="#edb641"
+          d="M 34.695312 209.136719 L 37.71875 218.421875 L 47.492188 218.421875 L 39.585938 224.160156 L 42.605469 233.449219 L 34.695312 227.707031 L 26.792969 233.449219 L 29.8125 224.160156 L 21.90625 218.421875 L 31.679688 218.421875 L 34.695312 209.136719 "
+          fill-opacity="1" fill-rule="nonzero"/>
+</svg>
diff --git a/docs/app_reference/app.md b/docs/app_reference/app.md
deleted file mode 100644
index 16729844..00000000
--- a/docs/app_reference/app.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# App Reference
-
-::: pyvite.main
diff --git a/docs/code_reference/module1.md b/docs/code_reference/module1.md
deleted file mode 100644
index bdb85e03..00000000
--- a/docs/code_reference/module1.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Module 1
-
-::: pyvite.core.module1
diff --git a/docs/code_reference/module2.md b/docs/code_reference/module2.md
deleted file mode 100644
index f546ff2a..00000000
--- a/docs/code_reference/module2.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Module 2
-
-::: pyvite.core.module2
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 00000000..78d4ecbc
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,135 @@
+"""Sphinx configuration."""
+from __future__ import annotations
+
+import importlib.metadata
+import warnings
+from functools import partial
+from typing import TYPE_CHECKING, Any
+
+from sqlalchemy.exc import SAWarning
+
+if TYPE_CHECKING:
+    from sphinx.addnodes import document
+    from sphinx.application import Sphinx
+
+warnings.filterwarnings("ignore", category=SAWarning)
+warnings.filterwarnings("ignore", category=DeprecationWarning)  # RemovedInSphinx80Warning
+
+# -- Project information -----------------------------------------------------
+project = importlib.metadata.metadata("app")["Name"]
+copyright = "2023, Litestar Organization"
+author = "Cody Fincher"
+release = importlib.metadata.version("app")
+
+# -- General configuration ---------------------------------------------------
+extensions = [
+    "sphinx_click",
+    "sphinx_design",
+    "sphinx.ext.todo",
+    "sphinx_copybutton",
+    "sphinx.ext.autodoc",
+    "sphinx.ext.viewcode",
+    "sphinx.ext.napoleon",
+    "sphinxcontrib.mermaid",
+    "sphinx.ext.intersphinx",
+    "sphinx_toolbox.collapse",
+    "sphinx.ext.autosectionlabel",
+]
+
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
+
+intersphinx_mapping = {
+    "python": ("https://docs.python.org/3", None),
+    "anyio": ("https://anyio.readthedocs.io/en/stable/", None),
+    "click": ("https://click.palletsprojects.com/en/8.1.x/", None),
+    "structlog": ("https://www.structlog.org/en/stable/", None),
+    "litestar": ("https://docs.litestar.dev/latest/", None),
+    "msgspec": ("https://jcristharif.com/msgspec/", None),
+    "saq": ("https://saq-py.readthedocs.io/en/latest/", None),
+    "advanced-alchemy": ("https://docs.advanced-alchemy.litestar.dev/latest/", None),
+}
+
+napoleon_google_docstring = True
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = True
+napoleon_use_admonition_for_notes = True
+napoleon_use_admonition_for_references = False
+napoleon_attr_annotations = True
+
+autoclass_content = "both"
+autodoc_default_options = {
+    "members": True,
+    "member-order": "bysource",
+    "special-members": "__init__",
+    "exclude-members": "__weakref__",
+    "show-inheritance": True,
+    "class-signature": "separated",
+    "typehints-format": "short",
+}
+
+autosectionlabel_prefix_document = True
+suppress_warnings = [
+    "autosectionlabel.*",
+    "ref.python",  # TODO: remove when https://github.com/sphinx-doc/sphinx/issues/4961 is fixed
+]
+todo_include_todos = True
+
+# -- Style configuration -----------------------------------------------------
+html_theme = "litestar_sphinx_theme"
+html_static_path = ["_static"]
+html_show_sourcelink = True
+html_title = "Litestar Fullstack Docs"
+html_context = {
+    "github_user": "litestar-org",
+    "github_repo": "litestar-fullstack",
+    "github_version": "main",
+    "doc_path": "docs",
+}
+html_theme_options = {
+    "use_page_nav": False,
+    "use_edit_page_button": True,
+    "github_repo_name": "litestar-fullstack",
+    "logo": {
+        "link": "https://docs.fullstack.litestar.dev",
+    },
+    "extra_navbar_items": {
+        "Documentation": "index",
+        "Community": {
+            "Contributing": {
+                "description": "Learn how to contribute to Litestar Fullstack",
+                "link": "contribution-guide",
+                "icon": "contributing",
+            },
+            "Code of Conduct": {
+                "description": "Review the etiquette for interacting with the Litestar community",
+                "link": "https://github.com/litestar-org/.github/blob/main/CODE_OF_CONDUCT.md",
+                "icon": "coc",
+            },
+        },
+        "About": {
+            "Litestar Organization": {
+                "description": "About the Litestar organization",
+                "link": "https://litestar.dev/about/organization.html",
+                "icon": "org",
+            },
+        },
+    },
+}
+
+
+def update_html_context(
+    app: Sphinx,
+    pagename: str,
+    templatename: str,
+    context: dict[str, Any],
+    doctree: document,
+) -> None:
+    context["generate_toctree_html"] = partial(context["generate_toctree_html"], startdepth=0)
+
+
+def setup(app: Sphinx) -> dict[str, bool]:
+    app.setup_extension("litestar_sphinx_theme")
+    app.setup_extension("pydata_sphinx_theme")
+    app.connect("html-page-context", update_html_context)
+
+    return {"parallel_read_safe": True, "parallel_write_safe": True}
diff --git a/docs/developer_guide/commands.md b/docs/developer_guide/commands.md
deleted file mode 100644
index 8a36e67a..00000000
--- a/docs/developer_guide/commands.md
+++ /dev/null
@@ -1,17 +0,0 @@
-# Commands
-
-- `make clean` - Remove all build, testing, and static documentation files.
-
-- `make test` - Run the tests using pytest.
-
-- `make lint` - Run the linting tools. Includes pre-commit hooks, black, isort, flake8, pylint, and mypy.
-
-- `make check` - Run the test and lint commands.
-
-- `make build` - Build a docker image locally using the Dockerfile. The image will be named _pyvite_.
-
-- `make gen-docs` - Generate HTML documentation.
-
-- `make docs` - Generate HTML documentation and serve it to the browser.
-
-- `make pre-release increment={major/minor/patch}` - Bump the version and create a release tag. Should only be run from the _main_ branch. Passes the increment value to bump2version to create a new version number dynamically. The new version number will be added to _\_\_version\_\_.py_ and _pyproject.toml_ and a new commit will be logged. The tag will be created from the new commit.
diff --git a/docs/developer_guide/developer_setup.md b/docs/developer_guide/developer_setup.md
deleted file mode 100644
index a9a8cdb0..00000000
--- a/docs/developer_guide/developer_setup.md
+++ /dev/null
@@ -1,85 +0,0 @@
-# Developer Setup
-
-To begin local development, clone the [PyTemplates/typer_cli](https://github.com/PyTemplate/typer_cli) repository and use one of the following methods to build it. Commands should be executed from inside of the project home folder.
-
-## Using poetry
-
-```bash
-poetry install
-```
-
-Install optional dependencies using the `--extras` flag:
-
-```bash
-poetry install --extras=environment
-```
-
-## Using pip
-
-```bash
-pip install .
-```
-
-Install optional dependencies using square brackets:
-
-```bash
-pip install .[environment]
-```
-
-## Environments
-
-```python
-test = [
-    "pytest",
-    "pytest-cov",
-]
-
-lint = [
-    "black",
-    "isort",
-    "flake8",
-    "pylint",
-    "mypy",
-    "pre-commit",
-]
-
-docs = [
-    "mkdocs",
-    "mkdocstrings",
-    "mkdocstrings-python",
-    "mkdocs-material",
-]
-
-# Includes all optional dependencies
-dev = [
-    "pytest",
-    "pytest-cov",
-    "black",
-    "isort",
-    "flake8",
-    "pylint",
-    "mypy",
-    "pre-commit",
-    "mkdocs",
-    "mkdocstrings",
-    "mkdocstrings-python",
-    "mkdocs-material",
-    "bump2version",
-]
-```
-
-## Using a local docker build
-
-To build an image locally from the Dockerfile:
-
-```bash
-make build
-```
-
-To run the image:
-
-```bash
-docker run --rm pyvite hello user
-docker run --rm pyvite goodbye user
-docker run --rm pyvite version
-```
diff --git a/docs/developer_guide/releases.md b/docs/developer_guide/releases.md
deleted file mode 100644
index 777b2ebb..00000000
--- a/docs/developer_guide/releases.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# Releases
-
-A release should consist of the following two steps from a tested, linted, and up to date copy of the *main* branch:
-
-1. `make pre-release increment={major/minor/patch}` - Commit the version number bump and create a new tag locally. The version number follows semantic versioning standards (major.minor.patch) and the tag is the version number prepended with a 'v'.
-
-2. `git push --follow-tags` - Update the *main* branch with only the changes from the version bump. Publish the new tag and kick off the release workflow.
diff --git a/docs/developer_guide/workflows.md b/docs/developer_guide/workflows.md
deleted file mode 100644
index 9879e81e..00000000
--- a/docs/developer_guide/workflows.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Workflows
-
-## Test
-
-- Run the tests on every push/pull_request to the *main* branch.
-- Writes a coverage report using pytest-cov and uploads it to codecov.io.
-- Tests run against python versions 3.8 and 3.9.
-- Optional manual trigger in the github actions tab.
-
-## Lint
-
-- Run the linting tools on every push/pull_request to the *main* branch.
-- Includes pre-commit hooks, black, isort, flake8, pylint, and mypy.
-- Optional manual trigger in the github actions tab.
-
-## Release
-
-- Build a wheel distribution, build a docker image, create a github release, and publish to PyPI and Docker Hub whenever a new tag is created.
-- Linting and testing steps must pass before the release steps can begin.
-- Documentation is automatically published to the *docs* branch and hosted on github pages.
-- All github release tags, docker image tags, and PyPI version numbers are in agreement with one another and follow semantic versioning standards.
-
-## Build and Publish Docs
-
-- Build the documentation, publish to the *docs* branch, and release to github pages.
-- Runs only on a manual trigger in the github actions tab.
-
-## Build and Publish Docker Image
-
-- Build the docker image, tag it with the branch name, and publish it to dockerhub.
-- Runs only a manual trigger in the github actions tab.
diff --git a/docs/extras/credits.md b/docs/extras/credits.md
deleted file mode 100644
index a9b76272..00000000
--- a/docs/extras/credits.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Credits
-
-## Other python package templates
-
-- [https://github.com/waynerv/cookiecutter-pypackage](https://github.com/waynerv/cookiecutter-pypackage)
-- [https://github.com/AllenCellModeling/cookiecutter-pypackage](https://github.com/AllenCellModeling/cookiecutter-pypackage)
-
-## Actions
-
-- [https://github.com/JamesIves/github-pages-deploy-action](https://github.com/JamesIves/github-pages-deploy-action)
-- [https://github.com/softprops/action-gh-release](https://github.com/softprops/action-gh-release)
diff --git a/docs/extras/file_tree.md b/docs/extras/file_tree.md
deleted file mode 100644
index 7b064bdb..00000000
--- a/docs/extras/file_tree.md
+++ /dev/null
@@ -1,44 +0,0 @@
-# File Tree
-
-```bash
-.
-├── Dockerfile
-├── docs
-│   ├── app_reference
-│   │   └── app.md
-│   ├── code_reference
-│   │   ├── module1.md
-│   │   └── module2.md
-│   ├── developer_guide
-│   │   ├── commands.md
-│   │   ├── developer_setup.md
-│   │   ├── releases.md
-│   │   └── workflows.md
-│   ├── extras
-│   │   ├── credits.md
-│   │   └── file_tree.md
-│   ├── index.md
-│   └── user_guide
-│       ├── installation.md
-│       └── usage.md
-├── LICENSE
-├── Makefile
-├── mkdocs.yml
-├── poetry.lock
-├── pyproject.toml
-├── README.md
-├── src
-│   └── pyvite
-│       ├── core
-│       │   ├── __init__.py
-│       │   ├── module1.py
-│       │   └── module2.py
-│       ├── __init__.py
-│       ├── main.py
-│       └── __version__.py
-└── tests
-    ├── __init__.py
-    ├── test_app.py
-    ├── test_module1.py
-    └── test_module2.py
-```
diff --git a/docs/index.md b/docs/index.md
deleted file mode 100644
index 8b3d0e5d..00000000
--- a/docs/index.md
+++ /dev/null
@@ -1,18 +0,0 @@
-<p align="center">
-  <a href="https://user-images.githubusercontent.com/20674972/178172752-abd4497d-6a0e-416b-9eef-1b1c0dca8477.png">
-    <img src="https://user-images.githubusercontent.com/20674972/178172752-abd4497d-6a0e-416b-9eef-1b1c0dca8477.png" alt="Pytemplates Banner" style="width:100%;">
-  </a>
-</p>
-
-### A production ready python CLI template
-
-- Metadata and dependency information is stored in the pyproject.toml for compatibility with both [pip](https://pip.pypa.io/en/stable/) and [poetry](https://python-poetry.org/docs/).
-- [Flake8](https://flake8.pycqa.org/en/latest/), [pylint](https://pylint.pycqa.org/en/latest/index.html), and [isort](https://pycqa.github.io/isort/) configurations are defined to be compatible with the [black](https://black.readthedocs.io/en/stable/) autoformatter.
-- Pylint settings are based on the [Google Python Style Guide](https://google.github.io/styleguide/pyguide.html) and adapted for black compatibility.
-- Linting tools run automatically before each commit using [pre-commit](https://pre-commit.com/), black, and isort.
-- Test coverage reports are generated during every commit and pull request using [coverage](https://coverage.readthedocs.io/en/6.4.1/) and [pytest-cov](https://pytest-cov.readthedocs.io/en/latest/). All reports are automatically uploaded and archived on [codecov.io](https://about.codecov.io/).
-- Unit tests are written using [pytest](https://docs.pytest.org/en/latest/) and static type checking is provided by [mypy](http://mypy-lang.org/index.html).
-- Package releases to [PyPI](https://pypi.org/) with dynamic versioning provided by [bump2version](https://github.com/c4urself/bump2version) begin automatically whenever a new tag is created in github.
-- Docker images are automatically published to [Docker Hub](https://hub.docker.com/) during every release. Images are tagged with a semantic version number which agrees with the git tag and the PyPI version number.
-- Documentation is built using [mkdocs](https://www.mkdocs.org/) and [mkdocstrings](https://mkdocstrings.github.io/). Docs are automatically deployed to [github pages](https://docs.github.com/en/pages) during every release.
-- Release notes are automatically generated during every release using [github actions](https://docs.github.com/en/actions).
diff --git a/docs/user_guide/installation.md b/docs/user_guide/installation.md
deleted file mode 100644
index 2ba80f49..00000000
--- a/docs/user_guide/installation.md
+++ /dev/null
@@ -1,13 +0,0 @@
-# Installation
-
-To install the package using `pip`:
-
-```bash
-pip install pyvite
-```
-
-To download the CLI application using `docker`:
-
-```bash
-docker pull pytemplates/typer_cli:latest
-```
diff --git a/docs/user_guide/usage.md b/docs/user_guide/usage.md
deleted file mode 100644
index 87e770b2..00000000
--- a/docs/user_guide/usage.md
+++ /dev/null
@@ -1,17 +0,0 @@
-# Usage
-
-Using the python package installation:
-
-```bash
-pytemplates hello user
-pytemplates goodbye user
-pytemplates version
-```
-
-Using the docker image:
-
-```bash
-docker run --rm pytemplates/typer_cli hello user
-docker run --rm pytemplates/typer_cli goodbye user
-docker run --rm pytemplates/typer_cli version
-```
diff --git a/mkdocs.yml b/mkdocs.yml
deleted file mode 100644
index daf35a4a..00000000
--- a/mkdocs.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-site_name: Pytemplates - Typer CLI
-repo_url: https://github.com/PyTemplate/typer_cli
-edit_uri: ./edit/main/docs
-
-# Sticking with readthedocs theme for basic template
-# material theme also installed
-theme:
-  name: "readthedocs"
-
-plugins:
-  - search
-  - mkdocstrings
-
-nav:
-    - Home: index.md
-    - User Guide:
-      - Installation: user_guide/installation.md
-      - Usage: user_guide/usage.md
-    - Developer Guide:
-      - Developer Setup: developer_guide/developer_setup.md
-      - Commands: developer_guide/commands.md
-      - Workflows: developer_guide/workflows.md
-      - Releases: developer_guide/releases.md
-    - App Reference:
-      - App Reference: app_reference/app.md
-    - Code Reference:
-      - Core:
-        - Module1: code_reference/module1.md
-        - Module2: code_reference/module2.md
-    - Extras:
-      - File Tree: extras/file_tree.md
-      - Credits: extras/credits.md
diff --git a/mypy.ini b/mypy.ini
deleted file mode 100644
index bbdb794e..00000000
--- a/mypy.ini
+++ /dev/null
@@ -1,34 +0,0 @@
-[mypy]
-plugins = pydantic.mypy, sqlalchemy.ext.mypy.plugin
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_unused_configs = True
-warn_unreachable = True
-warn_return_any = True
-strict = True
-disallow_untyped_decorators = True
-disallow_any_generics = False
-implicit_reexport = False
-show_error_codes = True
-
-[mypy-tests.*]
-disallow_untyped_decorators = False
-
-[pydantic-mypy]
-init_forbid_extra = True
-init_typed = True
-warn_required_dynamic_aliases = True
-warn_untyped_fields = True
-
-[mypy-uvicorn.*]
-ignore_missing_imports = True
-
-[mypy-gunicorn.*]
-ignore_missing_imports = True
-
-[mypy-picologging.*]
-ignore_missing_imports = True
-
-[mypy-typer.*]
-disallow_untyped_defs = True
-strict = True
\ No newline at end of file
diff --git a/pdm.lock b/pdm.lock
new file mode 100644
index 00000000..9f1b8659
--- /dev/null
+++ b/pdm.lock
@@ -0,0 +1,3102 @@
+# This file is @generated by PDM.
+# It is not intended for manual editing.
+
+[metadata]
+groups = ["default", "dev", "docs", "linting", "test"]
+strategy = ["cross_platform", "inherit_metadata"]
+lock_version = "4.4.2"
+content_hash = "sha256:740631a0cc48057f4d6f4ee7739df0071a698e6a4381e21b6122406e1e3cd0fa"
+
+[[package]]
+name = "accessible-pygments"
+version = "0.0.5"
+requires_python = ">=3.9"
+summary = "A collection of accessible pygments styles"
+groups = ["docs"]
+dependencies = [
+    "pygments>=1.5",
+]
+files = [
+    {file = "accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7"},
+    {file = "accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872"},
+]
+
+[[package]]
+name = "advanced-alchemy"
+version = "0.17.1"
+requires_python = ">=3.8"
+summary = "Ready-to-go SQLAlchemy concoctions."
+groups = ["default"]
+dependencies = [
+    "alembic>=1.12.0",
+    "greenlet; sys_platform == \"darwin\"",
+    "sqlalchemy>=2.0.20",
+    "typing-extensions>=4.0.0",
+]
+files = [
+    {file = "advanced_alchemy-0.17.1-py3-none-any.whl", hash = "sha256:a48d2702e430febd277dc66c0959ce0fbd85eb475a625ec5999b88688bb36167"},
+    {file = "advanced_alchemy-0.17.1.tar.gz", hash = "sha256:3bee29096c1b9cea4102e7366c6d8685a51030d078778a6748f8e4c43013ab81"},
+]
+
+[[package]]
+name = "advanced-alchemy"
+version = "0.17.1"
+extras = ["uuid"]
+requires_python = ">=3.8"
+summary = "Ready-to-go SQLAlchemy concoctions."
+groups = ["default"]
+dependencies = [
+    "advanced-alchemy==0.17.1",
+    "uuid-utils>=0.6.1",
+]
+files = [
+    {file = "advanced_alchemy-0.17.1-py3-none-any.whl", hash = "sha256:a48d2702e430febd277dc66c0959ce0fbd85eb475a625ec5999b88688bb36167"},
+    {file = "advanced_alchemy-0.17.1.tar.gz", hash = "sha256:3bee29096c1b9cea4102e7366c6d8685a51030d078778a6748f8e4c43013ab81"},
+]
+
+[[package]]
+name = "aiosqlite"
+version = "0.20.0"
+requires_python = ">=3.8"
+summary = "asyncio bridge to the standard sqlite3 module"
+groups = ["default"]
+dependencies = [
+    "typing-extensions>=4.0",
+]
+files = [
+    {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"},
+    {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"},
+]
+
+[[package]]
+name = "alabaster"
+version = "0.7.16"
+requires_python = ">=3.9"
+summary = "A light, configurable Sphinx theme"
+groups = ["docs"]
+files = [
+    {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"},
+    {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"},
+]
+
+[[package]]
+name = "alembic"
+version = "1.13.1"
+requires_python = ">=3.8"
+summary = "A database migration tool for SQLAlchemy."
+groups = ["default"]
+dependencies = [
+    "Mako",
+    "SQLAlchemy>=1.3.0",
+    "typing-extensions>=4",
+]
+files = [
+    {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"},
+    {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"},
+]
+
+[[package]]
+name = "anyio"
+version = "4.4.0"
+requires_python = ">=3.8"
+summary = "High level compatibility layer for multiple asynchronous event loop implementations"
+groups = ["default", "docs"]
+dependencies = [
+    "exceptiongroup>=1.0.2; python_version < \"3.11\"",
+    "idna>=2.8",
+    "sniffio>=1.1",
+    "typing-extensions>=4.1; python_version < \"3.11\"",
+]
+files = [
+    {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
+    {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
+]
+
+[[package]]
+name = "apeye"
+version = "1.4.1"
+requires_python = ">=3.6.1"
+summary = "Handy tools for working with URLs and APIs."
+groups = ["docs"]
+dependencies = [
+    "apeye-core>=1.0.0b2",
+    "domdf-python-tools>=2.6.0",
+    "platformdirs>=2.3.0",
+    "requests>=2.24.0",
+]
+files = [
+    {file = "apeye-1.4.1-py3-none-any.whl", hash = "sha256:44e58a9104ec189bf42e76b3a7fe91e2b2879d96d48e9a77e5e32ff699c9204e"},
+    {file = "apeye-1.4.1.tar.gz", hash = "sha256:14ea542fad689e3bfdbda2189a354a4908e90aee4bf84c15ab75d68453d76a36"},
+]
+
+[[package]]
+name = "apeye-core"
+version = "1.1.5"
+requires_python = ">=3.6.1"
+summary = "Core (offline) functionality for the apeye library."
+groups = ["docs"]
+dependencies = [
+    "domdf-python-tools>=2.6.0",
+    "idna>=2.5",
+]
+files = [
+    {file = "apeye_core-1.1.5-py3-none-any.whl", hash = "sha256:dc27a93f8c9e246b3b238c5ea51edf6115ab2618ef029b9f2d9a190ec8228fbf"},
+    {file = "apeye_core-1.1.5.tar.gz", hash = "sha256:5de72ed3d00cc9b20fea55e54b7ab8f5ef8500eb33a5368bc162a5585e238a55"},
+]
+
+[[package]]
+name = "argon2-cffi"
+version = "23.1.0"
+requires_python = ">=3.7"
+summary = "Argon2 for Python"
+groups = ["default"]
+dependencies = [
+    "argon2-cffi-bindings",
+]
+files = [
+    {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"},
+    {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"},
+]
+
+[[package]]
+name = "argon2-cffi-bindings"
+version = "21.2.0"
+requires_python = ">=3.6"
+summary = "Low-level CFFI bindings for Argon2"
+groups = ["default"]
+dependencies = [
+    "cffi>=1.0.1",
+]
+files = [
+    {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"},
+    {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"},
+    {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"},
+    {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"},
+    {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"},
+    {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"},
+    {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"},
+    {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"},
+    {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"},
+    {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"},
+    {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"},
+    {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"},
+    {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"},
+    {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"},
+    {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"},
+    {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"},
+    {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"},
+    {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"},
+    {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"},
+    {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"},
+    {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"},
+]
+
+[[package]]
+name = "astroid"
+version = "3.2.2"
+requires_python = ">=3.8.0"
+summary = "An abstract syntax tree for Python with inference support."
+groups = ["linting"]
+dependencies = [
+    "typing-extensions>=4.0.0; python_version < \"3.11\"",
+]
+files = [
+    {file = "astroid-3.2.2-py3-none-any.whl", hash = "sha256:e8a0083b4bb28fcffb6207a3bfc9e5d0a68be951dd7e336d5dcf639c682388c0"},
+    {file = "astroid-3.2.2.tar.gz", hash = "sha256:8ead48e31b92b2e217b6c9733a21afafe479d52d6e164dd25fb1a770c7c3cf94"},
+]
+
+[[package]]
+name = "async-timeout"
+version = "4.0.3"
+requires_python = ">=3.7"
+summary = "Timeout context manager for asyncio programs"
+groups = ["default", "linting", "test"]
+marker = "python_version < \"3.12.0\""
+files = [
+    {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
+    {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
+]
+
+[[package]]
+name = "asyncpg"
+version = "0.29.0"
+requires_python = ">=3.8.0"
+summary = "An asyncio PostgreSQL driver"
+groups = ["default", "linting", "test"]
+dependencies = [
+    "async-timeout>=4.0.3; python_version < \"3.12.0\"",
+]
+files = [
+    {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"},
+    {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"},
+    {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"},
+    {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"},
+    {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"},
+    {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"},
+    {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"},
+    {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"},
+    {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"},
+    {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"},
+    {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"},
+    {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"},
+    {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"},
+    {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"},
+    {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"},
+    {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"},
+    {file = "asyncpg-0.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6011b0dc29886ab424dc042bf9eeb507670a3b40aece3439944006aafe023178"},
+    {file = "asyncpg-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b544ffc66b039d5ec5a7454667f855f7fec08e0dfaf5a5490dfafbb7abbd2cfb"},
+    {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d84156d5fb530b06c493f9e7635aa18f518fa1d1395ef240d211cb563c4e2364"},
+    {file = "asyncpg-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54858bc25b49d1114178d65a88e48ad50cb2b6f3e475caa0f0c092d5f527c106"},
+    {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bde17a1861cf10d5afce80a36fca736a86769ab3579532c03e45f83ba8a09c59"},
+    {file = "asyncpg-0.29.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:37a2ec1b9ff88d8773d3eb6d3784dc7e3fee7756a5317b67f923172a4748a175"},
+    {file = "asyncpg-0.29.0-cp312-cp312-win32.whl", hash = "sha256:bb1292d9fad43112a85e98ecdc2e051602bce97c199920586be83254d9dafc02"},
+    {file = "asyncpg-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:2245be8ec5047a605e0b454c894e54bf2ec787ac04b1cb7e0d3c67aa1e32f0fe"},
+    {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"},
+]
+
+[[package]]
+name = "asyncpg-stubs"
+version = "0.29.1"
+requires_python = ">=3.8,<4.0"
+summary = "asyncpg stubs"
+groups = ["linting"]
+dependencies = [
+    "asyncpg<0.30,>=0.29",
+    "typing-extensions<5.0.0,>=4.7.0",
+]
+files = [
+    {file = "asyncpg_stubs-0.29.1-py3-none-any.whl", hash = "sha256:cce994d5a19394249e74ae8d252bde3c77cee0ddfc776cc708b724fdb4adebb6"},
+    {file = "asyncpg_stubs-0.29.1.tar.gz", hash = "sha256:686afcc0af3a2f3c8e393cd850e0de430e5a139ce82b2f28ef8f693ecdf918bf"},
+]
+
+[[package]]
+name = "autodocsumm"
+version = "0.2.12"
+requires_python = ">=3.7"
+summary = "Extended sphinx autodoc including automatic autosummaries"
+groups = ["docs"]
+dependencies = [
+    "Sphinx<8.0,>=2.2",
+]
+files = [
+    {file = "autodocsumm-0.2.12-py3-none-any.whl", hash = "sha256:b842b53c686c07a4f174721ca4e729b027367703dbf42e2508863a3c6d6c049c"},
+    {file = "autodocsumm-0.2.12.tar.gz", hash = "sha256:848fe8c38df433c6635489499b969cb47cc389ed3d7b6e75c8ccbc94d4b3bf9e"},
+]
+
+[[package]]
+name = "babel"
+version = "2.15.0"
+requires_python = ">=3.8"
+summary = "Internationalization utilities"
+groups = ["docs"]
+files = [
+    {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"},
+    {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"},
+]
+
+[[package]]
+name = "beautifulsoup4"
+version = "4.12.3"
+requires_python = ">=3.6.0"
+summary = "Screen-scraping library"
+groups = ["docs"]
+dependencies = [
+    "soupsieve>1.2",
+]
+files = [
+    {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"},
+    {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"},
+]
+
+[[package]]
+name = "cachecontrol"
+version = "0.14.0"
+requires_python = ">=3.7"
+summary = "httplib2 caching for requests"
+groups = ["docs"]
+dependencies = [
+    "msgpack<2.0.0,>=0.5.2",
+    "requests>=2.16.0",
+]
+files = [
+    {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"},
+    {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"},
+]
+
+[[package]]
+name = "cachecontrol"
+version = "0.14.0"
+extras = ["filecache"]
+requires_python = ">=3.7"
+summary = "httplib2 caching for requests"
+groups = ["docs"]
+dependencies = [
+    "cachecontrol==0.14.0",
+    "filelock>=3.8.0",
+]
+files = [
+    {file = "cachecontrol-0.14.0-py3-none-any.whl", hash = "sha256:f5bf3f0620c38db2e5122c0726bdebb0d16869de966ea6a2befe92470b740ea0"},
+    {file = "cachecontrol-0.14.0.tar.gz", hash = "sha256:7db1195b41c81f8274a7bbd97c956f44e8348265a1bc7641c37dfebc39f0c938"},
+]
+
+[[package]]
+name = "certifi"
+version = "2024.6.2"
+requires_python = ">=3.6"
+summary = "Python package for providing Mozilla's CA Bundle."
+groups = ["default", "docs"]
+files = [
+    {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"},
+    {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"},
+]
+
+[[package]]
+name = "cffi"
+version = "1.16.0"
+requires_python = ">=3.8"
+summary = "Foreign Function Interface for Python calling C code."
+groups = ["default", "linting"]
+dependencies = [
+    "pycparser",
+]
+files = [
+    {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
+    {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
+    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
+    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
+    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
+    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
+    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
+    {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
+    {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
+    {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
+    {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
+    {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
+    {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
+    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
+    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
+    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
+    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
+    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
+    {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
+    {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
+    {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
+    {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
+    {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
+    {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
+    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
+    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
+    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
+    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
+    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
+    {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
+    {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
+    {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
+    {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
+]
+
+[[package]]
+name = "cfgv"
+version = "3.4.0"
+requires_python = ">=3.8"
+summary = "Validate configuration and produce human readable error messages."
+groups = ["linting"]
+files = [
+    {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
+    {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.3.2"
+requires_python = ">=3.7.0"
+summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+groups = ["docs"]
+files = [
+    {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
+    {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
+    {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
+    {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
+    {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.7"
+requires_python = ">=3.7"
+summary = "Composable command line interface toolkit"
+groups = ["default", "docs", "linting"]
+dependencies = [
+    "colorama; platform_system == \"Windows\"",
+]
+files = [
+    {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
+    {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+summary = "Cross-platform colored terminal text."
+groups = ["default", "docs", "linting", "test"]
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.5.4"
+requires_python = ">=3.8"
+summary = "Code coverage measurement for Python"
+groups = ["test"]
+files = [
+    {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"},
+    {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"},
+    {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"},
+    {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"},
+    {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"},
+    {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"},
+    {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"},
+    {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"},
+    {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"},
+    {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"},
+    {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"},
+    {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"},
+    {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"},
+    {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"},
+    {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"},
+    {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"},
+    {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"},
+    {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"},
+    {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"},
+    {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"},
+    {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"},
+    {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"},
+    {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"},
+    {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"},
+    {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"},
+    {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"},
+    {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"},
+    {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"},
+    {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"},
+    {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"},
+    {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"},
+    {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.5.4"
+extras = ["toml"]
+requires_python = ">=3.8"
+summary = "Code coverage measurement for Python"
+groups = ["test"]
+dependencies = [
+    "coverage==7.5.4",
+    "tomli; python_full_version <= \"3.11.0a6\"",
+]
+files = [
+    {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"},
+    {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"},
+    {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"},
+    {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"},
+    {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"},
+    {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"},
+    {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"},
+    {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"},
+    {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"},
+    {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"},
+    {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"},
+    {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"},
+    {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"},
+    {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"},
+    {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"},
+    {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"},
+    {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"},
+    {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"},
+    {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"},
+    {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"},
+    {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"},
+    {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"},
+    {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"},
+    {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"},
+    {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"},
+    {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"},
+    {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"},
+    {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"},
+    {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"},
+    {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"},
+    {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"},
+    {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"},
+]
+
+[[package]]
+name = "croniter"
+version = "2.0.5"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.6"
+summary = "croniter provides iteration for datetime object with cron like format"
+groups = ["default"]
+dependencies = [
+    "python-dateutil",
+    "pytz>2021.1",
+]
+files = [
+    {file = "croniter-2.0.5-py2.py3-none-any.whl", hash = "sha256:fdbb44920944045cc323db54599b321325141d82d14fa7453bc0699826bbe9ed"},
+    {file = "croniter-2.0.5.tar.gz", hash = "sha256:f1f8ca0af64212fbe99b1bee125ee5a1b53a9c1b433968d8bca8817b79d237f3"},
+]
+
+[[package]]
+name = "cryptography"
+version = "42.0.8"
+requires_python = ">=3.7"
+summary = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+groups = ["default", "linting"]
+dependencies = [
+    "cffi>=1.12; platform_python_implementation != \"PyPy\"",
+]
+files = [
+    {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"},
+    {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"},
+    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"},
+    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"},
+    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"},
+    {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"},
+    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"},
+    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"},
+    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"},
+    {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"},
+    {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"},
+    {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"},
+    {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"},
+    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"},
+    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"},
+    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"},
+    {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"},
+    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"},
+    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"},
+    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"},
+    {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"},
+    {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"},
+    {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"},
+    {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"},
+    {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"},
+    {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"},
+    {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"},
+    {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"},
+    {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"},
+    {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"},
+    {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"},
+    {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"},
+]
+
+[[package]]
+name = "cssutils"
+version = "2.11.1"
+requires_python = ">=3.8"
+summary = "A CSS Cascading Style Sheets library for Python"
+groups = ["docs"]
+dependencies = [
+    "more-itertools",
+]
+files = [
+    {file = "cssutils-2.11.1-py3-none-any.whl", hash = "sha256:a67bfdfdff4f3867fab43698ec4897c1a828eca5973f4073321b3bccaf1199b1"},
+    {file = "cssutils-2.11.1.tar.gz", hash = "sha256:0563a76513b6af6eebbe788c3bf3d01c920e46b3f90c8416738c5cfc773ff8e2"},
+]
+
+[[package]]
+name = "dict2css"
+version = "0.3.0.post1"
+requires_python = ">=3.6"
+summary = "A μ-library for constructing cascading style sheets from Python dictionaries."
+groups = ["docs"]
+dependencies = [
+    "cssutils>=2.2.0",
+    "domdf-python-tools>=2.2.0",
+]
+files = [
+    {file = "dict2css-0.3.0.post1-py3-none-any.whl", hash = "sha256:f006a6b774c3e31869015122ae82c491fd25e7de4a75607a62aa3e798f837e0d"},
+    {file = "dict2css-0.3.0.post1.tar.gz", hash = "sha256:89c544c21c4ca7472c3fffb9d37d3d926f606329afdb751dc1de67a411b70719"},
+]
+
+[[package]]
+name = "dill"
+version = "0.3.8"
+requires_python = ">=3.8"
+summary = "serialize all of Python"
+groups = ["linting"]
+files = [
+    {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"},
+    {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"},
+]
+
+[[package]]
+name = "distlib"
+version = "0.3.8"
+summary = "Distribution utilities"
+groups = ["linting"]
+files = [
+    {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
+    {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
+]
+
+[[package]]
+name = "docutils"
+version = "0.21.2"
+requires_python = ">=3.9"
+summary = "Docutils -- Python Documentation Utilities"
+groups = ["docs"]
+files = [
+    {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"},
+    {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"},
+]
+
+[[package]]
+name = "domdf-python-tools"
+version = "3.8.1"
+requires_python = ">=3.6"
+summary = "Helpful functions for Python 🐍 🛠️"
+groups = ["docs"]
+dependencies = [
+    "natsort>=7.0.1",
+    "typing-extensions>=3.7.4.1",
+]
+files = [
+    {file = "domdf_python_tools-3.8.1-py3-none-any.whl", hash = "sha256:9821d76505e16c0fab60b37be90b8acf401c9604f7119cf6bce314f848461c7e"},
+    {file = "domdf_python_tools-3.8.1.tar.gz", hash = "sha256:f45e34cf4d3363af59c32da28a9de9480ed916eff06bd0cf9a1644b6b460fb88"},
+]
+
+[[package]]
+name = "ecdsa"
+version = "0.19.0"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.6"
+summary = "ECDSA cryptographic signature library (pure python)"
+groups = ["default"]
+dependencies = [
+    "six>=1.9.0",
+]
+files = [
+    {file = "ecdsa-0.19.0-py2.py3-none-any.whl", hash = "sha256:2cea9b88407fdac7bbeca0833b189e4c9c53f2ef1e1eaa29f6224dbc809b707a"},
+    {file = "ecdsa-0.19.0.tar.gz", hash = "sha256:60eaad1199659900dd0af521ed462b793bbdf867432b3948e87416ae4caf6bf8"},
+]
+
+[[package]]
+name = "editorconfig"
+version = "0.12.4"
+summary = "EditorConfig File Locator and Interpreter for Python"
+groups = ["default"]
+files = [
+    {file = "EditorConfig-0.12.4.tar.gz", hash = "sha256:24857fa1793917dd9ccf0c7810a07e05404ce9b823521c7dce22a4fb5d125f80"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.2.1"
+requires_python = ">=3.7"
+summary = "Backport of PEP 654 (exception groups)"
+groups = ["default", "docs", "test"]
+marker = "python_version < \"3.11\""
+files = [
+    {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"},
+    {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"},
+]
+
+[[package]]
+name = "execnet"
+version = "2.1.1"
+requires_python = ">=3.8"
+summary = "execnet: rapid multi-Python deployment"
+groups = ["test"]
+files = [
+    {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"},
+    {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"},
+]
+
+[[package]]
+name = "faker"
+version = "25.9.1"
+requires_python = ">=3.8"
+summary = "Faker is a Python package that generates fake data for you."
+groups = ["default"]
+dependencies = [
+    "python-dateutil>=2.4",
+]
+files = [
+    {file = "Faker-25.9.1-py3-none-any.whl", hash = "sha256:f1dc27dc8035cb7e97e96afbb5fe1305eed6aeea53374702cbac96acfe851626"},
+    {file = "Faker-25.9.1.tar.gz", hash = "sha256:0e1cf7a8d3c94de91a65ab1e9cf7050903efae1e97901f8e5924a9f45147ae44"},
+]
+
+[[package]]
+name = "filelock"
+version = "3.15.4"
+requires_python = ">=3.8"
+summary = "A platform independent file lock."
+groups = ["docs", "linting"]
+files = [
+    {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"},
+    {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"},
+]
+
+[[package]]
+name = "granian"
+version = "1.4.4"
+requires_python = ">=3.8"
+summary = "A Rust HTTP server for Python applications"
+groups = ["default"]
+dependencies = [
+    "click>=8.0.0",
+    "uvloop>=0.18.0; sys_platform != \"win32\" and platform_python_implementation == \"CPython\"",
+]
+files = [
+    {file = "granian-1.4.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d796248f519574c419d359ce83c7d3bcfa085aeb08d9ade09a9640f0d41d7106"},
+    {file = "granian-1.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:811e9aeb401679af79aa1cdd0950d0c98e1ba5647c358ae979a1323a50214d39"},
+    {file = "granian-1.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c6e8e3c200b22ac172fccd765d141e7aeb0927bcdfa89ac17329ca8a445f905"},
+    {file = "granian-1.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6305adc0f52538be3bbf88b75f2f1cad94d19e37e0bd848b949c59def5d74bcc"},
+    {file = "granian-1.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9051bb5db918bfb37008f9ff9bb687cc3737268798ed43fc8a3238a173deca83"},
+    {file = "granian-1.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5141c42c82ffd245db9dd3e3cbf21676f417d7d9adbb38f97b3ea0c5907c0340"},
+    {file = "granian-1.4.4-cp310-none-win_amd64.whl", hash = "sha256:8f7d91588b336bcc2702288035cb1ee1cea4a96a1dcfe2460608ea7e34ab07bc"},
+    {file = "granian-1.4.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0c4a084ea7610b1c08f9516aa6c680c80fbdf7136d53bd6985d6ef9bde8591b8"},
+    {file = "granian-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f0d8bffb1fde587cc78ae4eb7e0cea910229dadc8138a5dc95ad83cacc6f6fc"},
+    {file = "granian-1.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:226df8a66083edfc9282c3232592c1ce6619c2696783d967d9c14e098d89f7a1"},
+    {file = "granian-1.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bc8faaa5495c2bf7c69598d9282b460cc268df32ec82d2316caf5486a23be61"},
+    {file = "granian-1.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3771998e3e50122de7b5752815235088087bb50553111363d72bc8ccca656a41"},
+    {file = "granian-1.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d50b16e57658e82144a906805edcd96d2549ceb91a31715d5dacaf7e24ba35fd"},
+    {file = "granian-1.4.4-cp311-none-win_amd64.whl", hash = "sha256:a2fd2ea2b58edc484ac5852923843f34cc5f6515cbe19d133c260e9bbf373533"},
+    {file = "granian-1.4.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b00ddb5525381743db3e2ea4cad74bda547249e3a7a505ce438ec06318ee978e"},
+    {file = "granian-1.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dffefff008f61b211e9f7b1c412316cbdd0c9acb4dacb2b6a045bdfe0e021045"},
+    {file = "granian-1.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce723be2155f1d7f08e7042b2080f5761273e8db25ab8cf5aef088bab01ebc8"},
+    {file = "granian-1.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc158e0f50035fb069a4971cebe65f981e6b1d860bff8a918458171411d3001"},
+    {file = "granian-1.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c5863bda4e2bd3409de2410738253bbd83cbf2e58f9c4645c62c5178ada13b55"},
+    {file = "granian-1.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:78e11190e98e01ff3d15088589c4ced2d2cc951ce2e01ee95e51ace098bd4d73"},
+    {file = "granian-1.4.4-cp312-none-win_amd64.whl", hash = "sha256:cfb1085e6c5ebe1c865120e14d6d74540d7ad17113d5ec1803dc63998a135dd6"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8d29dc7cdc3e95779276632426bb5daf842c32ceaae035a8b3df1acff7478190"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:92d80566b098854a4e9e26702e9ed40803176994abb9830dabc4ec81d9713a1b"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97f9a745e14a8c685b5e419a500674fbfb3a3a6cc68cfc1b85f9a426506f9a0e"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66a5cc114ebb025330874897a421c940cd5071849d2f9926026167de614cb89c"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3466d2f6d1b313048623219ff08ccb5719ffc22f6201239f9cf1a524a542bc5d"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:572167067698f231502990ffbb96793e047413641f33c0d0d5bdf4902b4bcb6f"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:707c79c303a626a87e29fff2c7855e9ac7fbf8e691836b23f5ebb5768df412b3"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d476fcba0c5f4a41ae9662251e4fb658f06d525d2da98fea713b0007bbb8580c"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:311a88a67f0290e51920ede62ce3a2d0da71111a304e4f8198c43b387763e739"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcbc562010483ad2993a4edc19ace53b80bd0539cfead558d4a864b785969311"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90625f9e8f5b14d2f869a16482835c3f13d3d3cb0ba14e885e2bce11fe2b1bdc"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bf8d688db0c1b21cd0dabca9d5b220daf67ef02f7bd3575b5ec3fc51bcccbfd1"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:47c69d1cf2d7edbafacf3ee49c4e676dd854485e3d14b5d93e5b05bd8454e3b5"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:eedfcf82ebc5e52afd8390a1ac84b8b651b957911d0d1637859171dd2ab0d316"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:228ebbc7170cc288b152dc7f10cd83e3522c95eb040ebd0c249819d2be7706e2"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:be4a79d484ee7ef103c4cb7a2ef10649323034288f3369202f45ef162b603551"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:918cf909e1775d06ae6de415f97909d3c807a5385850d299d67e147e64f98d37"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9169edc508f0e5bd26f31889116698dddc9e8476358ecf0aee006a52be2c6646"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:120e1d2bf54063883ec84690840cbb61dd909d03eca84084afc141e0638551ec"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2321edac173f6443033180effa98b6ed79842f253be2fcf2b5baee6adaafd1e9"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a5b8295eabf00cf51c86fc5c9b2e38462d5f09fb1f02c8090c91ae84f90c780c"},
+    {file = "granian-1.4.4.tar.gz", hash = "sha256:b13bf595b4773317db43a7191179f183fa886b2658bb8611bfbdf1fa2e8d3ada"},
+]
+
+[[package]]
+name = "granian"
+version = "1.4.4"
+extras = ["all"]
+requires_python = ">=3.8"
+summary = "A Rust HTTP server for Python applications"
+groups = ["default"]
+dependencies = [
+    "granian==1.4.4",
+    "granian[pname,reload]",
+]
+files = [
+    {file = "granian-1.4.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d796248f519574c419d359ce83c7d3bcfa085aeb08d9ade09a9640f0d41d7106"},
+    {file = "granian-1.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:811e9aeb401679af79aa1cdd0950d0c98e1ba5647c358ae979a1323a50214d39"},
+    {file = "granian-1.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c6e8e3c200b22ac172fccd765d141e7aeb0927bcdfa89ac17329ca8a445f905"},
+    {file = "granian-1.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6305adc0f52538be3bbf88b75f2f1cad94d19e37e0bd848b949c59def5d74bcc"},
+    {file = "granian-1.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9051bb5db918bfb37008f9ff9bb687cc3737268798ed43fc8a3238a173deca83"},
+    {file = "granian-1.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5141c42c82ffd245db9dd3e3cbf21676f417d7d9adbb38f97b3ea0c5907c0340"},
+    {file = "granian-1.4.4-cp310-none-win_amd64.whl", hash = "sha256:8f7d91588b336bcc2702288035cb1ee1cea4a96a1dcfe2460608ea7e34ab07bc"},
+    {file = "granian-1.4.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0c4a084ea7610b1c08f9516aa6c680c80fbdf7136d53bd6985d6ef9bde8591b8"},
+    {file = "granian-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f0d8bffb1fde587cc78ae4eb7e0cea910229dadc8138a5dc95ad83cacc6f6fc"},
+    {file = "granian-1.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:226df8a66083edfc9282c3232592c1ce6619c2696783d967d9c14e098d89f7a1"},
+    {file = "granian-1.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bc8faaa5495c2bf7c69598d9282b460cc268df32ec82d2316caf5486a23be61"},
+    {file = "granian-1.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3771998e3e50122de7b5752815235088087bb50553111363d72bc8ccca656a41"},
+    {file = "granian-1.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d50b16e57658e82144a906805edcd96d2549ceb91a31715d5dacaf7e24ba35fd"},
+    {file = "granian-1.4.4-cp311-none-win_amd64.whl", hash = "sha256:a2fd2ea2b58edc484ac5852923843f34cc5f6515cbe19d133c260e9bbf373533"},
+    {file = "granian-1.4.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b00ddb5525381743db3e2ea4cad74bda547249e3a7a505ce438ec06318ee978e"},
+    {file = "granian-1.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dffefff008f61b211e9f7b1c412316cbdd0c9acb4dacb2b6a045bdfe0e021045"},
+    {file = "granian-1.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce723be2155f1d7f08e7042b2080f5761273e8db25ab8cf5aef088bab01ebc8"},
+    {file = "granian-1.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc158e0f50035fb069a4971cebe65f981e6b1d860bff8a918458171411d3001"},
+    {file = "granian-1.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c5863bda4e2bd3409de2410738253bbd83cbf2e58f9c4645c62c5178ada13b55"},
+    {file = "granian-1.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:78e11190e98e01ff3d15088589c4ced2d2cc951ce2e01ee95e51ace098bd4d73"},
+    {file = "granian-1.4.4-cp312-none-win_amd64.whl", hash = "sha256:cfb1085e6c5ebe1c865120e14d6d74540d7ad17113d5ec1803dc63998a135dd6"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8d29dc7cdc3e95779276632426bb5daf842c32ceaae035a8b3df1acff7478190"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:92d80566b098854a4e9e26702e9ed40803176994abb9830dabc4ec81d9713a1b"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97f9a745e14a8c685b5e419a500674fbfb3a3a6cc68cfc1b85f9a426506f9a0e"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66a5cc114ebb025330874897a421c940cd5071849d2f9926026167de614cb89c"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3466d2f6d1b313048623219ff08ccb5719ffc22f6201239f9cf1a524a542bc5d"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:572167067698f231502990ffbb96793e047413641f33c0d0d5bdf4902b4bcb6f"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:707c79c303a626a87e29fff2c7855e9ac7fbf8e691836b23f5ebb5768df412b3"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d476fcba0c5f4a41ae9662251e4fb658f06d525d2da98fea713b0007bbb8580c"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:311a88a67f0290e51920ede62ce3a2d0da71111a304e4f8198c43b387763e739"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcbc562010483ad2993a4edc19ace53b80bd0539cfead558d4a864b785969311"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90625f9e8f5b14d2f869a16482835c3f13d3d3cb0ba14e885e2bce11fe2b1bdc"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bf8d688db0c1b21cd0dabca9d5b220daf67ef02f7bd3575b5ec3fc51bcccbfd1"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:47c69d1cf2d7edbafacf3ee49c4e676dd854485e3d14b5d93e5b05bd8454e3b5"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:eedfcf82ebc5e52afd8390a1ac84b8b651b957911d0d1637859171dd2ab0d316"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:228ebbc7170cc288b152dc7f10cd83e3522c95eb040ebd0c249819d2be7706e2"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:be4a79d484ee7ef103c4cb7a2ef10649323034288f3369202f45ef162b603551"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:918cf909e1775d06ae6de415f97909d3c807a5385850d299d67e147e64f98d37"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9169edc508f0e5bd26f31889116698dddc9e8476358ecf0aee006a52be2c6646"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:120e1d2bf54063883ec84690840cbb61dd909d03eca84084afc141e0638551ec"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2321edac173f6443033180effa98b6ed79842f253be2fcf2b5baee6adaafd1e9"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a5b8295eabf00cf51c86fc5c9b2e38462d5f09fb1f02c8090c91ae84f90c780c"},
+    {file = "granian-1.4.4.tar.gz", hash = "sha256:b13bf595b4773317db43a7191179f183fa886b2658bb8611bfbdf1fa2e8d3ada"},
+]
+
+[[package]]
+name = "granian"
+version = "1.4.4"
+extras = ["pname", "reload"]
+requires_python = ">=3.8"
+summary = "A Rust HTTP server for Python applications"
+groups = ["default"]
+dependencies = [
+    "granian==1.4.4",
+    "setproctitle~=1.3.3",
+    "watchfiles~=0.21",
+]
+files = [
+    {file = "granian-1.4.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d796248f519574c419d359ce83c7d3bcfa085aeb08d9ade09a9640f0d41d7106"},
+    {file = "granian-1.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:811e9aeb401679af79aa1cdd0950d0c98e1ba5647c358ae979a1323a50214d39"},
+    {file = "granian-1.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c6e8e3c200b22ac172fccd765d141e7aeb0927bcdfa89ac17329ca8a445f905"},
+    {file = "granian-1.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6305adc0f52538be3bbf88b75f2f1cad94d19e37e0bd848b949c59def5d74bcc"},
+    {file = "granian-1.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9051bb5db918bfb37008f9ff9bb687cc3737268798ed43fc8a3238a173deca83"},
+    {file = "granian-1.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5141c42c82ffd245db9dd3e3cbf21676f417d7d9adbb38f97b3ea0c5907c0340"},
+    {file = "granian-1.4.4-cp310-none-win_amd64.whl", hash = "sha256:8f7d91588b336bcc2702288035cb1ee1cea4a96a1dcfe2460608ea7e34ab07bc"},
+    {file = "granian-1.4.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0c4a084ea7610b1c08f9516aa6c680c80fbdf7136d53bd6985d6ef9bde8591b8"},
+    {file = "granian-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f0d8bffb1fde587cc78ae4eb7e0cea910229dadc8138a5dc95ad83cacc6f6fc"},
+    {file = "granian-1.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:226df8a66083edfc9282c3232592c1ce6619c2696783d967d9c14e098d89f7a1"},
+    {file = "granian-1.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bc8faaa5495c2bf7c69598d9282b460cc268df32ec82d2316caf5486a23be61"},
+    {file = "granian-1.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3771998e3e50122de7b5752815235088087bb50553111363d72bc8ccca656a41"},
+    {file = "granian-1.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d50b16e57658e82144a906805edcd96d2549ceb91a31715d5dacaf7e24ba35fd"},
+    {file = "granian-1.4.4-cp311-none-win_amd64.whl", hash = "sha256:a2fd2ea2b58edc484ac5852923843f34cc5f6515cbe19d133c260e9bbf373533"},
+    {file = "granian-1.4.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b00ddb5525381743db3e2ea4cad74bda547249e3a7a505ce438ec06318ee978e"},
+    {file = "granian-1.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dffefff008f61b211e9f7b1c412316cbdd0c9acb4dacb2b6a045bdfe0e021045"},
+    {file = "granian-1.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce723be2155f1d7f08e7042b2080f5761273e8db25ab8cf5aef088bab01ebc8"},
+    {file = "granian-1.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc158e0f50035fb069a4971cebe65f981e6b1d860bff8a918458171411d3001"},
+    {file = "granian-1.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c5863bda4e2bd3409de2410738253bbd83cbf2e58f9c4645c62c5178ada13b55"},
+    {file = "granian-1.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:78e11190e98e01ff3d15088589c4ced2d2cc951ce2e01ee95e51ace098bd4d73"},
+    {file = "granian-1.4.4-cp312-none-win_amd64.whl", hash = "sha256:cfb1085e6c5ebe1c865120e14d6d74540d7ad17113d5ec1803dc63998a135dd6"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8d29dc7cdc3e95779276632426bb5daf842c32ceaae035a8b3df1acff7478190"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:92d80566b098854a4e9e26702e9ed40803176994abb9830dabc4ec81d9713a1b"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97f9a745e14a8c685b5e419a500674fbfb3a3a6cc68cfc1b85f9a426506f9a0e"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66a5cc114ebb025330874897a421c940cd5071849d2f9926026167de614cb89c"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3466d2f6d1b313048623219ff08ccb5719ffc22f6201239f9cf1a524a542bc5d"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:572167067698f231502990ffbb96793e047413641f33c0d0d5bdf4902b4bcb6f"},
+    {file = "granian-1.4.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:707c79c303a626a87e29fff2c7855e9ac7fbf8e691836b23f5ebb5768df412b3"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d476fcba0c5f4a41ae9662251e4fb658f06d525d2da98fea713b0007bbb8580c"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:311a88a67f0290e51920ede62ce3a2d0da71111a304e4f8198c43b387763e739"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcbc562010483ad2993a4edc19ace53b80bd0539cfead558d4a864b785969311"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90625f9e8f5b14d2f869a16482835c3f13d3d3cb0ba14e885e2bce11fe2b1bdc"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bf8d688db0c1b21cd0dabca9d5b220daf67ef02f7bd3575b5ec3fc51bcccbfd1"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:47c69d1cf2d7edbafacf3ee49c4e676dd854485e3d14b5d93e5b05bd8454e3b5"},
+    {file = "granian-1.4.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:eedfcf82ebc5e52afd8390a1ac84b8b651b957911d0d1637859171dd2ab0d316"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:228ebbc7170cc288b152dc7f10cd83e3522c95eb040ebd0c249819d2be7706e2"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:be4a79d484ee7ef103c4cb7a2ef10649323034288f3369202f45ef162b603551"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:918cf909e1775d06ae6de415f97909d3c807a5385850d299d67e147e64f98d37"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9169edc508f0e5bd26f31889116698dddc9e8476358ecf0aee006a52be2c6646"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:120e1d2bf54063883ec84690840cbb61dd909d03eca84084afc141e0638551ec"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2321edac173f6443033180effa98b6ed79842f253be2fcf2b5baee6adaafd1e9"},
+    {file = "granian-1.4.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a5b8295eabf00cf51c86fc5c9b2e38462d5f09fb1f02c8090c91ae84f90c780c"},
+    {file = "granian-1.4.4.tar.gz", hash = "sha256:b13bf595b4773317db43a7191179f183fa886b2658bb8611bfbdf1fa2e8d3ada"},
+]
+
+[[package]]
+name = "greenlet"
+version = "3.0.3"
+requires_python = ">=3.7"
+summary = "Lightweight in-process concurrent programming"
+groups = ["default"]
+marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\" or sys_platform == \"darwin\""
+files = [
+    {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
+    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"},
+    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"},
+    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"},
+    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"},
+    {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"},
+    {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"},
+    {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"},
+    {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"},
+    {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"},
+    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"},
+    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"},
+    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"},
+    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"},
+    {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"},
+    {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"},
+    {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"},
+    {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"},
+    {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"},
+    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"},
+    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"},
+    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"},
+    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"},
+    {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"},
+    {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"},
+    {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"},
+    {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"},
+    {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"},
+]
+
+[[package]]
+name = "h11"
+version = "0.14.0"
+requires_python = ">=3.7"
+summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+groups = ["default", "docs"]
+files = [
+    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "hiredis"
+version = "2.3.2"
+requires_python = ">=3.7"
+summary = "Python wrapper for hiredis"
+groups = ["default"]
+files = [
+    {file = "hiredis-2.3.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:742093f33d374098aa21c1696ac6e4874b52658c870513a297a89265a4d08fe5"},
+    {file = "hiredis-2.3.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:9e14fb70ca4f7efa924f508975199353bf653f452e4ef0a1e47549e208f943d7"},
+    {file = "hiredis-2.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d7302b4b17fcc1cc727ce84ded7f6be4655701e8d58744f73b09cb9ed2b13df"},
+    {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed63e8b75c193c5e5a8288d9d7b011da076cc314fafc3bfd59ec1d8a750d48c8"},
+    {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b4edee59dc089bc3948f4f6fba309f51aa2ccce63902364900aa0a553a85e97"},
+    {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6481c3b7673a86276220140456c2a6fbfe8d1fb5c613b4728293c8634134824"},
+    {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684840b014ce83541a087fcf2d48227196576f56ae3e944d4dfe14c0a3e0ccb7"},
+    {file = "hiredis-2.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c4c0bcf786f0eac9593367b6279e9b89534e008edbf116dcd0de956524702c8"},
+    {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66ab949424ac6504d823cba45c4c4854af5c59306a1531edb43b4dd22e17c102"},
+    {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:322c668ee1c12d6c5750a4b1057e6b4feee2a75b3d25d630922a463cfe5e7478"},
+    {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bfa73e3f163c6e8b2ec26f22285d717a5f77ab2120c97a2605d8f48b26950dac"},
+    {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7f39f28ffc65de577c3bc0c7615f149e35bc927802a0f56e612db9b530f316f9"},
+    {file = "hiredis-2.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:55ce31bf4711da879b96d511208efb65a6165da4ba91cb3a96d86d5a8d9d23e6"},
+    {file = "hiredis-2.3.2-cp310-cp310-win32.whl", hash = "sha256:3dd63d0bbbe75797b743f35d37a4cca7ca7ba35423a0de742ae2985752f20c6d"},
+    {file = "hiredis-2.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:ea002656a8d974daaf6089863ab0a306962c8b715db6b10879f98b781a2a5bf5"},
+    {file = "hiredis-2.3.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:adfbf2e9c38b77d0db2fb32c3bdaea638fa76b4e75847283cd707521ad2475ef"},
+    {file = "hiredis-2.3.2-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:80b02d27864ebaf9b153d4b99015342382eeaed651f5591ce6f07e840307c56d"},
+    {file = "hiredis-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd40d2e2f82a483de0d0a6dfd8c3895a02e55e5c9949610ecbded18188fd0a56"},
+    {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfa904045d7cebfb0f01dad51352551cce1d873d7c3f80c7ded7d42f8cac8f89"},
+    {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28bd184b33e0dd6d65816c16521a4ba1ffbe9ff07d66873c42ea4049a62fed83"},
+    {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f70481213373d44614148f0f2e38e7905be3f021902ae5167289413196de4ba4"},
+    {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8797b528c1ff81eef06713623562b36db3dafa106b59f83a6468df788ff0d1"},
+    {file = "hiredis-2.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02fc71c8333586871602db4774d3a3e403b4ccf6446dc4603ec12df563127cee"},
+    {file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0da56915bda1e0a49157191b54d3e27689b70960f0685fdd5c415dacdee2fbed"},
+    {file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e2674a5a3168349435b08fa0b82998ed2536eb9acccf7087efe26e4cd088a525"},
+    {file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:dc1c3fd49930494a67dcec37d0558d99d84eca8eb3f03b17198424538f2608d7"},
+    {file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:14c7b43205e515f538a9defb4e411e0f0576caaeeda76bb9993ed505486f7562"},
+    {file = "hiredis-2.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7bac7e02915b970c3723a7a7c5df4ba7a11a3426d2a3f181e041aa506a1ff028"},
+    {file = "hiredis-2.3.2-cp311-cp311-win32.whl", hash = "sha256:63a090761ddc3c1f7db5e67aa4e247b4b3bb9890080bdcdadd1b5200b8b89ac4"},
+    {file = "hiredis-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:70d226ab0306a5b8d408235cabe51d4bf3554c9e8a72d53ce0b3c5c84cf78881"},
+    {file = "hiredis-2.3.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5c614552c6bd1d0d907f448f75550f6b24fb56cbfce80c094908b7990cad9702"},
+    {file = "hiredis-2.3.2-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9c431431abf55b64347ddc8df68b3ef840269cb0aa5bc2d26ad9506eb4b1b866"},
+    {file = "hiredis-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a45857e87e9d2b005e81ddac9d815a33efd26ec67032c366629f023fe64fb415"},
+    {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e138d141ec5a6ec800b6d01ddc3e5561ce1c940215e0eb9960876bfde7186aae"},
+    {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:387f655444d912a963ab68abf64bf6e178a13c8e4aa945cb27388fd01a02e6f1"},
+    {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4852f4bf88f0e2d9bdf91279892f5740ed22ae368335a37a52b92a5c88691140"},
+    {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d711c107e83117129b7f8bd08e9820c43ceec6204fff072a001fd82f6d13db9f"},
+    {file = "hiredis-2.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92830c16885f29163e1c2da1f3c1edb226df1210ec7e8711aaabba3dd0d5470a"},
+    {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:16b01d9ceae265d4ab9547be0cd628ecaff14b3360357a9d30c029e5ae8b7e7f"},
+    {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5986fb5f380169270a0293bebebd95466a1c85010b4f1afc2727e4d17c452512"},
+    {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:49532d7939cc51f8e99efc326090c54acf5437ed88b9c904cc8015b3c4eda9c9"},
+    {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8f34801b251ca43ad70691fb08b606a2e55f06b9c9fb1fc18fd9402b19d70f7b"},
+    {file = "hiredis-2.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7298562a49d95570ab1c7fc4051e72824c6a80e907993a21a41ba204223e7334"},
+    {file = "hiredis-2.3.2-cp312-cp312-win32.whl", hash = "sha256:e1d86b75de787481b04d112067a4033e1ecfda2a060e50318a74e4e1c9b2948c"},
+    {file = "hiredis-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:6dbfe1887ffa5cf3030451a56a8f965a9da2fa82b7149357752b67a335a05fc6"},
+    {file = "hiredis-2.3.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e8bf4444b09419b77ce671088db9f875b26720b5872d97778e2545cd87dba4a"},
+    {file = "hiredis-2.3.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bd42d0d45ea47a2f96babd82a659fbc60612ab9423a68e4a8191e538b85542a"},
+    {file = "hiredis-2.3.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80441b55edbef868e2563842f5030982b04349408396e5ac2b32025fb06b5212"},
+    {file = "hiredis-2.3.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec444ab8f27562a363672d6a7372bc0700a1bdc9764563c57c5f9efa0e592b5f"},
+    {file = "hiredis-2.3.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f9f606e810858207d4b4287b4ef0dc622c2aa469548bf02b59dcc616f134f811"},
+    {file = "hiredis-2.3.2-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c3dde4ca00fe9eee3b76209711f1941bb86db42b8a75d7f2249ff9dfc026ab0e"},
+    {file = "hiredis-2.3.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4dd676107a1d3c724a56a9d9db38166ad4cf44f924ee701414751bd18a784a0"},
+    {file = "hiredis-2.3.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce42649e2676ad783186264d5ffc788a7612ecd7f9effb62d51c30d413a3eefe"},
+    {file = "hiredis-2.3.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e3f8b1733078ac663dad57e20060e16389a60ab542f18a97931f3a2a2dd64a4"},
+    {file = "hiredis-2.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:532a84a82156a82529ec401d1c25d677c6543c791e54a263aa139541c363995f"},
+    {file = "hiredis-2.3.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4d59f88c4daa36b8c38e59ac7bffed6f5d7f68eaccad471484bf587b28ccc478"},
+    {file = "hiredis-2.3.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91a14dd95e24dc078204b18b0199226ee44644974c645dc54ee7b00c3157330"},
+    {file = "hiredis-2.3.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb777a38797c8c7df0444533119570be18d1a4ce5478dffc00c875684df7bfcb"},
+    {file = "hiredis-2.3.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d47c915897a99d0d34a39fad4be97b4b709ab3d0d3b779ebccf2b6024a8c681e"},
+    {file = "hiredis-2.3.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:333b5e04866758b11bda5f5315b4e671d15755fc6ed3b7969721bc6311d0ee36"},
+    {file = "hiredis-2.3.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c8937f1100435698c18e4da086968c4b5d70e86ea718376f833475ab3277c9aa"},
+    {file = "hiredis-2.3.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa45f7d771094b8145af10db74704ab0f698adb682fbf3721d8090f90e42cc49"},
+    {file = "hiredis-2.3.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d5ebc93c39aed4b5bc769f8ce0819bc50e74bb95d57a35f838f1c4378978e0"},
+    {file = "hiredis-2.3.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a797d8c7df9944314d309b0d9e1b354e2fa4430a05bb7604da13b6ad291bf959"},
+    {file = "hiredis-2.3.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e15a408f71a6c8c87b364f1f15a6cd9c1baca12bbc47a326ac8ab99ec7ad3c64"},
+    {file = "hiredis-2.3.2.tar.gz", hash = "sha256:733e2456b68f3f126ddaf2cd500a33b25146c3676b97ea843665717bda0c5d43"},
+]
+
+[[package]]
+name = "html5lib"
+version = "1.1"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+summary = "HTML parser based on the WHATWG HTML specification"
+groups = ["docs"]
+dependencies = [
+    "six>=1.9",
+    "webencodings",
+]
+files = [
+    {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"},
+    {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"},
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.5"
+requires_python = ">=3.8"
+summary = "A minimal low-level HTTP client."
+groups = ["default"]
+dependencies = [
+    "certifi",
+    "h11<0.15,>=0.13",
+]
+files = [
+    {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
+    {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
+]
+
+[[package]]
+name = "httptools"
+version = "0.6.1"
+requires_python = ">=3.8.0"
+summary = "A collection of framework independent HTTP protocol utils."
+groups = ["default"]
+files = [
+    {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"},
+    {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"},
+    {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"},
+    {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"},
+    {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"},
+    {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"},
+    {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"},
+    {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"},
+    {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"},
+    {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"},
+    {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"},
+    {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"},
+    {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"},
+    {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"},
+    {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"},
+    {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"},
+    {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"},
+    {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"},
+    {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"},
+    {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"},
+    {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"},
+    {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"},
+]
+
+[[package]]
+name = "httpx"
+version = "0.27.0"
+requires_python = ">=3.8"
+summary = "The next generation HTTP client."
+groups = ["default"]
+dependencies = [
+    "anyio",
+    "certifi",
+    "httpcore==1.*",
+    "idna",
+    "sniffio",
+]
+files = [
+    {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"},
+    {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"},
+]
+
+[[package]]
+name = "identify"
+version = "2.5.36"
+requires_python = ">=3.8"
+summary = "File identification library for Python"
+groups = ["linting"]
+files = [
+    {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"},
+    {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"},
+]
+
+[[package]]
+name = "idna"
+version = "3.7"
+requires_python = ">=3.5"
+summary = "Internationalized Domain Names in Applications (IDNA)"
+groups = ["default", "docs"]
+files = [
+    {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
+    {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+]
+
+[[package]]
+name = "imagesize"
+version = "1.4.1"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+summary = "Getting image size from png/jpeg/jpeg2000/gif file"
+groups = ["docs"]
+files = [
+    {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
+    {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+requires_python = ">=3.7"
+summary = "brain-dead simple config-ini parsing"
+groups = ["test"]
+files = [
+    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "isort"
+version = "5.13.2"
+requires_python = ">=3.8.0"
+summary = "A Python utility / library to sort Python imports."
+groups = ["linting"]
+files = [
+    {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
+    {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
+]
+
+[[package]]
+name = "jinja2"
+version = "3.1.4"
+requires_python = ">=3.7"
+summary = "A very fast and expressive template engine."
+groups = ["default", "docs"]
+dependencies = [
+    "MarkupSafe>=2.0",
+]
+files = [
+    {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
+    {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
+]
+
+[[package]]
+name = "jsbeautifier"
+version = "1.15.1"
+summary = "JavaScript unobfuscator and beautifier."
+groups = ["default"]
+dependencies = [
+    "editorconfig>=0.12.2",
+    "six>=1.13.0",
+]
+files = [
+    {file = "jsbeautifier-1.15.1.tar.gz", hash = "sha256:ebd733b560704c602d744eafc839db60a1ee9326e30a2a80c4adb8718adc1b24"},
+]
+
+[[package]]
+name = "litestar"
+version = "2.9.1"
+requires_python = "<4.0,>=3.8"
+summary = "Litestar - A production-ready, highly performant, extensible ASGI API Framework"
+groups = ["default"]
+dependencies = [
+    "anyio>=3",
+    "click",
+    "exceptiongroup; python_version < \"3.11\"",
+    "httpx>=0.22",
+    "msgspec>=0.18.2",
+    "multidict>=6.0.2",
+    "polyfactory>=2.6.3",
+    "pyyaml",
+    "rich-click",
+    "rich>=13.0.0",
+    "typing-extensions",
+]
+files = [
+    {file = "litestar-2.9.1-py3-none-any.whl", hash = "sha256:fe3e4ec91a9c24af652775fed5fa4d789902f165cabbd7d2e62821fec1f69462"},
+    {file = "litestar-2.9.1.tar.gz", hash = "sha256:7c13bb4dd7b1c77f6c462262cfe401ca6429eab3e4d98f38586b68268bd5ac97"},
+]
+
+[[package]]
+name = "litestar-granian"
+version = "0.5.0"
+requires_python = ">=3.8"
+summary = "Granian plugin for Litestar"
+groups = ["default"]
+dependencies = [
+    "granian[all]>=1.3.0",
+    "litestar>=2.0.1",
+]
+files = [
+    {file = "litestar_granian-0.5.0-py3-none-any.whl", hash = "sha256:394c94d99c997e827dac96495429e1020f3b5a72c8ff1bb30b5fc85de5786e1d"},
+    {file = "litestar_granian-0.5.0.tar.gz", hash = "sha256:2d1f464255b937b232b8bf4e2aaa16b8b036f6a345209ef805234cb7a08d7de6"},
+]
+
+[[package]]
+name = "litestar-saq"
+version = "0.1.21"
+requires_python = ">=3.8"
+summary = "Litestar integration for SAQ"
+groups = ["default"]
+dependencies = [
+    "litestar>=2.0.1",
+    "saq>=0.12.0",
+]
+files = [
+    {file = "litestar_saq-0.1.21-py3-none-any.whl", hash = "sha256:f30f3efbcbc8d4a2c33b22ea2c79b008f1a0363231194001c7e34e78a50a78ea"},
+    {file = "litestar_saq-0.1.21.tar.gz", hash = "sha256:2e8121dbc091efb1d7a76f2b7b611a8d178f82da39f2d39143dde08b2210f714"},
+]
+
+[[package]]
+name = "litestar-sphinx-theme"
+version = "0.2.0"
+requires_python = ">=3.8,<4.0"
+git = "https://github.com/litestar-org/litestar-sphinx-theme.git"
+revision = "76b1d0e4c8afff1ad135b1917fe09cf6c1cc6c9b"
+summary = "A Sphinx theme for the Litestar organization"
+groups = ["docs"]
+dependencies = [
+    "pydata-sphinx-theme<1.0.0,>=0.13.3",
+    "sphinx-design<1.0.0,>=0.3.0",
+]
+
+[[package]]
+name = "litestar-vite"
+version = "0.1.22"
+requires_python = ">=3.8"
+summary = "Vite plugin for Litestar"
+groups = ["default"]
+dependencies = [
+    "litestar[jinja]>=2.4.0",
+]
+files = [
+    {file = "litestar_vite-0.1.22-py3-none-any.whl", hash = "sha256:d950a665d6c261c6ecb2b2be129077abca43e8e464f1a39fc74cad6cd70816ab"},
+    {file = "litestar_vite-0.1.22.tar.gz", hash = "sha256:7062a7b1ed25f60476dde0eb81ec076d14c958b4eff226f33c77b25f60929c70"},
+]
+
+[[package]]
+name = "litestar-vite"
+version = "0.1.22"
+extras = ["nodeenv"]
+requires_python = ">=3.8"
+summary = "Vite plugin for Litestar"
+groups = ["default"]
+dependencies = [
+    "litestar-vite==0.1.22",
+    "nodeenv",
+]
+files = [
+    {file = "litestar_vite-0.1.22-py3-none-any.whl", hash = "sha256:d950a665d6c261c6ecb2b2be129077abca43e8e464f1a39fc74cad6cd70816ab"},
+    {file = "litestar_vite-0.1.22.tar.gz", hash = "sha256:7062a7b1ed25f60476dde0eb81ec076d14c958b4eff226f33c77b25f60929c70"},
+]
+
+[[package]]
+name = "litestar"
+version = "2.9.1"
+extras = ["jinja", "jwt", "redis", "structlog"]
+requires_python = "<4.0,>=3.8"
+summary = "Litestar - A production-ready, highly performant, extensible ASGI API Framework"
+groups = ["default"]
+dependencies = [
+    "cryptography",
+    "jinja2>=3.1.2",
+    "litestar==2.9.1",
+    "python-jose",
+    "redis[hiredis]>=4.4.4",
+    "structlog",
+]
+files = [
+    {file = "litestar-2.9.1-py3-none-any.whl", hash = "sha256:fe3e4ec91a9c24af652775fed5fa4d789902f165cabbd7d2e62821fec1f69462"},
+    {file = "litestar-2.9.1.tar.gz", hash = "sha256:7c13bb4dd7b1c77f6c462262cfe401ca6429eab3e4d98f38586b68268bd5ac97"},
+]
+
+[[package]]
+name = "litestar"
+version = "2.9.1"
+extras = ["jinja"]
+requires_python = "<4.0,>=3.8"
+summary = "Litestar - A production-ready, highly performant, extensible ASGI API Framework"
+groups = ["default"]
+dependencies = [
+    "jinja2>=3.1.2",
+    "litestar==2.9.1",
+]
+files = [
+    {file = "litestar-2.9.1-py3-none-any.whl", hash = "sha256:fe3e4ec91a9c24af652775fed5fa4d789902f165cabbd7d2e62821fec1f69462"},
+    {file = "litestar-2.9.1.tar.gz", hash = "sha256:7c13bb4dd7b1c77f6c462262cfe401ca6429eab3e4d98f38586b68268bd5ac97"},
+]
+
+[[package]]
+name = "mako"
+version = "1.3.5"
+requires_python = ">=3.8"
+summary = "A super-fast templating language that borrows the best ideas from the existing templating languages."
+groups = ["default"]
+dependencies = [
+    "MarkupSafe>=0.9.2",
+]
+files = [
+    {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"},
+    {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"},
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+requires_python = ">=3.8"
+summary = "Python port of markdown-it. Markdown parsing, done right!"
+groups = ["default"]
+dependencies = [
+    "mdurl~=0.1",
+]
+files = [
+    {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+    {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.5"
+requires_python = ">=3.7"
+summary = "Safely add untrusted strings to HTML/XML markup."
+groups = ["default", "docs"]
+files = [
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+    {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+    {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+    {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+    {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
+]
+
+[[package]]
+name = "mccabe"
+version = "0.7.0"
+requires_python = ">=3.6"
+summary = "McCabe checker, plugin for flake8"
+groups = ["linting"]
+files = [
+    {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+    {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+requires_python = ">=3.7"
+summary = "Markdown URL utilities"
+groups = ["default"]
+files = [
+    {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+    {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "more-itertools"
+version = "10.3.0"
+requires_python = ">=3.8"
+summary = "More routines for operating on iterables, beyond itertools"
+groups = ["docs"]
+files = [
+    {file = "more-itertools-10.3.0.tar.gz", hash = "sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463"},
+    {file = "more_itertools-10.3.0-py3-none-any.whl", hash = "sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320"},
+]
+
+[[package]]
+name = "msgpack"
+version = "1.0.8"
+requires_python = ">=3.8"
+summary = "MessagePack serializer"
+groups = ["docs"]
+files = [
+    {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"},
+    {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"},
+    {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"},
+    {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"},
+    {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"},
+    {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"},
+    {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"},
+    {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"},
+    {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"},
+    {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"},
+    {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"},
+    {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"},
+    {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"},
+    {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"},
+    {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"},
+    {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"},
+    {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"},
+    {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"},
+    {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"},
+    {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"},
+    {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"},
+    {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"},
+    {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"},
+    {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"},
+    {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"},
+    {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"},
+    {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"},
+    {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"},
+    {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"},
+    {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"},
+    {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"},
+    {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"},
+    {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"},
+    {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"},
+]
+
+[[package]]
+name = "msgspec"
+version = "0.18.6"
+requires_python = ">=3.8"
+summary = "A fast serialization and validation library, with builtin support for JSON, MessagePack, YAML, and TOML."
+groups = ["default"]
+files = [
+    {file = "msgspec-0.18.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77f30b0234eceeff0f651119b9821ce80949b4d667ad38f3bfed0d0ebf9d6d8f"},
+    {file = "msgspec-0.18.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a76b60e501b3932782a9da039bd1cd552b7d8dec54ce38332b87136c64852dd"},
+    {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06acbd6edf175bee0e36295d6b0302c6de3aaf61246b46f9549ca0041a9d7177"},
+    {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40a4df891676d9c28a67c2cc39947c33de516335680d1316a89e8f7218660410"},
+    {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a6896f4cd5b4b7d688018805520769a8446df911eb93b421c6c68155cdf9dd5a"},
+    {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3ac4dd63fd5309dd42a8c8c36c1563531069152be7819518be0a9d03be9788e4"},
+    {file = "msgspec-0.18.6-cp310-cp310-win_amd64.whl", hash = "sha256:fda4c357145cf0b760000c4ad597e19b53adf01382b711f281720a10a0fe72b7"},
+    {file = "msgspec-0.18.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e77e56ffe2701e83a96e35770c6adb655ffc074d530018d1b584a8e635b4f36f"},
+    {file = "msgspec-0.18.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5351afb216b743df4b6b147691523697ff3a2fc5f3d54f771e91219f5c23aaa"},
+    {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3232fabacef86fe8323cecbe99abbc5c02f7698e3f5f2e248e3480b66a3596b"},
+    {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b524df6ea9998bbc99ea6ee4d0276a101bcc1aa8d14887bb823914d9f60d07"},
+    {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:37f67c1d81272131895bb20d388dd8d341390acd0e192a55ab02d4d6468b434c"},
+    {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d0feb7a03d971c1c0353de1a8fe30bb6579c2dc5ccf29b5f7c7ab01172010492"},
+    {file = "msgspec-0.18.6-cp311-cp311-win_amd64.whl", hash = "sha256:41cf758d3f40428c235c0f27bc6f322d43063bc32da7b9643e3f805c21ed57b4"},
+    {file = "msgspec-0.18.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d86f5071fe33e19500920333c11e2267a31942d18fed4d9de5bc2fbab267d28c"},
+    {file = "msgspec-0.18.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce13981bfa06f5eb126a3a5a38b1976bddb49a36e4f46d8e6edecf33ccf11df1"},
+    {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97dec6932ad5e3ee1e3c14718638ba333befc45e0661caa57033cd4cc489466"},
+    {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad237100393f637b297926cae1868b0d500f764ccd2f0623a380e2bcfb2809ca"},
+    {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db1d8626748fa5d29bbd15da58b2d73af25b10aa98abf85aab8028119188ed57"},
+    {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d70cb3d00d9f4de14d0b31d38dfe60c88ae16f3182988246a9861259c6722af6"},
+    {file = "msgspec-0.18.6-cp312-cp312-win_amd64.whl", hash = "sha256:1003c20bfe9c6114cc16ea5db9c5466e49fae3d7f5e2e59cb70693190ad34da0"},
+    {file = "msgspec-0.18.6.tar.gz", hash = "sha256:a59fc3b4fcdb972d09138cb516dbde600c99d07c38fd9372a6ef500d2d031b4e"},
+]
+
+[[package]]
+name = "multidict"
+version = "6.0.5"
+requires_python = ">=3.7"
+summary = "multidict implementation"
+groups = ["default"]
+files = [
+    {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
+    {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
+    {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
+    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
+    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
+    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
+    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
+    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
+    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
+    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
+    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
+    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
+    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
+    {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
+    {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
+    {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
+    {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
+    {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
+    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
+    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
+    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
+    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
+    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
+    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
+    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
+    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
+    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
+    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
+    {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
+    {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
+    {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
+    {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
+    {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
+    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
+    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
+    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
+    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
+    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
+    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
+    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
+    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
+    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
+    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
+    {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
+    {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
+    {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
+    {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
+]
+
+[[package]]
+name = "mypy"
+version = "1.10.1"
+requires_python = ">=3.8"
+summary = "Optional static typing for Python"
+groups = ["linting"]
+dependencies = [
+    "mypy-extensions>=1.0.0",
+    "tomli>=1.1.0; python_version < \"3.11\"",
+    "typing-extensions>=4.1.0",
+]
+files = [
+    {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"},
+    {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"},
+    {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"},
+    {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"},
+    {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"},
+    {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"},
+    {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"},
+    {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"},
+    {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"},
+    {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"},
+    {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"},
+    {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"},
+    {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"},
+    {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"},
+    {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"},
+    {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"},
+    {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"},
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.0.0"
+requires_python = ">=3.5"
+summary = "Type system extensions for programs checked with the mypy type checker."
+groups = ["linting"]
+files = [
+    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
+
+[[package]]
+name = "natsort"
+version = "8.4.0"
+requires_python = ">=3.7"
+summary = "Simple yet flexible natural sorting in Python."
+groups = ["docs"]
+files = [
+    {file = "natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c"},
+    {file = "natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581"},
+]
+
+[[package]]
+name = "nodeenv"
+version = "1.9.1"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+summary = "Node.js virtual environment builder"
+groups = ["default", "dev", "linting"]
+files = [
+    {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
+    {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
+]
+
+[[package]]
+name = "packaging"
+version = "24.1"
+requires_python = ">=3.8"
+summary = "Core utilities for Python packages"
+groups = ["docs", "test"]
+files = [
+    {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
+    {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
+]
+
+[[package]]
+name = "passlib"
+version = "1.7.4"
+summary = "comprehensive password hashing framework supporting over 30 schemes"
+groups = ["default"]
+files = [
+    {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"},
+    {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"},
+]
+
+[[package]]
+name = "passlib"
+version = "1.7.4"
+extras = ["argon2"]
+summary = "comprehensive password hashing framework supporting over 30 schemes"
+groups = ["default"]
+dependencies = [
+    "argon2-cffi>=18.2.0",
+    "passlib==1.7.4",
+]
+files = [
+    {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"},
+    {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"},
+]
+
+[[package]]
+name = "platformdirs"
+version = "4.2.2"
+requires_python = ">=3.8"
+summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
+groups = ["docs", "linting"]
+files = [
+    {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
+    {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
+]
+
+[[package]]
+name = "pluggy"
+version = "1.5.0"
+requires_python = ">=3.8"
+summary = "plugin and hook calling mechanisms for python"
+groups = ["test"]
+files = [
+    {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+    {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
+]
+
+[[package]]
+name = "polyfactory"
+version = "2.16.0"
+requires_python = "<4.0,>=3.8"
+summary = "Mock data generation factories"
+groups = ["default"]
+dependencies = [
+    "faker",
+    "typing-extensions>=4.6.0",
+]
+files = [
+    {file = "polyfactory-2.16.0-py3-none-any.whl", hash = "sha256:168d8e50b77e91e35e691e8b3eedac43d7e423a6857fa26d473def96d53f0ecf"},
+    {file = "polyfactory-2.16.0.tar.gz", hash = "sha256:03d8c706b70c4782ac8e637d0f6ab52760a7d11b712da5936a95a8f7022b2688"},
+]
+
+[[package]]
+name = "pre-commit"
+version = "3.7.1"
+requires_python = ">=3.9"
+summary = "A framework for managing and maintaining multi-language pre-commit hooks."
+groups = ["linting"]
+dependencies = [
+    "cfgv>=2.0.0",
+    "identify>=1.0.0",
+    "nodeenv>=0.11.1",
+    "pyyaml>=5.1",
+    "virtualenv>=20.10.0",
+]
+files = [
+    {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"},
+    {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"},
+]
+
+[[package]]
+name = "pyasn1"
+version = "0.6.0"
+requires_python = ">=3.8"
+summary = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
+groups = ["default"]
+files = [
+    {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"},
+    {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"},
+]
+
+[[package]]
+name = "pycparser"
+version = "2.22"
+requires_python = ">=3.8"
+summary = "C parser in Python"
+groups = ["default", "linting"]
+files = [
+    {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+    {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
+]
+
+[[package]]
+name = "pydata-sphinx-theme"
+version = "0.15.3"
+requires_python = ">=3.9"
+summary = "Bootstrap-based Sphinx theme from the PyData community"
+groups = ["docs"]
+dependencies = [
+    "Babel",
+    "accessible-pygments",
+    "beautifulsoup4",
+    "docutils!=0.17.0",
+    "packaging",
+    "pygments>=2.7",
+    "sphinx>=5",
+    "typing-extensions",
+]
+files = [
+    {file = "pydata_sphinx_theme-0.15.3-py3-none-any.whl", hash = "sha256:a48ee049dc9b0f7064dbb8f7064b1cf3ae48aa193faafe14abd403a1b7102810"},
+    {file = "pydata_sphinx_theme-0.15.3.tar.gz", hash = "sha256:f26ed9b676f61d1b2ae9289f3d7e496e8678dd56f2568b27a66fa4ad1f164efd"},
+]
+
+[[package]]
+name = "pygments"
+version = "2.18.0"
+requires_python = ">=3.8"
+summary = "Pygments is a syntax highlighting package written in Python."
+groups = ["default", "docs"]
+files = [
+    {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+    {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
+]
+
+[[package]]
+name = "pylint"
+version = "3.2.5"
+requires_python = ">=3.8.0"
+summary = "python code static checker"
+groups = ["linting"]
+dependencies = [
+    "astroid<=3.3.0-dev0,>=3.2.2",
+    "colorama>=0.4.5; sys_platform == \"win32\"",
+    "dill>=0.2; python_version < \"3.11\"",
+    "dill>=0.3.6; python_version >= \"3.11\"",
+    "dill>=0.3.7; python_version >= \"3.12\"",
+    "isort!=5.13.0,<6,>=4.2.5",
+    "mccabe<0.8,>=0.6",
+    "platformdirs>=2.2.0",
+    "tomli>=1.1.0; python_version < \"3.11\"",
+    "tomlkit>=0.10.1",
+]
+files = [
+    {file = "pylint-3.2.5-py3-none-any.whl", hash = "sha256:32cd6c042b5004b8e857d727708720c54a676d1e22917cf1a2df9b4d4868abd6"},
+    {file = "pylint-3.2.5.tar.gz", hash = "sha256:e9b7171e242dcc6ebd0aaa7540481d1a72860748a0a7816b8fe6cf6c80a6fe7e"},
+]
+
+[[package]]
+name = "pytest"
+version = "8.2.2"
+requires_python = ">=3.8"
+summary = "pytest: simple powerful testing with Python"
+groups = ["test"]
+dependencies = [
+    "colorama; sys_platform == \"win32\"",
+    "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"",
+    "iniconfig",
+    "packaging",
+    "pluggy<2.0,>=1.5",
+    "tomli>=1; python_version < \"3.11\"",
+]
+files = [
+    {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
+    {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
+]
+
+[[package]]
+name = "pytest-cov"
+version = "5.0.0"
+requires_python = ">=3.8"
+summary = "Pytest plugin for measuring coverage."
+groups = ["test"]
+dependencies = [
+    "coverage[toml]>=5.2.1",
+    "pytest>=4.6",
+]
+files = [
+    {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
+    {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
+]
+
+[[package]]
+name = "pytest-databases"
+version = "0.7.0"
+requires_python = ">=3.8"
+summary = "Reusable database fixtures for any and all databases."
+groups = ["test"]
+dependencies = [
+    "pytest",
+]
+files = [
+    {file = "pytest_databases-0.7.0-py3-none-any.whl", hash = "sha256:887cf616455519d21d876eb2d007ac9572ea462334416062bd7ebc0afcd33d0f"},
+    {file = "pytest_databases-0.7.0.tar.gz", hash = "sha256:451794173487e92ed6166418961609fd390f515df8f8afb4ef7adecb91e847dd"},
+]
+
+[[package]]
+name = "pytest-databases"
+version = "0.7.0"
+extras = ["postgres", "redis"]
+requires_python = ">=3.8"
+summary = "Reusable database fixtures for any and all databases."
+groups = ["test"]
+dependencies = [
+    "asyncpg>=0.29.0",
+    "pytest-databases==0.7.0",
+    "redis",
+]
+files = [
+    {file = "pytest_databases-0.7.0-py3-none-any.whl", hash = "sha256:887cf616455519d21d876eb2d007ac9572ea462334416062bd7ebc0afcd33d0f"},
+    {file = "pytest_databases-0.7.0.tar.gz", hash = "sha256:451794173487e92ed6166418961609fd390f515df8f8afb4ef7adecb91e847dd"},
+]
+
+[[package]]
+name = "pytest-mock"
+version = "3.14.0"
+requires_python = ">=3.8"
+summary = "Thin-wrapper around the mock package for easier use with pytest"
+groups = ["test"]
+dependencies = [
+    "pytest>=6.2.5",
+]
+files = [
+    {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
+    {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
+]
+
+[[package]]
+name = "pytest-sugar"
+version = "1.0.0"
+summary = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)."
+groups = ["test"]
+dependencies = [
+    "packaging>=21.3",
+    "pytest>=6.2.0",
+    "termcolor>=2.1.0",
+]
+files = [
+    {file = "pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a"},
+    {file = "pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd"},
+]
+
+[[package]]
+name = "pytest-xdist"
+version = "3.6.1"
+requires_python = ">=3.8"
+summary = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
+groups = ["test"]
+dependencies = [
+    "execnet>=2.1",
+    "pytest>=7.0.0",
+]
+files = [
+    {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"},
+    {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"},
+]
+
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+summary = "Extensions to the standard Python datetime module"
+groups = ["default"]
+dependencies = [
+    "six>=1.5",
+]
+files = [
+    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+requires_python = ">=3.8"
+summary = "Read key-value pairs from a .env file and set them as environment variables"
+groups = ["default"]
+files = [
+    {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
+    {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
+]
+
+[[package]]
+name = "python-jose"
+version = "3.3.0"
+summary = "JOSE implementation in Python"
+groups = ["default"]
+dependencies = [
+    "ecdsa!=0.15",
+    "pyasn1",
+    "rsa",
+]
+files = [
+    {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"},
+    {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"},
+]
+
+[[package]]
+name = "pytz"
+version = "2024.1"
+summary = "World timezone definitions, modern and historical"
+groups = ["default"]
+files = [
+    {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
+    {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.1"
+requires_python = ">=3.6"
+summary = "YAML parser and emitter for Python"
+groups = ["default", "linting"]
+files = [
+    {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
+    {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
+    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
+    {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
+    {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
+    {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
+    {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
+    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+    {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
+    {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
+    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
+    {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+]
+
+[[package]]
+name = "redis"
+version = "5.0.6"
+requires_python = ">=3.7"
+summary = "Python client for Redis database and key-value store"
+groups = ["default", "test"]
+dependencies = [
+    "async-timeout>=4.0.3; python_full_version < \"3.11.3\"",
+]
+files = [
+    {file = "redis-5.0.6-py3-none-any.whl", hash = "sha256:c0d6d990850c627bbf7be01c5c4cbaadf67b48593e913bb71c9819c30df37eee"},
+    {file = "redis-5.0.6.tar.gz", hash = "sha256:38473cd7c6389ad3e44a91f4c3eaf6bcb8a9f746007f29bf4fb20824ff0b2197"},
+]
+
+[[package]]
+name = "redis"
+version = "5.0.6"
+extras = ["hiredis"]
+requires_python = ">=3.7"
+summary = "Python client for Redis database and key-value store"
+groups = ["default"]
+dependencies = [
+    "hiredis>=1.0.0",
+    "redis==5.0.6",
+]
+files = [
+    {file = "redis-5.0.6-py3-none-any.whl", hash = "sha256:c0d6d990850c627bbf7be01c5c4cbaadf67b48593e913bb71c9819c30df37eee"},
+    {file = "redis-5.0.6.tar.gz", hash = "sha256:38473cd7c6389ad3e44a91f4c3eaf6bcb8a9f746007f29bf4fb20824ff0b2197"},
+]
+
+[[package]]
+name = "requests"
+version = "2.32.3"
+requires_python = ">=3.8"
+summary = "Python HTTP for Humans."
+groups = ["docs"]
+dependencies = [
+    "certifi>=2017.4.17",
+    "charset-normalizer<4,>=2",
+    "idna<4,>=2.5",
+    "urllib3<3,>=1.21.1",
+]
+files = [
+    {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
+    {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
+]
+
+[[package]]
+name = "rich"
+version = "13.7.1"
+requires_python = ">=3.7.0"
+summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+groups = ["default"]
+dependencies = [
+    "markdown-it-py>=2.2.0",
+    "pygments<3.0.0,>=2.13.0",
+]
+files = [
+    {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"},
+    {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"},
+]
+
+[[package]]
+name = "rich-click"
+version = "1.8.3"
+requires_python = ">=3.7"
+summary = "Format click help output nicely with rich"
+groups = ["default"]
+dependencies = [
+    "click>=7",
+    "rich>=10.7",
+    "typing-extensions",
+]
+files = [
+    {file = "rich_click-1.8.3-py3-none-any.whl", hash = "sha256:636d9c040d31c5eee242201b5bf4f2d358bfae4db14bb22ec1cafa717cfd02cd"},
+    {file = "rich_click-1.8.3.tar.gz", hash = "sha256:6d75bdfa7aa9ed2c467789a0688bc6da23fbe3a143e19aa6ad3f8bac113d2ab3"},
+]
+
+[[package]]
+name = "rsa"
+version = "4.9"
+requires_python = ">=3.6,<4"
+summary = "Pure-Python RSA implementation"
+groups = ["default"]
+dependencies = [
+    "pyasn1>=0.1.3",
+]
+files = [
+    {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
+    {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
+]
+
+[[package]]
+name = "ruamel-yaml"
+version = "0.18.6"
+requires_python = ">=3.7"
+summary = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
+groups = ["docs"]
+dependencies = [
+    "ruamel-yaml-clib>=0.2.7; platform_python_implementation == \"CPython\" and python_version < \"3.13\"",
+]
+files = [
+    {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"},
+    {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"},
+]
+
+[[package]]
+name = "ruamel-yaml-clib"
+version = "0.2.8"
+requires_python = ">=3.6"
+summary = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
+groups = ["docs"]
+marker = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""
+files = [
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"},
+    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"},
+    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"},
+    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"},
+    {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"},
+]
+
+[[package]]
+name = "ruff"
+version = "0.5.0"
+requires_python = ">=3.7"
+summary = "An extremely fast Python linter and code formatter, written in Rust."
+groups = ["linting"]
+files = [
+    {file = "ruff-0.5.0-py3-none-linux_armv6l.whl", hash = "sha256:ee770ea8ab38918f34e7560a597cc0a8c9a193aaa01bfbd879ef43cb06bd9c4c"},
+    {file = "ruff-0.5.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38f3b8327b3cb43474559d435f5fa65dacf723351c159ed0dc567f7ab735d1b6"},
+    {file = "ruff-0.5.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7594f8df5404a5c5c8f64b8311169879f6cf42142da644c7e0ba3c3f14130370"},
+    {file = "ruff-0.5.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adc7012d6ec85032bc4e9065110df205752d64010bed5f958d25dbee9ce35de3"},
+    {file = "ruff-0.5.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d505fb93b0fabef974b168d9b27c3960714d2ecda24b6ffa6a87ac432905ea38"},
+    {file = "ruff-0.5.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dc5cfd3558f14513ed0d5b70ce531e28ea81a8a3b1b07f0f48421a3d9e7d80a"},
+    {file = "ruff-0.5.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:db3ca35265de239a1176d56a464b51557fce41095c37d6c406e658cf80bbb362"},
+    {file = "ruff-0.5.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1a321c4f68809fddd9b282fab6a8d8db796b270fff44722589a8b946925a2a8"},
+    {file = "ruff-0.5.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c4dfcd8d34b143916994b3876b63d53f56724c03f8c1a33a253b7b1e6bf2a7d"},
+    {file = "ruff-0.5.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81e5facfc9f4a674c6a78c64d38becfbd5e4f739c31fcd9ce44c849f1fad9e4c"},
+    {file = "ruff-0.5.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e589e27971c2a3efff3fadafb16e5aef7ff93250f0134ec4b52052b673cf988d"},
+    {file = "ruff-0.5.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2ffbc3715a52b037bcb0f6ff524a9367f642cdc5817944f6af5479bbb2eb50e"},
+    {file = "ruff-0.5.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cd096e23c6a4f9c819525a437fa0a99d1c67a1b6bb30948d46f33afbc53596cf"},
+    {file = "ruff-0.5.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:46e193b36f2255729ad34a49c9a997d506e58f08555366b2108783b3064a0e1e"},
+    {file = "ruff-0.5.0-py3-none-win32.whl", hash = "sha256:49141d267100f5ceff541b4e06552e98527870eafa1acc9dec9139c9ec5af64c"},
+    {file = "ruff-0.5.0-py3-none-win_amd64.whl", hash = "sha256:e9118f60091047444c1b90952736ee7b1792910cab56e9b9a9ac20af94cd0440"},
+    {file = "ruff-0.5.0-py3-none-win_arm64.whl", hash = "sha256:ed5c4df5c1fb4518abcb57725b576659542bdbe93366f4f329e8f398c4b71178"},
+    {file = "ruff-0.5.0.tar.gz", hash = "sha256:eb641b5873492cf9bd45bc9c5ae5320648218e04386a5f0c264ad6ccce8226a1"},
+]
+
+[[package]]
+name = "saq"
+version = "0.12.8"
+summary = "Distributed Python job queue with asyncio and redis"
+groups = ["default"]
+dependencies = [
+    "croniter>=0.3.18",
+    "redis<6.0,>=4.2",
+]
+files = [
+    {file = "saq-0.12.8-py3-none-any.whl", hash = "sha256:cda1a10677b721bc890079f007dd864ec5c0d80b992597223722438356fafba6"},
+    {file = "saq-0.12.8.tar.gz", hash = "sha256:6d4c7787610bd27ace3d111f74839bbaf9f77167eb8edbbd36c213e3b5da2036"},
+]
+
+[[package]]
+name = "setproctitle"
+version = "1.3.3"
+requires_python = ">=3.7"
+summary = "A Python module to customize the process title"
+groups = ["default"]
+files = [
+    {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:897a73208da48db41e687225f355ce993167079eda1260ba5e13c4e53be7f754"},
+    {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c331e91a14ba4076f88c29c777ad6b58639530ed5b24b5564b5ed2fd7a95452"},
+    {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbbd6c7de0771c84b4aa30e70b409565eb1fc13627a723ca6be774ed6b9d9fa3"},
+    {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c05ac48ef16ee013b8a326c63e4610e2430dbec037ec5c5b58fcced550382b74"},
+    {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1342f4fdb37f89d3e3c1c0a59d6ddbedbde838fff5c51178a7982993d238fe4f"},
+    {file = "setproctitle-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc74e84fdfa96821580fb5e9c0b0777c1c4779434ce16d3d62a9c4d8c710df39"},
+    {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9617b676b95adb412bb69645d5b077d664b6882bb0d37bfdafbbb1b999568d85"},
+    {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6a249415f5bb88b5e9e8c4db47f609e0bf0e20a75e8d744ea787f3092ba1f2d0"},
+    {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:38da436a0aaace9add67b999eb6abe4b84397edf4a78ec28f264e5b4c9d53cd5"},
+    {file = "setproctitle-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:da0d57edd4c95bf221b2ebbaa061e65b1788f1544977288bdf95831b6e44e44d"},
+    {file = "setproctitle-1.3.3-cp310-cp310-win32.whl", hash = "sha256:a1fcac43918b836ace25f69b1dca8c9395253ad8152b625064415b1d2f9be4fb"},
+    {file = "setproctitle-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:200620c3b15388d7f3f97e0ae26599c0c378fdf07ae9ac5a13616e933cbd2086"},
+    {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:334f7ed39895d692f753a443102dd5fed180c571eb6a48b2a5b7f5b3564908c8"},
+    {file = "setproctitle-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:950f6476d56ff7817a8fed4ab207727fc5260af83481b2a4b125f32844df513a"},
+    {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:195c961f54a09eb2acabbfc90c413955cf16c6e2f8caa2adbf2237d1019c7dd8"},
+    {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f05e66746bf9fe6a3397ec246fe481096664a9c97eb3fea6004735a4daf867fd"},
+    {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5901a31012a40ec913265b64e48c2a4059278d9f4e6be628441482dd13fb8b5"},
+    {file = "setproctitle-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64286f8a995f2cd934082b398fc63fca7d5ffe31f0e27e75b3ca6b4efda4e353"},
+    {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:184239903bbc6b813b1a8fc86394dc6ca7d20e2ebe6f69f716bec301e4b0199d"},
+    {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:664698ae0013f986118064b6676d7dcd28fefd0d7d5a5ae9497cbc10cba48fa5"},
+    {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e5119a211c2e98ff18b9908ba62a3bd0e3fabb02a29277a7232a6fb4b2560aa0"},
+    {file = "setproctitle-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:417de6b2e214e837827067048f61841f5d7fc27926f2e43954567094051aff18"},
+    {file = "setproctitle-1.3.3-cp311-cp311-win32.whl", hash = "sha256:6a143b31d758296dc2f440175f6c8e0b5301ced3b0f477b84ca43cdcf7f2f476"},
+    {file = "setproctitle-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a680d62c399fa4b44899094027ec9a1bdaf6f31c650e44183b50d4c4d0ccc085"},
+    {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d4460795a8a7a391e3567b902ec5bdf6c60a47d791c3b1d27080fc203d11c9dc"},
+    {file = "setproctitle-1.3.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bdfd7254745bb737ca1384dee57e6523651892f0ea2a7344490e9caefcc35e64"},
+    {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477d3da48e216d7fc04bddab67b0dcde633e19f484a146fd2a34bb0e9dbb4a1e"},
+    {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ab2900d111e93aff5df9fddc64cf51ca4ef2c9f98702ce26524f1acc5a786ae7"},
+    {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088b9efc62d5aa5d6edf6cba1cf0c81f4488b5ce1c0342a8b67ae39d64001120"},
+    {file = "setproctitle-1.3.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6d50252377db62d6a0bb82cc898089916457f2db2041e1d03ce7fadd4a07381"},
+    {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:87e668f9561fd3a457ba189edfc9e37709261287b52293c115ae3487a24b92f6"},
+    {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:287490eb90e7a0ddd22e74c89a92cc922389daa95babc833c08cf80c84c4df0a"},
+    {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe1c49486109f72d502f8be569972e27f385fe632bd8895f4730df3c87d5ac8"},
+    {file = "setproctitle-1.3.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4a6ba2494a6449b1f477bd3e67935c2b7b0274f2f6dcd0f7c6aceae10c6c6ba3"},
+    {file = "setproctitle-1.3.3-cp312-cp312-win32.whl", hash = "sha256:2df2b67e4b1d7498632e18c56722851ba4db5d6a0c91aaf0fd395111e51cdcf4"},
+    {file = "setproctitle-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:f38d48abc121263f3b62943f84cbaede05749047e428409c2c199664feb6abc7"},
+    {file = "setproctitle-1.3.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6b9e62ddb3db4b5205c0321dd69a406d8af9ee1693529d144e86bd43bcb4b6c0"},
+    {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e3b99b338598de0bd6b2643bf8c343cf5ff70db3627af3ca427a5e1a1a90dd9"},
+    {file = "setproctitle-1.3.3-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ae9a02766dad331deb06855fb7a6ca15daea333b3967e214de12cfae8f0ef5"},
+    {file = "setproctitle-1.3.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:200ede6fd11233085ba9b764eb055a2a191fb4ffb950c68675ac53c874c22e20"},
+    {file = "setproctitle-1.3.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0d3a953c50776751e80fe755a380a64cb14d61e8762bd43041ab3f8cc436092f"},
+    {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e08e232b78ba3ac6bc0d23ce9e2bee8fad2be391b7e2da834fc9a45129eb87"},
+    {file = "setproctitle-1.3.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1da82c3e11284da4fcbf54957dafbf0655d2389cd3d54e4eaba636faf6d117a"},
+    {file = "setproctitle-1.3.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:aeaa71fb9568ebe9b911ddb490c644fbd2006e8c940f21cb9a1e9425bd709574"},
+    {file = "setproctitle-1.3.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:59335d000c6250c35989394661eb6287187854e94ac79ea22315469ee4f4c244"},
+    {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3ba57029c9c50ecaf0c92bb127224cc2ea9fda057b5d99d3f348c9ec2855ad3"},
+    {file = "setproctitle-1.3.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d876d355c53d975c2ef9c4f2487c8f83dad6aeaaee1b6571453cb0ee992f55f6"},
+    {file = "setproctitle-1.3.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:224602f0939e6fb9d5dd881be1229d485f3257b540f8a900d4271a2c2aa4e5f4"},
+    {file = "setproctitle-1.3.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d7f27e0268af2d7503386e0e6be87fb9b6657afd96f5726b733837121146750d"},
+    {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5e7266498cd31a4572378c61920af9f6b4676a73c299fce8ba93afd694f8ae7"},
+    {file = "setproctitle-1.3.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33c5609ad51cd99d388e55651b19148ea99727516132fb44680e1f28dd0d1de9"},
+    {file = "setproctitle-1.3.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:eae8988e78192fd1a3245a6f4f382390b61bce6cfcc93f3809726e4c885fa68d"},
+    {file = "setproctitle-1.3.3.tar.gz", hash = "sha256:c913e151e7ea01567837ff037a23ca8740192880198b7fbb90b16d181607caae"},
+]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+summary = "Python 2 and 3 compatibility utilities"
+groups = ["default", "docs"]
+files = [
+    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
+
+[[package]]
+name = "slotscheck"
+version = "0.19.0"
+requires_python = ">=3.8.1"
+summary = "Ensure your __slots__ are working properly."
+groups = ["linting"]
+dependencies = [
+    "click<9.0,>=8.0",
+    "tomli<3.0.0,>=0.2.6; python_version < \"3.11\"",
+]
+files = [
+    {file = "slotscheck-0.19.0-py3-none-any.whl", hash = "sha256:53fbc9befacb331a2ab25b385004d99ea72b5cee4f3deb6da676c8f08d0fcdd9"},
+    {file = "slotscheck-0.19.0.tar.gz", hash = "sha256:707b4339d280664139ffd2c78fef99b3028e215f13cc77244147dd6126fe2e0d"},
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+requires_python = ">=3.7"
+summary = "Sniff out which async library your code is running under"
+groups = ["default", "docs"]
+files = [
+    {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+    {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
+[[package]]
+name = "snowballstemmer"
+version = "2.2.0"
+summary = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+groups = ["docs"]
+files = [
+    {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+    {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
+
+[[package]]
+name = "soupsieve"
+version = "2.5"
+requires_python = ">=3.8"
+summary = "A modern CSS selector implementation for Beautiful Soup."
+groups = ["docs"]
+files = [
+    {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
+    {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
+]
+
+[[package]]
+name = "sphinx"
+version = "7.3.7"
+requires_python = ">=3.9"
+summary = "Python documentation generator"
+groups = ["docs"]
+dependencies = [
+    "Jinja2>=3.0",
+    "Pygments>=2.14",
+    "alabaster~=0.7.14",
+    "babel>=2.9",
+    "colorama>=0.4.5; sys_platform == \"win32\"",
+    "docutils<0.22,>=0.18.1",
+    "imagesize>=1.3",
+    "packaging>=21.0",
+    "requests>=2.25.0",
+    "snowballstemmer>=2.0",
+    "sphinxcontrib-applehelp",
+    "sphinxcontrib-devhelp",
+    "sphinxcontrib-htmlhelp>=2.0.0",
+    "sphinxcontrib-jsmath",
+    "sphinxcontrib-qthelp",
+    "sphinxcontrib-serializinghtml>=1.1.9",
+    "tomli>=2; python_version < \"3.11\"",
+]
+files = [
+    {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"},
+    {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"},
+]
+
+[[package]]
+name = "sphinx-autobuild"
+version = "2024.4.16"
+requires_python = ">=3.9"
+summary = "Rebuild Sphinx documentation on changes, with hot reloading in the browser."
+groups = ["docs"]
+dependencies = [
+    "colorama",
+    "sphinx",
+    "starlette>=0.35",
+    "uvicorn>=0.25",
+    "watchfiles>=0.20",
+    "websockets>=11",
+]
+files = [
+    {file = "sphinx_autobuild-2024.4.16-py3-none-any.whl", hash = "sha256:f2522779d30fcbf0253e09714f274ce8c608cb6ebcd67922b1c54de59faba702"},
+    {file = "sphinx_autobuild-2024.4.16.tar.gz", hash = "sha256:1c0ed37a1970eed197f9c5a66d65759e7c4e4cba7b5a5d77940752bf1a59f2c7"},
+]
+
+[[package]]
+name = "sphinx-autodoc-typehints"
+version = "2.2.2"
+requires_python = ">=3.9"
+summary = "Type hints (PEP 484) support for the Sphinx autodoc extension"
+groups = ["docs"]
+dependencies = [
+    "sphinx>=7.3.5",
+]
+files = [
+    {file = "sphinx_autodoc_typehints-2.2.2-py3-none-any.whl", hash = "sha256:b98337a8530c95b73ba0c65465847a8ab0a13403bdc81294d5ef396bbd1f783e"},
+    {file = "sphinx_autodoc_typehints-2.2.2.tar.gz", hash = "sha256:128e600eeef63b722f3d8dac6403594592c8cade3ba66fd11dcb997465ee259d"},
+]
+
+[[package]]
+name = "sphinx-click"
+version = "6.0.0"
+requires_python = ">=3.8"
+summary = "Sphinx extension that automatically documents click applications"
+groups = ["docs"]
+dependencies = [
+    "click>=8.0",
+    "docutils",
+    "sphinx>=4.0",
+]
+files = [
+    {file = "sphinx_click-6.0.0-py3-none-any.whl", hash = "sha256:1e0a3c83bcb7c55497751b19d07ebe56b5d7b85eb76dd399cf9061b497adc317"},
+    {file = "sphinx_click-6.0.0.tar.gz", hash = "sha256:f5d664321dc0c6622ff019f1e1c84e58ce0cecfddeb510e004cf60c2a3ab465b"},
+]
+
+[[package]]
+name = "sphinx-copybutton"
+version = "0.5.2"
+requires_python = ">=3.7"
+summary = "Add a copy button to each of your code cells."
+groups = ["docs"]
+dependencies = [
+    "sphinx>=1.8",
+]
+files = [
+    {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"},
+    {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"},
+]
+
+[[package]]
+name = "sphinx-design"
+version = "0.6.0"
+requires_python = ">=3.9"
+summary = "A sphinx extension for designing beautiful, view size responsive web components."
+groups = ["docs"]
+dependencies = [
+    "sphinx<8,>=5",
+]
+files = [
+    {file = "sphinx_design-0.6.0-py3-none-any.whl", hash = "sha256:e9bd07eecec82eb07ff72cb50fc3624e186b04f5661270bc7b62db86c7546e95"},
+    {file = "sphinx_design-0.6.0.tar.gz", hash = "sha256:ec8e3c5c59fed4049b3a5a2e209360feab31829346b5f6a0c7c342b894082192"},
+]
+
+[[package]]
+name = "sphinx-jinja2-compat"
+version = "0.3.0"
+requires_python = ">=3.6"
+summary = "Patches Jinja2 v3 to restore compatibility with earlier Sphinx versions."
+groups = ["docs"]
+dependencies = [
+    "jinja2>=2.10",
+    "markupsafe>=1",
+    "standard-imghdr==3.10.14; python_version >= \"3.13\"",
+]
+files = [
+    {file = "sphinx_jinja2_compat-0.3.0-py3-none-any.whl", hash = "sha256:b1e4006d8e1ea31013fa9946d1b075b0c8d2a42c6e3425e63542c1e9f8be9084"},
+    {file = "sphinx_jinja2_compat-0.3.0.tar.gz", hash = "sha256:f3c1590b275f42e7a654e081db5e3e5fb97f515608422bde94015ddf795dfe7c"},
+]
+
+[[package]]
+name = "sphinx-prompt"
+version = "1.8.0"
+requires_python = ">=3.9,<4.0"
+summary = "Sphinx directive to add unselectable prompt"
+groups = ["docs"]
+dependencies = [
+    "Sphinx<8.0.0,>=7.0.0",
+    "docutils",
+    "pygments",
+]
+files = [
+    {file = "sphinx_prompt-1.8.0-py3-none-any.whl", hash = "sha256:369ecc633f0711886f9b3a078c83264245be1adf46abeeb9b88b5519e4b51007"},
+    {file = "sphinx_prompt-1.8.0.tar.gz", hash = "sha256:47482f86fcec29662fdfd23e7c04ef03582714195d01f5d565403320084372ed"},
+]
+
+[[package]]
+name = "sphinx-tabs"
+version = "3.4.5"
+requires_python = "~=3.7"
+summary = "Tabbed views for Sphinx"
+groups = ["docs"]
+dependencies = [
+    "docutils",
+    "pygments",
+    "sphinx",
+]
+files = [
+    {file = "sphinx-tabs-3.4.5.tar.gz", hash = "sha256:ba9d0c1e3e37aaadd4b5678449eb08176770e0fc227e769b6ce747df3ceea531"},
+    {file = "sphinx_tabs-3.4.5-py3-none-any.whl", hash = "sha256:92cc9473e2ecf1828ca3f6617d0efc0aa8acb06b08c56ba29d1413f2f0f6cf09"},
+]
+
+[[package]]
+name = "sphinx-toolbox"
+version = "3.6.0"
+requires_python = ">=3.7"
+summary = "Box of handy tools for Sphinx 🧰 📔"
+groups = ["docs"]
+dependencies = [
+    "apeye>=0.4.0",
+    "autodocsumm>=0.2.0",
+    "beautifulsoup4>=4.9.1",
+    "cachecontrol[filecache]>=0.13.0",
+    "dict2css>=0.2.3",
+    "docutils>=0.16",
+    "domdf-python-tools>=2.9.0",
+    "filelock>=3.8.0",
+    "html5lib>=1.1",
+    "ruamel-yaml>=0.16.12",
+    "sphinx-autodoc-typehints>=1.11.1",
+    "sphinx-jinja2-compat>=0.1.0",
+    "sphinx-prompt>=1.1.0",
+    "sphinx-tabs<3.5.0,>=1.2.1",
+    "sphinx>=3.2.0",
+    "tabulate>=0.8.7",
+    "typing-extensions!=3.10.0.1,>=3.7.4.3",
+]
+files = [
+    {file = "sphinx_toolbox-3.6.0-py3-none-any.whl", hash = "sha256:33db016958e29bf727fb416b1f17375635b4dfeef2b521e86dfa9b351c068e9d"},
+    {file = "sphinx_toolbox-3.6.0.tar.gz", hash = "sha256:4a124a9fca1d0ad881e57130f38c785196648ba6fd4fa672cffaf4127df954db"},
+]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "1.0.8"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
+groups = ["docs"]
+files = [
+    {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"},
+    {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"},
+]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "1.0.6"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
+groups = ["docs"]
+files = [
+    {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"},
+    {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"},
+]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.0.5"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+groups = ["docs"]
+files = [
+    {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"},
+    {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"},
+]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+requires_python = ">=3.5"
+summary = "A sphinx extension which renders display math in HTML via JavaScript"
+groups = ["docs"]
+files = [
+    {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+    {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
+
+[[package]]
+name = "sphinxcontrib-mermaid"
+version = "0.9.2"
+requires_python = ">=3.7"
+summary = "Mermaid diagrams in yours Sphinx powered docs"
+groups = ["docs"]
+files = [
+    {file = "sphinxcontrib-mermaid-0.9.2.tar.gz", hash = "sha256:252ef13dd23164b28f16d8b0205cf184b9d8e2b714a302274d9f59eb708e77af"},
+    {file = "sphinxcontrib_mermaid-0.9.2-py3-none-any.whl", hash = "sha256:6795a72037ca55e65663d2a2c1a043d636dc3d30d418e56dd6087d1459d98a5d"},
+]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "1.0.7"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents"
+groups = ["docs"]
+files = [
+    {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"},
+    {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"},
+]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "1.1.10"
+requires_python = ">=3.9"
+summary = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)"
+groups = ["docs"]
+files = [
+    {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"},
+    {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"},
+]
+
+[[package]]
+name = "sqlalchemy"
+version = "2.0.31"
+requires_python = ">=3.7"
+summary = "Database Abstraction Library"
+groups = ["default"]
+dependencies = [
+    "greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"",
+    "typing-extensions>=4.6.0",
+]
+files = [
+    {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f2a213c1b699d3f5768a7272de720387ae0122f1becf0901ed6eaa1abd1baf6c"},
+    {file = "SQLAlchemy-2.0.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9fea3d0884e82d1e33226935dac990b967bef21315cbcc894605db3441347443"},
+    {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ad7f221d8a69d32d197e5968d798217a4feebe30144986af71ada8c548e9fa"},
+    {file = "SQLAlchemy-2.0.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2bee229715b6366f86a95d497c347c22ddffa2c7c96143b59a2aa5cc9eebbc"},
+    {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cd5b94d4819c0c89280b7c6109c7b788a576084bf0a480ae17c227b0bc41e109"},
+    {file = "SQLAlchemy-2.0.31-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:750900a471d39a7eeba57580b11983030517a1f512c2cb287d5ad0fcf3aebd58"},
+    {file = "SQLAlchemy-2.0.31-cp310-cp310-win32.whl", hash = "sha256:7bd112be780928c7f493c1a192cd8c5fc2a2a7b52b790bc5a84203fb4381c6be"},
+    {file = "SQLAlchemy-2.0.31-cp310-cp310-win_amd64.whl", hash = "sha256:5a48ac4d359f058474fadc2115f78a5cdac9988d4f99eae44917f36aa1476327"},
+    {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f68470edd70c3ac3b6cd5c2a22a8daf18415203ca1b036aaeb9b0fb6f54e8298"},
+    {file = "SQLAlchemy-2.0.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e2c38c2a4c5c634fe6c3c58a789712719fa1bf9b9d6ff5ebfce9a9e5b89c1ca"},
+    {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd15026f77420eb2b324dcb93551ad9c5f22fab2c150c286ef1dc1160f110203"},
+    {file = "SQLAlchemy-2.0.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2196208432deebdfe3b22185d46b08f00ac9d7b01284e168c212919891289396"},
+    {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:352b2770097f41bff6029b280c0e03b217c2dcaddc40726f8f53ed58d8a85da4"},
+    {file = "SQLAlchemy-2.0.31-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56d51ae825d20d604583f82c9527d285e9e6d14f9a5516463d9705dab20c3740"},
+    {file = "SQLAlchemy-2.0.31-cp311-cp311-win32.whl", hash = "sha256:6e2622844551945db81c26a02f27d94145b561f9d4b0c39ce7bfd2fda5776dac"},
+    {file = "SQLAlchemy-2.0.31-cp311-cp311-win_amd64.whl", hash = "sha256:ccaf1b0c90435b6e430f5dd30a5aede4764942a695552eb3a4ab74ed63c5b8d3"},
+    {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3b74570d99126992d4b0f91fb87c586a574a5872651185de8297c6f90055ae42"},
+    {file = "SQLAlchemy-2.0.31-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f77c4f042ad493cb8595e2f503c7a4fe44cd7bd59c7582fd6d78d7e7b8ec52c"},
+    {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd1591329333daf94467e699e11015d9c944f44c94d2091f4ac493ced0119449"},
+    {file = "SQLAlchemy-2.0.31-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74afabeeff415e35525bf7a4ecdab015f00e06456166a2eba7590e49f8db940e"},
+    {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b9c01990d9015df2c6f818aa8f4297d42ee71c9502026bb074e713d496e26b67"},
+    {file = "SQLAlchemy-2.0.31-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66f63278db425838b3c2b1c596654b31939427016ba030e951b292e32b99553e"},
+    {file = "SQLAlchemy-2.0.31-cp312-cp312-win32.whl", hash = "sha256:0b0f658414ee4e4b8cbcd4a9bb0fd743c5eeb81fc858ca517217a8013d282c96"},
+    {file = "SQLAlchemy-2.0.31-cp312-cp312-win_amd64.whl", hash = "sha256:fa4b1af3e619b5b0b435e333f3967612db06351217c58bfb50cee5f003db2a5a"},
+    {file = "SQLAlchemy-2.0.31-py3-none-any.whl", hash = "sha256:69f3e3c08867a8e4856e92d7afb618b95cdee18e0bc1647b77599722c9a28911"},
+    {file = "SQLAlchemy-2.0.31.tar.gz", hash = "sha256:b607489dd4a54de56984a0c7656247504bd5523d9d0ba799aef59d4add009484"},
+]
+
+[[package]]
+name = "standard-imghdr"
+version = "3.10.14"
+summary = "Standard library imghdr redistribution. \"dead battery\"."
+groups = ["docs"]
+marker = "python_version >= \"3.13\""
+files = [
+    {file = "standard_imghdr-3.10.14-py3-none-any.whl", hash = "sha256:cdf6883163349624dee9a81d2853a20260337c4cd41c04e99c082e01833a08e2"},
+    {file = "standard_imghdr-3.10.14.tar.gz", hash = "sha256:2598fe2e7c540dbda34b233295e10957ab8dc8ac6f3bd9eaa8d38be167232e52"},
+]
+
+[[package]]
+name = "starlette"
+version = "0.37.2"
+requires_python = ">=3.8"
+summary = "The little ASGI library that shines."
+groups = ["docs"]
+dependencies = [
+    "anyio<5,>=3.4.0",
+]
+files = [
+    {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"},
+    {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"},
+]
+
+[[package]]
+name = "structlog"
+version = "24.2.0"
+requires_python = ">=3.8"
+summary = "Structured Logging for Python"
+groups = ["default"]
+files = [
+    {file = "structlog-24.2.0-py3-none-any.whl", hash = "sha256:983bd49f70725c5e1e3867096c0c09665918936b3db27341b41d294283d7a48a"},
+    {file = "structlog-24.2.0.tar.gz", hash = "sha256:0e3fe74924a6d8857d3f612739efb94c72a7417d7c7c008d12276bca3b5bf13b"},
+]
+
+[[package]]
+name = "tabulate"
+version = "0.9.0"
+requires_python = ">=3.7"
+summary = "Pretty-print tabular data"
+groups = ["docs"]
+files = [
+    {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
+    {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
+]
+
+[[package]]
+name = "termcolor"
+version = "2.4.0"
+requires_python = ">=3.8"
+summary = "ANSI color formatting for output in terminal"
+groups = ["test"]
+files = [
+    {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"},
+    {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"},
+]
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+requires_python = ">=3.7"
+summary = "A lil' TOML parser"
+groups = ["docs", "linting", "test"]
+marker = "python_version < \"3.11\""
+files = [
+    {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+    {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
+[[package]]
+name = "tomlkit"
+version = "0.12.5"
+requires_python = ">=3.7"
+summary = "Style preserving TOML library"
+groups = ["linting"]
+files = [
+    {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"},
+    {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"},
+]
+
+[[package]]
+name = "types-cffi"
+version = "1.16.0.20240331"
+requires_python = ">=3.8"
+summary = "Typing stubs for cffi"
+groups = ["linting"]
+dependencies = [
+    "types-setuptools",
+]
+files = [
+    {file = "types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"},
+    {file = "types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0"},
+]
+
+[[package]]
+name = "types-click"
+version = "7.1.8"
+summary = "Typing stubs for click"
+groups = ["linting"]
+files = [
+    {file = "types-click-7.1.8.tar.gz", hash = "sha256:b6604968be6401dc516311ca50708a0a28baa7a0cb840efd7412f0dbbff4e092"},
+    {file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"},
+]
+
+[[package]]
+name = "types-passlib"
+version = "1.7.7.20240327"
+requires_python = ">=3.8"
+summary = "Typing stubs for passlib"
+groups = ["linting"]
+files = [
+    {file = "types-passlib-1.7.7.20240327.tar.gz", hash = "sha256:4cce6a1a3a6afee9fc4728b4d9784300764ac2be747f5bcc01646d904b85f4bb"},
+    {file = "types_passlib-1.7.7.20240327-py3-none-any.whl", hash = "sha256:3a3b7f4258b71034d2e2f4f307d6810f9904f906cdf375514c8bdbdb28a4ad23"},
+]
+
+[[package]]
+name = "types-pyasn1"
+version = "0.6.0.20240402"
+requires_python = ">=3.8"
+summary = "Typing stubs for pyasn1"
+groups = ["linting"]
+files = [
+    {file = "types-pyasn1-0.6.0.20240402.tar.gz", hash = "sha256:5d54dcb33f69dd269071ca098e923ac20c5f03c814631fa7f3ed9ee035a5da3a"},
+    {file = "types_pyasn1-0.6.0.20240402-py3-none-any.whl", hash = "sha256:848d01e7313c200acc035a8b3d377fe7b2aecbe77f2be49eb160a7f82835aaaf"},
+]
+
+[[package]]
+name = "types-pyopenssl"
+version = "24.1.0.20240425"
+requires_python = ">=3.8"
+summary = "Typing stubs for pyOpenSSL"
+groups = ["linting"]
+dependencies = [
+    "cryptography>=35.0.0",
+    "types-cffi",
+]
+files = [
+    {file = "types-pyOpenSSL-24.1.0.20240425.tar.gz", hash = "sha256:0a7e82626c1983dc8dc59292bf20654a51c3c3881bcbb9b337c1da6e32f0204e"},
+    {file = "types_pyOpenSSL-24.1.0.20240425-py3-none-any.whl", hash = "sha256:f51a156835555dd2a1f025621e8c4fbe7493470331afeef96884d1d29bf3a473"},
+]
+
+[[package]]
+name = "types-python-jose"
+version = "3.3.4.20240106"
+requires_python = ">=3.8"
+summary = "Typing stubs for python-jose"
+groups = ["linting"]
+dependencies = [
+    "types-pyasn1",
+]
+files = [
+    {file = "types-python-jose-3.3.4.20240106.tar.gz", hash = "sha256:b18cf8c5080bbfe1ef7c3b707986435d9efca3e90889acb6a06f65e06bc3405a"},
+    {file = "types_python_jose-3.3.4.20240106-py3-none-any.whl", hash = "sha256:b515a6c0c61f5e2a53bc93e3a2b024cbd42563e2e19cbde9fd1c2cc2cfe77ccc"},
+]
+
+[[package]]
+name = "types-pyyaml"
+version = "6.0.12.20240311"
+requires_python = ">=3.8"
+summary = "Typing stubs for PyYAML"
+groups = ["linting"]
+files = [
+    {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"},
+    {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"},
+]
+
+[[package]]
+name = "types-redis"
+version = "4.6.0.20240425"
+requires_python = ">=3.8"
+summary = "Typing stubs for redis"
+groups = ["linting"]
+dependencies = [
+    "cryptography>=35.0.0",
+    "types-pyOpenSSL",
+]
+files = [
+    {file = "types-redis-4.6.0.20240425.tar.gz", hash = "sha256:9402a10ee931d241fdfcc04592ebf7a661d7bb92a8dea631279f0d8acbcf3a22"},
+    {file = "types_redis-4.6.0.20240425-py3-none-any.whl", hash = "sha256:ac5bc19e8f5997b9e76ad5d9cf15d0392d9f28cf5fc7746ea4a64b989c45c6a8"},
+]
+
+[[package]]
+name = "types-setuptools"
+version = "70.0.0.20240524"
+requires_python = ">=3.8"
+summary = "Typing stubs for setuptools"
+groups = ["linting"]
+files = [
+    {file = "types-setuptools-70.0.0.20240524.tar.gz", hash = "sha256:e31fee7b9d15ef53980526579ac6089b3ae51a005a281acf97178e90ac71aff6"},
+    {file = "types_setuptools-70.0.0.20240524-py3-none-any.whl", hash = "sha256:8f5379b9948682d72a9ab531fbe52932e84c4f38deda570255f9bae3edd766bc"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.12.2"
+requires_python = ">=3.8"
+summary = "Backported and Experimental Type Hints for Python 3.8+"
+groups = ["default", "docs", "linting"]
+files = [
+    {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+    {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
+]
+
+[[package]]
+name = "urllib3"
+version = "2.2.2"
+requires_python = ">=3.8"
+summary = "HTTP library with thread-safe connection pooling, file post, and more."
+groups = ["docs"]
+files = [
+    {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
+    {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+]
+
+[[package]]
+name = "uuid-utils"
+version = "0.8.0"
+requires_python = ">=3.8"
+summary = "Drop-in replacement for Python UUID in Rust"
+groups = ["default"]
+files = [
+    {file = "uuid_utils-0.8.0-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:79c5cb8e8edafe018d818b6ae702a1cc58c77177f48a6c52138293c383072a11"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:df9db6fc543854266cd21cc232b6021f49b062be1e1fedce6943ef073f767c9d"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:235508055a1ecc17fa9e4623d36e70d9d5769b36dbe4fa63939535c4bfddb0d3"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:761bdec7e9dba866ef0ba6e3cf1998566a48929d08f450ee20c8a26cd4d81e75"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8c11d0bff47b43d556377862877f61d2cb239fdfdcb594a25763a84254de8f3"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8c84c3f4f43b9242327e955696b7760b153112eafe3e168cefc98513b6c001b"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93bf94e643ef57254a2e68fda5ba1019b55841ac96922f3a316d4207390859b"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7f3a3f6f8a35b2d5dd369e1300970424dd4169d6da1c7fe6225047e16dbbc6af"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:752ae76423a77da3eafcce1cd0e0b2065d199245dedb53d06821cfdc772aee21"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a7bc630afb8e6173b92b900d5648c396bf1c23570f13f9283f846e88f13ddfda"},
+    {file = "uuid_utils-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6277de886cf525f41b5092091a514c08664a1d50f5103e077afeb9b256287625"},
+    {file = "uuid_utils-0.8.0-cp310-none-win32.whl", hash = "sha256:fa5595b8cb49cb950d567e1a4b9e3cc709061cb8bad91102c3331b84a440c82c"},
+    {file = "uuid_utils-0.8.0-cp310-none-win_amd64.whl", hash = "sha256:ab65787780ffc66e65590025b7da4876e7434086729e9809dfffcbee8cc865e4"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3659dbfbb02e0992fcc8b3e569e8fdaf7c6a9c557641cd759ee0ca0750f43a3c"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4f88d378046712158c6b2bae759a79b9d423d6ef31471e7d1fc479171b948b53"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3d860656a11ad3e12b81dc3529cbba0991188a38b8aac364b02be0bfe0c3d6a"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:248a62fdb3f8ca7b6b16978c92e1923252ed3910040154cfdbb891460aa4dc90"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49993e9a0157ecfc3751785461048515ff5fed8cd2e6fa74c91c469157f9a79c"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a5d1268fadb2c2d06a44f6394cf251e0237136a59a85cfc957587ae7b5355d1"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5acda5a1c10a69cfa8fe652bbe916af8b7bbb1eec495e3040f435ecc13f1180b"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:78c17a0d51918ffb4b5f3934ab84a19dc340743da7a2db4257d425328eb35df8"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d30c95a5f50031c9d7e220ce51dd48c7ae8d044414715ad1842c41a35a2b004f"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a6eaa3a03c6d219b79501c096cd06f08a91f9a707a1f380a96acc7f9f1620ba3"},
+    {file = "uuid_utils-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:98dc793004cd69c36e03a0654c0b288719ab0545fbbd59274511e3d25dcf6725"},
+    {file = "uuid_utils-0.8.0-cp311-none-win32.whl", hash = "sha256:b9c51ca726721bd9532180edaeea57026fdfeaacaa5a7c72fae61865ffe16fda"},
+    {file = "uuid_utils-0.8.0-cp311-none-win_amd64.whl", hash = "sha256:f5fe73a1d6ca1b586881a91178a946c6b63509d705999850102aa3a7dea584bf"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9a1fe530c0762ad781470ba20464ebaf15b2263b49b74361798153e9fab4db60"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7bcd38931b35ffbeb1bb5292be72c74f41a51b2a6023093c7203ded5a257956a"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d29f8fdfcdbc97db39d45df8399161738a5b4fae73b9458bce76d6f927b88499"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:842645f00a4883edca9b53908aad57d9658cabf4837b25e3578eaae5b6935eed"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0860b1f3148d1bffe3321cd7f1fa60c202a852bf6d375f453431c1edc3593612"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d99e4b2e294c71cefa92217c14848b186898493a96e1103e4e54c13827e005ea"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8c4a7208070251e85df64bc428d464ec46da599b709ec1115692b706d3b8f38"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d9496e77aea6e98bddfee52f72b915c02e91dd2462474eb0ddd47028e3235a0c"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8028b3604a3493d4df2b1e2fdc0def0678805680ed6a66bf97c3ed84b25524ae"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d0248cedc45b9cf1ebad1e2887a30d209aba21f6e0a41571a1f22597d1349333"},
+    {file = "uuid_utils-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:989dd47a5e383af9b72fa5d74f23c16276e95f1d4f2bb40240d4bf9e7334741f"},
+    {file = "uuid_utils-0.8.0-cp312-none-win_amd64.whl", hash = "sha256:1594b8f22067ca080708e6fca13c94e62285ce03c0977841d9f1e5192b66335e"},
+    {file = "uuid_utils-0.8.0-pp38-pypy38_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0859c5b907445d8304baf381b3985f72fba366597eed9315cd0ae6d21998d8e0"},
+    {file = "uuid_utils-0.8.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d8544f800e513aa9bf9c9a9df56a218a8a050ffd109ec7a58f86c5decb7e40fa"},
+    {file = "uuid_utils-0.8.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfde725effb4b09ed871a3685bd4bcbacda99a612e43ef65c9fc33dc07c5270c"},
+    {file = "uuid_utils-0.8.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59fc6ba8bcd653af28e805c65b7d7e623000bafdfcbbd2f7483556e012d7dbc2"},
+    {file = "uuid_utils-0.8.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc4d254841950ad33f26ce5fa043a32add0cef5ac96475062d6e23aa76f91d70"},
+    {file = "uuid_utils-0.8.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd30365435561390db02576ee3a6ff9896a0fc4c5c73dcf3d02a500043776fc4"},
+    {file = "uuid_utils-0.8.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62a210eb875e480a2cd843a37719e29853350e87904672fb66b6798eb4d140b7"},
+    {file = "uuid_utils-0.8.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:07c9cb3c467ed710c9f3bfbaeb2d2594012a661d42d5e5b63e228ad80d4c4eaf"},
+    {file = "uuid_utils-0.8.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2c414f2cc72e0139f4acff3ab6936a61bc5e37dff42cb66b07948dcd5d646c85"},
+    {file = "uuid_utils-0.8.0-pp39-pypy39_pp73-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:91e34a233b0213f9871a2f91c2398794a0a8c08b749936f77c4ddb466402aceb"},
+    {file = "uuid_utils-0.8.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ec4e5b9e60d7a602b3e9dcd058fe9ae3d045af02b4cb2d3a125311de7dbc8e8b"},
+    {file = "uuid_utils-0.8.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3e2bf7536ea1fc7492b94da7d081c23363047aac167a453df69498fe9bdf5a1"},
+    {file = "uuid_utils-0.8.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ea2754b941addb4ef04c958aa4c21d2d6526cd2003d543f9dd6559e5f937a5d6"},
+    {file = "uuid_utils-0.8.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb8049d535f8d8da07ee31842a90543b616fd662afb74680cabdb7cc338e7cc7"},
+    {file = "uuid_utils-0.8.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a23ff19249d48c0e0203305348390f12e531f52986a043d095ffc9f9e605cbc"},
+    {file = "uuid_utils-0.8.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cadf3de8d588b72a63d91a0b4dac8f0470337f09e94ae7bd2c9a09913f2b886e"},
+    {file = "uuid_utils-0.8.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81e76bbd330c45398f42d3f3550d94af3d01a81ff8d95c8f8a07a27e825debb"},
+    {file = "uuid_utils-0.8.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c85809d454b3695d70d9695b14c44e7509e3e0fd748d59f7ff8491731c24c9fd"},
+    {file = "uuid_utils-0.8.0.tar.gz", hash = "sha256:f9049dd91045bb99bdbe05976faf822d1a6ddb7a4b699ba55eff656750d95ee3"},
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.30.1"
+requires_python = ">=3.8"
+summary = "The lightning-fast ASGI server."
+groups = ["docs"]
+dependencies = [
+    "click>=7.0",
+    "h11>=0.8",
+    "typing-extensions>=4.0; python_version < \"3.11\"",
+]
+files = [
+    {file = "uvicorn-0.30.1-py3-none-any.whl", hash = "sha256:cd17daa7f3b9d7a24de3617820e634d0933b69eed8e33a516071174427238c81"},
+    {file = "uvicorn-0.30.1.tar.gz", hash = "sha256:d46cd8e0fd80240baffbcd9ec1012a712938754afcf81bce56c024c1656aece8"},
+]
+
+[[package]]
+name = "uvloop"
+version = "0.19.0"
+requires_python = ">=3.8.0"
+summary = "Fast implementation of asyncio event loop on top of libuv"
+groups = ["default"]
+marker = "sys_platform != \"win32\" and platform_python_implementation == \"CPython\""
+files = [
+    {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"},
+    {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"},
+    {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"},
+    {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"},
+    {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"},
+    {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"},
+    {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"},
+    {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"},
+    {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"},
+    {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"},
+    {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"},
+    {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"},
+    {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"},
+    {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"},
+    {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"},
+    {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"},
+    {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"},
+    {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"},
+    {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"},
+]
+
+[[package]]
+name = "virtualenv"
+version = "20.26.3"
+requires_python = ">=3.7"
+summary = "Virtual Python Environment builder"
+groups = ["linting"]
+dependencies = [
+    "distlib<1,>=0.3.7",
+    "filelock<4,>=3.12.2",
+    "platformdirs<5,>=3.9.1",
+]
+files = [
+    {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"},
+    {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"},
+]
+
+[[package]]
+name = "watchfiles"
+version = "0.22.0"
+requires_python = ">=3.8"
+summary = "Simple, modern and high performance file watching and code reload in python."
+groups = ["default", "docs"]
+dependencies = [
+    "anyio>=3.0.0",
+]
+files = [
+    {file = "watchfiles-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:da1e0a8caebf17976e2ffd00fa15f258e14749db5e014660f53114b676e68538"},
+    {file = "watchfiles-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61af9efa0733dc4ca462347becb82e8ef4945aba5135b1638bfc20fad64d4f0e"},
+    {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d9188979a58a096b6f8090e816ccc3f255f137a009dd4bbec628e27696d67c1"},
+    {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bdadf6b90c099ca079d468f976fd50062905d61fae183f769637cb0f68ba59a"},
+    {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:067dea90c43bf837d41e72e546196e674f68c23702d3ef80e4e816937b0a3ffd"},
+    {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbf8a20266136507abf88b0df2328e6a9a7c7309e8daff124dda3803306a9fdb"},
+    {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1235c11510ea557fe21be5d0e354bae2c655a8ee6519c94617fe63e05bca4171"},
+    {file = "watchfiles-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2444dc7cb9d8cc5ab88ebe792a8d75709d96eeef47f4c8fccb6df7c7bc5be71"},
+    {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c5af2347d17ab0bd59366db8752d9e037982e259cacb2ba06f2c41c08af02c39"},
+    {file = "watchfiles-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9624a68b96c878c10437199d9a8b7d7e542feddda8d5ecff58fdc8e67b460848"},
+    {file = "watchfiles-0.22.0-cp310-none-win32.whl", hash = "sha256:4b9f2a128a32a2c273d63eb1fdbf49ad64852fc38d15b34eaa3f7ca2f0d2b797"},
+    {file = "watchfiles-0.22.0-cp310-none-win_amd64.whl", hash = "sha256:2627a91e8110b8de2406d8b2474427c86f5a62bf7d9ab3654f541f319ef22bcb"},
+    {file = "watchfiles-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8c39987a1397a877217be1ac0fb1d8b9f662c6077b90ff3de2c05f235e6a8f96"},
+    {file = "watchfiles-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a927b3034d0672f62fb2ef7ea3c9fc76d063c4b15ea852d1db2dc75fe2c09696"},
+    {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052d668a167e9fc345c24203b104c313c86654dd6c0feb4b8a6dfc2462239249"},
+    {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e45fb0d70dda1623a7045bd00c9e036e6f1f6a85e4ef2c8ae602b1dfadf7550"},
+    {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c49b76a78c156979759d759339fb62eb0549515acfe4fd18bb151cc07366629c"},
+    {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a65474fd2b4c63e2c18ac67a0c6c66b82f4e73e2e4d940f837ed3d2fd9d4da"},
+    {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc0cba54f47c660d9fa3218158b8963c517ed23bd9f45fe463f08262a4adae1"},
+    {file = "watchfiles-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ebe84a035993bb7668f58a0ebf998174fb723a39e4ef9fce95baabb42b787f"},
+    {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e0f0a874231e2839abbf473256efffe577d6ee2e3bfa5b540479e892e47c172d"},
+    {file = "watchfiles-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:213792c2cd3150b903e6e7884d40660e0bcec4465e00563a5fc03f30ea9c166c"},
+    {file = "watchfiles-0.22.0-cp311-none-win32.whl", hash = "sha256:b44b70850f0073b5fcc0b31ede8b4e736860d70e2dbf55701e05d3227a154a67"},
+    {file = "watchfiles-0.22.0-cp311-none-win_amd64.whl", hash = "sha256:00f39592cdd124b4ec5ed0b1edfae091567c72c7da1487ae645426d1b0ffcad1"},
+    {file = "watchfiles-0.22.0-cp311-none-win_arm64.whl", hash = "sha256:3218a6f908f6a276941422b035b511b6d0d8328edd89a53ae8c65be139073f84"},
+    {file = "watchfiles-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c7b978c384e29d6c7372209cbf421d82286a807bbcdeb315427687f8371c340a"},
+    {file = "watchfiles-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd4c06100bce70a20c4b81e599e5886cf504c9532951df65ad1133e508bf20be"},
+    {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:425440e55cd735386ec7925f64d5dde392e69979d4c8459f6bb4e920210407f2"},
+    {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68fe0c4d22332d7ce53ad094622b27e67440dacefbaedd29e0794d26e247280c"},
+    {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8a31bfd98f846c3c284ba694c6365620b637debdd36e46e1859c897123aa232"},
+    {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2e8fe41f3cac0660197d95216c42910c2b7e9c70d48e6d84e22f577d106fc1"},
+    {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b7cc10261c2786c41d9207193a85c1db1b725cf87936df40972aab466179b6"},
+    {file = "watchfiles-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28585744c931576e535860eaf3f2c0ec7deb68e3b9c5a85ca566d69d36d8dd27"},
+    {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00095dd368f73f8f1c3a7982a9801190cc88a2f3582dd395b289294f8975172b"},
+    {file = "watchfiles-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:52fc9b0dbf54d43301a19b236b4a4614e610605f95e8c3f0f65c3a456ffd7d35"},
+    {file = "watchfiles-0.22.0-cp312-none-win32.whl", hash = "sha256:581f0a051ba7bafd03e17127735d92f4d286af941dacf94bcf823b101366249e"},
+    {file = "watchfiles-0.22.0-cp312-none-win_amd64.whl", hash = "sha256:aec83c3ba24c723eac14225194b862af176d52292d271c98820199110e31141e"},
+    {file = "watchfiles-0.22.0-cp312-none-win_arm64.whl", hash = "sha256:c668228833c5619f6618699a2c12be057711b0ea6396aeaece4ded94184304ea"},
+    {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b810a2c7878cbdecca12feae2c2ae8af59bea016a78bc353c184fa1e09f76b68"},
+    {file = "watchfiles-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7e1f9c5d1160d03b93fc4b68a0aeb82fe25563e12fbcdc8507f8434ab6f823c"},
+    {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030bc4e68d14bcad2294ff68c1ed87215fbd9a10d9dea74e7cfe8a17869785ab"},
+    {file = "watchfiles-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace7d060432acde5532e26863e897ee684780337afb775107c0a90ae8dbccfd2"},
+    {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5834e1f8b71476a26df97d121c0c0ed3549d869124ed2433e02491553cb468c2"},
+    {file = "watchfiles-0.22.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0bc3b2f93a140df6806c8467c7f51ed5e55a931b031b5c2d7ff6132292e803d6"},
+    {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fdebb655bb1ba0122402352b0a4254812717a017d2dc49372a1d47e24073795"},
+    {file = "watchfiles-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c8e0aa0e8cc2a43561e0184c0513e291ca891db13a269d8d47cb9841ced7c71"},
+    {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2f350cbaa4bb812314af5dab0eb8d538481e2e2279472890864547f3fe2281ed"},
+    {file = "watchfiles-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7a74436c415843af2a769b36bf043b6ccbc0f8d784814ba3d42fc961cdb0a9dc"},
+    {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00ad0bcd399503a84cc688590cdffbe7a991691314dde5b57b3ed50a41319a31"},
+    {file = "watchfiles-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72a44e9481afc7a5ee3291b09c419abab93b7e9c306c9ef9108cb76728ca58d2"},
+    {file = "watchfiles-0.22.0.tar.gz", hash = "sha256:988e981aaab4f3955209e7e28c7794acdb690be1efa7f16f8ea5aba7ffdadacb"},
+]
+
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+summary = "Character encoding aliases for legacy web content"
+groups = ["docs"]
+files = [
+    {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
+    {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
+]
+
+[[package]]
+name = "websockets"
+version = "12.0"
+requires_python = ">=3.8"
+summary = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+groups = ["docs"]
+files = [
+    {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"},
+    {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"},
+    {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"},
+    {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"},
+    {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"},
+    {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"},
+    {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"},
+    {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"},
+    {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"},
+    {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"},
+    {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"},
+    {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"},
+    {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"},
+    {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"},
+    {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"},
+    {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"},
+    {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"},
+    {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"},
+    {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"},
+    {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"},
+    {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"},
+    {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"},
+    {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"},
+    {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"},
+    {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"},
+    {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"},
+    {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"},
+    {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"},
+    {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"},
+    {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"},
+    {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"},
+    {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"},
+    {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"},
+    {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"},
+    {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"},
+    {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"},
+    {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"},
+    {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"},
+    {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"},
+    {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"},
+    {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"},
+    {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"},
+    {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"},
+    {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"},
+    {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"},
+    {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"},
+    {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"},
+    {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"},
+    {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"},
+    {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
+]
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index d2f6c656..00000000
--- a/poetry.lock
+++ /dev/null
@@ -1,2642 +0,0 @@
-[[package]]
-name = "alembic"
-version = "1.8.1"
-description = "A database migration tool for SQLAlchemy."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-Mako = "*"
-SQLAlchemy = ">=1.3.0"
-
-[package.extras]
-tz = ["python-dateutil"]
-
-[[package]]
-name = "anyio"
-version = "3.6.1"
-description = "High level compatibility layer for multiple asynchronous event loop implementations"
-category = "main"
-optional = false
-python-versions = ">=3.6.2"
-
-[package.dependencies]
-idna = ">=2.8"
-sniffio = ">=1.1"
-
-[package.extras]
-doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
-test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"]
-trio = ["trio (>=0.16)"]
-
-[[package]]
-name = "astor"
-version = "0.8.1"
-description = "Read/rewrite/write Python ASTs"
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
-
-[[package]]
-name = "astroid"
-version = "2.11.7"
-description = "An abstract syntax tree for Python with inference support."
-category = "dev"
-optional = false
-python-versions = ">=3.6.2"
-
-[package.dependencies]
-lazy-object-proxy = ">=1.4.0"
-wrapt = ">=1.11,<2"
-
-[[package]]
-name = "async-timeout"
-version = "4.0.2"
-description = "Timeout context manager for asyncio programs"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "asyncpg"
-version = "0.26.0"
-description = "An asyncio PostgreSQL driver"
-category = "main"
-optional = false
-python-versions = ">=3.6.0"
-
-[package.extras]
-dev = ["Cython (>=0.29.24,<0.30.0)", "pytest (>=6.0)", "Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "pycodestyle (>=2.7.0,<2.8.0)", "flake8 (>=3.9.2,<3.10.0)", "uvloop (>=0.15.3)"]
-docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)"]
-test = ["pycodestyle (>=2.7.0,<2.8.0)", "flake8 (>=3.9.2,<3.10.0)", "uvloop (>=0.15.3)"]
-
-[[package]]
-name = "atomicwrites"
-version = "1.4.1"
-description = "Atomic file writes."
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[[package]]
-name = "attrs"
-version = "22.1.0"
-description = "Classes Without Boilerplate"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-
-[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
-docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
-
-[[package]]
-name = "bandit"
-version = "1.7.4"
-description = "Security oriented static analyser for python code."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""}
-GitPython = ">=1.0.1"
-PyYAML = ">=5.3.1"
-stevedore = ">=1.20.0"
-
-[package.extras]
-test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"]
-toml = ["toml"]
-yaml = ["pyyaml"]
-
-[[package]]
-name = "black"
-version = "22.6.0"
-description = "The uncompromising code formatter."
-category = "dev"
-optional = false
-python-versions = ">=3.6.2"
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
-[[package]]
-name = "blacken-docs"
-version = "1.12.1"
-description = "Run `black` on python code blocks in documentation files"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-black = ">=19.3b0"
-
-[[package]]
-name = "brotli"
-version = "1.0.9"
-description = "Python bindings for the Brotli compression library"
-category = "main"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "bump2version"
-version = "1.0.1"
-description = "Version-bump your software with a single command!"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-
-[[package]]
-name = "certifi"
-version = "2022.6.15"
-description = "Python package for providing Mozilla's CA Bundle."
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "cfgv"
-version = "3.3.1"
-description = "Validate configuration and produce human readable error messages."
-category = "dev"
-optional = false
-python-versions = ">=3.6.1"
-
-[[package]]
-name = "classify-imports"
-version = "4.1.0"
-description = "Utilities for refactoring imports in python-like syntax."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "click"
-version = "8.1.3"
-description = "Composable command line interface toolkit"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
-[[package]]
-name = "colorama"
-version = "0.4.5"
-description = "Cross-platform colored terminal text."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[[package]]
-name = "commonmark"
-version = "0.9.1"
-description = "Python parser for the CommonMark Markdown spec"
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.extras]
-test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
-
-[[package]]
-name = "coverage"
-version = "6.4.2"
-description = "Code coverage measurement for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
-
-[package.extras]
-toml = ["tomli"]
-
-[[package]]
-name = "deprecated"
-version = "1.2.13"
-description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-
-[package.dependencies]
-wrapt = ">=1.10,<2"
-
-[package.extras]
-dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"]
-
-[[package]]
-name = "dill"
-version = "0.3.5.1"
-description = "serialize all of python"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
-
-[package.extras]
-graph = ["objgraph (>=1.7.2)"]
-
-[[package]]
-name = "distlib"
-version = "0.3.5"
-description = "Distribution utilities"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "dnspython"
-version = "2.2.1"
-description = "DNS toolkit"
-category = "main"
-optional = false
-python-versions = ">=3.6,<4.0"
-
-[package.extras]
-dnssec = ["cryptography (>=2.6,<37.0)"]
-curio = ["curio (>=1.2,<2.0)", "sniffio (>=1.1,<2.0)"]
-doh = ["h2 (>=4.1.0)", "httpx (>=0.21.1)", "requests (>=2.23.0,<3.0.0)", "requests-toolbelt (>=0.9.1,<0.10.0)"]
-idna = ["idna (>=2.1,<4.0)"]
-trio = ["trio (>=0.14,<0.20)"]
-wmi = ["wmi (>=1.5.1,<2.0.0)"]
-
-[[package]]
-name = "email-validator"
-version = "1.2.1"
-description = "A robust email syntax and deliverability validation library."
-category = "main"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-
-[package.dependencies]
-dnspython = ">=1.15.0"
-idna = ">=2.0.0"
-
-[[package]]
-name = "exceptiongroup"
-version = "1.0.0rc8"
-description = "Backport of PEP 654 (exception groups)"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-test = ["pytest (>=6)"]
-
-[[package]]
-name = "faker"
-version = "13.15.1"
-description = "Faker is a Python package that generates fake data for you."
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-python-dateutil = ">=2.4"
-
-[[package]]
-name = "filelock"
-version = "3.7.1"
-description = "A platform independent file lock."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"]
-testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"]
-
-[[package]]
-name = "flake8"
-version = "5.0.0"
-description = "the modular source code checker: pep8 pyflakes and co"
-category = "dev"
-optional = false
-python-versions = ">=3.6.1"
-
-[package.dependencies]
-mccabe = ">=0.7.0,<0.8.0"
-pycodestyle = ">=2.9.0,<2.10.0"
-pyflakes = ">=2.5.0,<2.6.0"
-
-[[package]]
-name = "flake8-bugbear"
-version = "22.7.1"
-description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-attrs = ">=19.2.0"
-flake8 = ">=3.0.0"
-
-[package.extras]
-dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"]
-
-[[package]]
-name = "flake8-comprehensions"
-version = "3.10.0"
-description = "A flake8 plugin to help you write better list/set/dict comprehensions."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-flake8 = ">=3.0,<3.2.0 || >3.2.0"
-
-[[package]]
-name = "flake8-mutable"
-version = "1.2.0"
-description = "mutable defaults flake8 extension"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-flake8 = "*"
-
-[[package]]
-name = "flake8-print"
-version = "5.0.0"
-description = "print statement checker plugin for flake8"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-flake8 = ">=3.0"
-pycodestyle = "*"
-
-[[package]]
-name = "flake8-simplify"
-version = "0.19.3"
-description = "flake8 plugin which checks for code that can be simplified"
-category = "dev"
-optional = false
-python-versions = ">=3.6.1"
-
-[package.dependencies]
-astor = ">=0.1"
-flake8 = ">=3.7"
-
-[[package]]
-name = "flake8-type-checking"
-version = "2.1.0"
-description = "A flake8 plugin for managing type-checking imports & forward references"
-category = "dev"
-optional = false
-python-versions = ">=3.8,<4.0"
-
-[package.dependencies]
-classify-imports = "*"
-flake8 = "*"
-
-[[package]]
-name = "freezegun"
-version = "1.2.1"
-description = "Let your Python tests travel through time"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-python-dateutil = ">=2.7"
-
-[[package]]
-name = "ghp-import"
-version = "2.1.0"
-description = "Copy your docs directly to the gh-pages branch."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-python-dateutil = ">=2.8.1"
-
-[package.extras]
-dev = ["twine", "markdown", "flake8", "wheel"]
-
-[[package]]
-name = "gitdb"
-version = "4.0.9"
-description = "Git Object Database"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-smmap = ">=3.0.1,<6"
-
-[[package]]
-name = "gitpython"
-version = "3.1.27"
-description = "GitPython is a python library used to interact with Git repositories"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-gitdb = ">=4.0.1,<5"
-
-[[package]]
-name = "greenlet"
-version = "1.1.2"
-description = "Lightweight in-process concurrent programming"
-category = "main"
-optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
-
-[package.extras]
-docs = ["sphinx"]
-
-[[package]]
-name = "griffe"
-version = "0.22.0"
-description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-async = ["aiofiles (>=0.7,<1.0)"]
-
-[[package]]
-name = "gunicorn"
-version = "20.1.0"
-description = "WSGI HTTP Server for UNIX"
-category = "main"
-optional = false
-python-versions = ">=3.5"
-
-[package.extras]
-eventlet = ["eventlet (>=0.24.1)"]
-gevent = ["gevent (>=1.4.0)"]
-setproctitle = ["setproctitle"]
-tornado = ["tornado (>=0.2)"]
-
-[[package]]
-name = "h11"
-version = "0.12.0"
-description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "hiredis"
-version = "2.0.0"
-description = "Python wrapper for hiredis"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "httpcore"
-version = "0.15.0"
-description = "A minimal low-level HTTP client."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-anyio = ">=3.0.0,<4.0.0"
-certifi = "*"
-h11 = ">=0.11,<0.13"
-sniffio = ">=1.0.0,<2.0.0"
-
-[package.extras]
-http2 = ["h2 (>=3,<5)"]
-socks = ["socksio (>=1.0.0,<2.0.0)"]
-
-[[package]]
-name = "httptools"
-version = "0.4.0"
-description = "A collection of framework independent HTTP protocol utils."
-category = "main"
-optional = false
-python-versions = ">=3.5.0"
-
-[package.extras]
-test = ["Cython (>=0.29.24,<0.30.0)"]
-
-[[package]]
-name = "httpx"
-version = "0.23.0"
-description = "The next generation HTTP client."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-certifi = "*"
-httpcore = ">=0.15.0,<0.16.0"
-rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
-sniffio = "*"
-
-[package.extras]
-brotli = ["brotlicffi", "brotli"]
-cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10,<13)", "pygments (>=2.0.0,<3.0.0)"]
-http2 = ["h2 (>=3,<5)"]
-socks = ["socksio (>=1.0.0,<2.0.0)"]
-
-[[package]]
-name = "hypothesis"
-version = "6.53.0"
-description = "A library for property-based testing"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-attrs = ">=19.2.0"
-black = {version = ">=19.10b0", optional = true, markers = "extra == \"cli\""}
-click = {version = ">=7.0", optional = true, markers = "extra == \"cli\""}
-exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
-rich = {version = ">=9.0.0", optional = true, markers = "extra == \"cli\""}
-sortedcontainers = ">=2.1.0,<3.0.0"
-
-[package.extras]
-all = ["black (>=19.10b0)", "click (>=7.0)", "django (>=2.2)", "dpcontracts (>=0.4)", "lark-parser (>=0.6.5)", "libcst (>=0.3.16)", "numpy (>=1.9.0)", "pandas (>=1.0)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "importlib-metadata (>=3.6)", "backports.zoneinfo (>=0.2.1)", "tzdata (>=2022.1)"]
-cli = ["click (>=7.0)", "black (>=19.10b0)", "rich (>=9.0.0)"]
-codemods = ["libcst (>=0.3.16)"]
-dateutil = ["python-dateutil (>=1.4)"]
-django = ["django (>=2.2)"]
-dpcontracts = ["dpcontracts (>=0.4)"]
-ghostwriter = ["black (>=19.10b0)"]
-lark = ["lark-parser (>=0.6.5)"]
-numpy = ["numpy (>=1.9.0)"]
-pandas = ["pandas (>=1.0)"]
-pytest = ["pytest (>=4.6)"]
-pytz = ["pytz (>=2014.1)"]
-redis = ["redis (>=3.0.0)"]
-zoneinfo = ["backports.zoneinfo (>=0.2.1)", "tzdata (>=2022.1)"]
-
-[[package]]
-name = "identify"
-version = "2.5.2"
-description = "File identification library for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-license = ["ukkonen"]
-
-[[package]]
-name = "idna"
-version = "3.3"
-description = "Internationalized Domain Names in Applications (IDNA)"
-category = "main"
-optional = false
-python-versions = ">=3.5"
-
-[[package]]
-name = "importlib-metadata"
-version = "4.12.0"
-description = "Read metadata from Python packages"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
-perf = ["ipython"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
-
-[[package]]
-name = "iniconfig"
-version = "1.1.1"
-description = "iniconfig: brain-dead simple config-ini parsing"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "isort"
-version = "5.10.1"
-description = "A Python utility / library to sort Python imports."
-category = "dev"
-optional = false
-python-versions = ">=3.6.1,<4.0"
-
-[package.extras]
-pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
-requirements_deprecated_finder = ["pipreqs", "pip-api"]
-colors = ["colorama (>=0.4.3,<0.5.0)"]
-plugins = ["setuptools"]
-
-[[package]]
-name = "jinja2"
-version = "3.1.2"
-description = "A very fast and expressive template engine."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-MarkupSafe = ">=2.0"
-
-[package.extras]
-i18n = ["Babel (>=2.7)"]
-
-[[package]]
-name = "lazy-object-proxy"
-version = "1.7.1"
-description = "A fast and thorough lazy object proxy."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "mako"
-version = "1.2.1"
-description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-MarkupSafe = ">=0.9.2"
-
-[package.extras]
-babel = ["babel"]
-lingua = ["lingua"]
-testing = ["pytest"]
-
-[[package]]
-name = "markdown"
-version = "3.3.7"
-description = "Python implementation of Markdown."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-testing = ["coverage", "pyyaml"]
-
-[[package]]
-name = "markupsafe"
-version = "2.1.1"
-description = "Safely add untrusted strings to HTML/XML markup."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "mccabe"
-version = "0.7.0"
-description = "McCabe checker, plugin for flake8"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "mergedeep"
-version = "1.3.4"
-description = "A deep merge function for 🐍."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "mkdocs"
-version = "1.3.1"
-description = "Project documentation with Markdown."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-click = ">=3.3"
-ghp-import = ">=1.0"
-importlib-metadata = ">=4.3"
-Jinja2 = ">=2.10.2"
-Markdown = ">=3.2.1,<3.4"
-mergedeep = ">=1.3.4"
-packaging = ">=20.5"
-PyYAML = ">=3.10"
-pyyaml-env-tag = ">=0.1"
-watchdog = ">=2.0"
-
-[package.extras]
-i18n = ["babel (>=2.9.0)"]
-
-[[package]]
-name = "mkdocs-autorefs"
-version = "0.4.1"
-description = "Automatically link across pages in MkDocs."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-Markdown = ">=3.3"
-mkdocs = ">=1.1"
-
-[[package]]
-name = "mkdocs-material"
-version = "8.3.9"
-description = "Documentation that simply works"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-jinja2 = ">=3.0.2"
-markdown = ">=3.2"
-mkdocs = ">=1.3.0"
-mkdocs-material-extensions = ">=1.0.3"
-pygments = ">=2.12"
-pymdown-extensions = ">=9.4"
-
-[[package]]
-name = "mkdocs-material-extensions"
-version = "1.0.3"
-description = "Extension pack for Python Markdown."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "mkdocstrings"
-version = "0.19.0"
-description = "Automatic documentation from sources, for MkDocs."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-Jinja2 = ">=2.11.1"
-Markdown = ">=3.3"
-MarkupSafe = ">=1.1"
-mkdocs = ">=1.2"
-mkdocs-autorefs = ">=0.3.1"
-pymdown-extensions = ">=6.3"
-
-[package.extras]
-crystal = ["mkdocstrings-crystal (>=0.3.4)"]
-python = ["mkdocstrings-python (>=0.5.2)"]
-python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"]
-
-[[package]]
-name = "mkdocstrings-python"
-version = "0.7.1"
-description = "A Python handler for mkdocstrings."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-griffe = ">=0.11.1"
-mkdocstrings = ">=0.19"
-
-[[package]]
-name = "mypy"
-version = "0.971"
-description = "Optional static typing for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-mypy-extensions = ">=0.4.3"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-typing-extensions = ">=3.10"
-
-[package.extras]
-dmypy = ["psutil (>=4.0)"]
-python2 = ["typed-ast (>=1.4.0,<2)"]
-reports = ["lxml"]
-
-[[package]]
-name = "mypy-extensions"
-version = "0.4.3"
-description = "Experimental type system extensions for programs checked with the mypy typechecker."
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "nodeenv"
-version = "1.7.0"
-description = "Node.js virtual environment builder"
-category = "dev"
-optional = false
-python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
-
-[[package]]
-name = "openapi-schema-pydantic"
-version = "1.2.4"
-description = "OpenAPI (v3) specification schema as pydantic class"
-category = "main"
-optional = false
-python-versions = ">=3.6.1"
-
-[package.dependencies]
-pydantic = ">=1.8.2"
-
-[[package]]
-name = "orjson"
-version = "3.7.11"
-description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "packaging"
-version = "21.3"
-description = "Core utilities for Python packages"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
-
-[[package]]
-name = "pathspec"
-version = "0.9.0"
-description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-
-[[package]]
-name = "pbr"
-version = "5.9.0"
-description = "Python Build Reasonableness"
-category = "dev"
-optional = false
-python-versions = ">=2.6"
-
-[[package]]
-name = "picologging"
-version = "0.6.0"
-description = "A fast and lightweight logging library for Python"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-dev = ["pytest-cov", "pytest", "rich"]
-
-[[package]]
-name = "platformdirs"
-version = "2.5.2"
-description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
-test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
-
-[[package]]
-name = "pluggy"
-version = "1.0.0"
-description = "plugin and hook calling mechanisms for python"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
-
-[[package]]
-name = "pre-commit"
-version = "2.20.0"
-description = "A framework for managing and maintaining multi-language pre-commit hooks."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-cfgv = ">=2.0.0"
-identify = ">=1.0.0"
-nodeenv = ">=0.11.1"
-pyyaml = ">=5.1"
-toml = "*"
-virtualenv = ">=20.0.8"
-
-[[package]]
-name = "py"
-version = "1.11.0"
-description = "library with cross-python path, ini-parsing, io, code, log facilities"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
-
-[[package]]
-name = "pycodestyle"
-version = "2.9.0"
-description = "Python style guide checker"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "pydantic"
-version = "1.9.1"
-description = "Data validation and settings management using python type hints"
-category = "main"
-optional = false
-python-versions = ">=3.6.1"
-
-[package.dependencies]
-email-validator = {version = ">=1.0.3", optional = true, markers = "extra == \"email\""}
-python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""}
-typing-extensions = ">=3.7.4.3"
-
-[package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
-
-[[package]]
-name = "pydantic-factories"
-version = "1.4.1"
-description = "Mock data generation for pydantic based models"
-category = "main"
-optional = false
-python-versions = ">=3.7,<4.0"
-
-[package.dependencies]
-faker = "*"
-pydantic = "*"
-typing-extensions = "*"
-xeger = ">=0.3.5,<0.4.0"
-
-[[package]]
-name = "pyflakes"
-version = "2.5.0"
-description = "passive checker of Python programs"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "pygments"
-version = "2.12.0"
-description = "Pygments is a syntax highlighting package written in Python."
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "pylint"
-version = "2.14.5"
-description = "python code static checker"
-category = "dev"
-optional = false
-python-versions = ">=3.7.2"
-
-[package.dependencies]
-astroid = ">=2.11.6,<=2.12.0-dev0"
-colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-dill = ">=0.2"
-isort = ">=4.2.5,<6"
-mccabe = ">=0.6,<0.8"
-platformdirs = ">=2.2.0"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-tomlkit = ">=0.10.1"
-
-[package.extras]
-spelling = ["pyenchant (>=3.2,<4.0)"]
-testutils = ["gitpython (>3)"]
-
-[[package]]
-name = "pymdown-extensions"
-version = "9.5"
-description = "Extension pack for Python Markdown."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-markdown = ">=3.2"
-
-[[package]]
-name = "pyparsing"
-version = "3.0.9"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
-category = "main"
-optional = false
-python-versions = ">=3.6.8"
-
-[package.extras]
-diagrams = ["railroad-diagrams", "jinja2"]
-
-[[package]]
-name = "pytest"
-version = "7.1.2"
-description = "pytest: simple powerful testing with Python"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
-attrs = ">=19.2.0"
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-iniconfig = "*"
-packaging = "*"
-pluggy = ">=0.12,<2.0"
-py = ">=1.8.2"
-tomli = ">=1.0.0"
-
-[package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
-
-[[package]]
-name = "pytest-asyncio"
-version = "0.19.0"
-description = "Pytest support for asyncio"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-pytest = ">=6.1.0"
-
-[package.extras]
-testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"]
-
-[[package]]
-name = "pytest-cov"
-version = "3.0.0"
-description = "Pytest plugin for measuring coverage."
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-coverage = {version = ">=5.2.1", extras = ["toml"]}
-pytest = ">=4.6"
-
-[package.extras]
-testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"]
-
-[[package]]
-name = "pytest-dotenv"
-version = "0.5.2"
-description = "A py.test plugin that parses environment files before running tests"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-pytest = ">=5.0.0"
-python-dotenv = ">=0.9.1"
-
-[[package]]
-name = "pytest-mock"
-version = "3.8.2"
-description = "Thin-wrapper around the mock package for easier use with pytest"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-pytest = ">=5.0"
-
-[package.extras]
-dev = ["pre-commit", "tox", "pytest-asyncio"]
-
-[[package]]
-name = "python-dateutil"
-version = "2.8.2"
-description = "Extensions to the standard Python datetime module"
-category = "main"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-
-[package.dependencies]
-six = ">=1.5"
-
-[[package]]
-name = "python-dotenv"
-version = "0.20.0"
-description = "Read key-value pairs from a .env file and set them as environment variables"
-category = "main"
-optional = false
-python-versions = ">=3.5"
-
-[package.extras]
-cli = ["click (>=5.0)"]
-
-[[package]]
-name = "python-multipart"
-version = "0.0.5"
-description = "A streaming multipart parser for Python"
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-six = ">=1.4.0"
-
-[[package]]
-name = "pyupgrade"
-version = "2.37.3"
-description = "A tool to automatically upgrade syntax for newer versions."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-tokenize-rt = ">=3.2.0"
-
-[[package]]
-name = "pyyaml"
-version = "6.0"
-description = "YAML parser and emitter for Python"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "pyyaml-env-tag"
-version = "0.1"
-description = "A custom YAML tag for referencing environment variables in YAML files. "
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-pyyaml = "*"
-
-[[package]]
-name = "redis"
-version = "4.3.4"
-description = "Python client for Redis database and key-value store"
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-async-timeout = ">=4.0.2"
-deprecated = ">=1.2.3"
-packaging = ">=20.4"
-
-[package.extras]
-hiredis = ["hiredis (>=1.0.0)"]
-ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"]
-
-[[package]]
-name = "rfc3986"
-version = "1.5.0"
-description = "Validating URI References per RFC 3986"
-category = "main"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
-
-[package.extras]
-idna2008 = ["idna"]
-
-[[package]]
-name = "rich"
-version = "12.5.1"
-description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
-category = "main"
-optional = false
-python-versions = ">=3.6.3,<4.0.0"
-
-[package.dependencies]
-commonmark = ">=0.9.0,<0.10.0"
-pygments = ">=2.6.0,<3.0.0"
-
-[package.extras]
-jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
-
-[[package]]
-name = "six"
-version = "1.16.0"
-description = "Python 2 and 3 compatibility utilities"
-category = "main"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
-
-[[package]]
-name = "slotscheck"
-version = "0.14.1"
-description = "Ensure your __slots__ are working properly."
-category = "dev"
-optional = false
-python-versions = ">=3.6.2,<4"
-
-[package.dependencies]
-click = ">=8.0,<9.0"
-tomli = ">=0.2.6,<3.0.0"
-
-[[package]]
-name = "smmap"
-version = "5.0.0"
-description = "A pure Python implementation of a sliding window memory map manager"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[[package]]
-name = "sniffio"
-version = "1.2.0"
-description = "Sniff out which async library your code is running under"
-category = "main"
-optional = false
-python-versions = ">=3.5"
-
-[[package]]
-name = "sortedcontainers"
-version = "2.4.0"
-description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "SQLAlchemy"
-version = "2.0.0b1.dev0"
-description = "Database Abstraction Library"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-develop = false
-
-[package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
-mypy = {version = ">=0.910", optional = true, markers = "extra == \"mypy\""}
-typing-extensions = ">=4.1.0"
-
-[package.extras]
-aiomysql = ["greenlet (!=0.4.17)", "aiomysql"]
-aiosqlite = ["greenlet (!=0.4.17)", "aiosqlite", "typing-extensions (!=3.10.0.1)"]
-asyncio = ["greenlet (!=0.4.17)"]
-asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3,!=0.2.4)"]
-mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"]
-mssql = ["pyodbc"]
-mssql_pymssql = ["pymssql"]
-mssql_pyodbc = ["pyodbc"]
-mypy = ["mypy (>=0.910)"]
-mysql = ["mysqlclient (>=1.4.0)"]
-mysql_connector = ["mysql-connector-python"]
-oracle = ["cx-oracle (>=7)"]
-oracle_oracledb = ["oracledb (>=1.0.1)"]
-postgresql = ["psycopg2 (>=2.7)"]
-postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"]
-postgresql_pg8000 = ["pg8000 (>=1.29.1)"]
-postgresql_psycopg = ["psycopg (>=3.0.7)"]
-postgresql_psycopg2binary = ["psycopg2-binary"]
-postgresql_psycopg2cffi = ["psycopg2cffi"]
-pymysql = ["pymysql"]
-sqlcipher = ["sqlcipher3-binary"]
-
-[package.source]
-type = "git"
-url = "https://github.com/sqlalchemy/sqlalchemy.git"
-reference = "main"
-resolved_reference = "3ff18812d8d80b2016ceeea98c808a76cae85e48"
-
-[[package]]
-name = "starlette"
-version = "0.20.4"
-description = "The little ASGI library that shines."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-anyio = ">=3.4.0,<5"
-
-[package.extras]
-full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"]
-
-[[package]]
-name = "starlite"
-version = "1.7.2"
-description = "Light-weight and flexible ASGI API Framework"
-category = "main"
-optional = false
-python-versions = ">=3.7,<4.0"
-
-[package.dependencies]
-brotli = {version = "*", optional = true, markers = "extra == \"testing\" or extra == \"brotli\""}
-openapi-schema-pydantic = "*"
-orjson = "*"
-pydantic = "*"
-pydantic-factories = "*"
-python-multipart = "*"
-pyyaml = "*"
-starlette = "*"
-typing-extensions = "*"
-
-[package.extras]
-testing = ["requests", "brotli"]
-brotli = ["brotli"]
-
-[[package]]
-name = "stevedore"
-version = "4.0.0"
-description = "Manage dynamic plugins for Python applications"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-
-[package.dependencies]
-pbr = ">=2.0.0,<2.1.0 || >2.1.0"
-
-[[package]]
-name = "tokenize-rt"
-version = "4.2.1"
-description = "A wrapper around the stdlib `tokenize` which roundtrips."
-category = "dev"
-optional = false
-python-versions = ">=3.6.1"
-
-[[package]]
-name = "toml"
-version = "0.10.2"
-description = "Python Library for Tom's Obvious, Minimal Language"
-category = "dev"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-
-[[package]]
-name = "tomli"
-version = "2.0.1"
-description = "A lil' TOML parser"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "tomlkit"
-version = "0.11.1"
-description = "Style preserving TOML library"
-category = "dev"
-optional = false
-python-versions = ">=3.6,<4.0"
-
-[[package]]
-name = "tox"
-version = "3.25.1"
-description = "tox is a generic virtualenv management and test command line tool"
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-
-[package.dependencies]
-colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""}
-filelock = ">=3.0.0"
-packaging = ">=14"
-pluggy = ">=0.12.0"
-py = ">=1.4.17"
-six = ">=1.14.0"
-toml = ">=0.9.4"
-virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7"
-
-[package.extras]
-docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"]
-testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "psutil (>=5.6.1)", "pathlib2 (>=2.3.3)"]
-
-[[package]]
-name = "typer"
-version = "0.6.1"
-description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
-category = "main"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-click = ">=7.1.1,<9.0.0"
-
-[package.extras]
-test = ["rich (>=10.11.0,<13.0.0)", "isort (>=5.0.6,<6.0.0)", "black (>=22.3.0,<23.0.0)", "mypy (==0.910)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<2.0.0)", "coverage (>=5.2,<6.0)", "pytest-cov (>=2.10.0,<3.0.0)", "pytest (>=4.4.0,<5.4.0)", "shellingham (>=1.3.0,<2.0.0)"]
-doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mkdocs (>=1.1.2,<2.0.0)"]
-dev = ["pre-commit (>=2.17.0,<3.0.0)", "flake8 (>=3.8.3,<4.0.0)", "autoflake (>=1.3.1,<2.0.0)"]
-all = ["rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)", "colorama (>=0.4.3,<0.5.0)"]
-
-[[package]]
-name = "types-freezegun"
-version = "1.1.10"
-description = "Typing stubs for freezegun"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "types-pyyaml"
-version = "6.0.11"
-description = "Typing stubs for PyYAML"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "types-redis"
-version = "4.3.12"
-description = "Typing stubs for redis"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "types-requests"
-version = "2.28.6"
-description = "Typing stubs for requests"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-types-urllib3 = "<1.27"
-
-[[package]]
-name = "types-urllib3"
-version = "1.26.19"
-description = "Typing stubs for urllib3"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "typing-extensions"
-version = "4.3.0"
-description = "Backported and Experimental Type Hints for Python 3.7+"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "uvicorn"
-version = "0.18.2"
-description = "The lightning-fast ASGI server."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-click = ">=7.0"
-colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""}
-h11 = ">=0.8"
-httptools = {version = ">=0.4.0", optional = true, markers = "extra == \"standard\""}
-python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
-PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
-uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
-watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
-websockets = {version = ">=10.0", optional = true, markers = "extra == \"standard\""}
-
-[package.extras]
-standard = ["websockets (>=10.0)", "httptools (>=0.4.0)", "watchfiles (>=0.13)", "python-dotenv (>=0.13)", "PyYAML (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "colorama (>=0.4)"]
-
-[[package]]
-name = "uvloop"
-version = "0.16.0"
-description = "Fast implementation of asyncio event loop on top of libuv"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-dev = ["Cython (>=0.29.24,<0.30.0)", "pytest (>=3.6.0)", "Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "psutil", "pycodestyle (>=2.7.0,<2.8.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"]
-docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)"]
-test = ["aiohttp", "flake8 (>=3.9.2,<3.10.0)", "psutil", "pycodestyle (>=2.7.0,<2.8.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"]
-
-[[package]]
-name = "virtualenv"
-version = "20.16.2"
-description = "Virtual Python Environment builder"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-distlib = ">=0.3.1,<1"
-filelock = ">=3.2,<4"
-platformdirs = ">=2,<3"
-
-[package.extras]
-docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
-testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"]
-
-[[package]]
-name = "watchdog"
-version = "2.1.9"
-description = "Filesystem events monitoring"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.extras]
-watchmedo = ["PyYAML (>=3.10)"]
-
-[[package]]
-name = "watchfiles"
-version = "0.16.1"
-description = "Simple, modern and high performance file watching and code reload in python."
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-anyio = ">=3.0.0,<4"
-
-[[package]]
-name = "websockets"
-version = "10.3"
-description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[[package]]
-name = "wrapt"
-version = "1.14.1"
-description = "Module for decorators, wrappers and monkey patching."
-category = "main"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
-
-[[package]]
-name = "xeger"
-version = "0.3.5"
-description = "A library for generating random strings from a valid regular expression."
-category = "main"
-optional = false
-python-versions = "*"
-
-[[package]]
-name = "zipp"
-version = "3.8.1"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
-
-[metadata]
-lock-version = "1.1"
-python-versions = ">=3.10,<4.0"
-content-hash = "6b953a86fd16b606f62e8b03399a68ad02b0130a362238e8f69b2348b32e3b21"
-
-[metadata.files]
-alembic = []
-anyio = [
-    {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"},
-    {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"},
-]
-astor = [
-    {file = "astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5"},
-    {file = "astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"},
-]
-astroid = []
-async-timeout = [
-    {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
-    {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
-]
-asyncpg = []
-atomicwrites = []
-attrs = [
-    {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
-    {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
-]
-bandit = [
-    {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"},
-    {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"},
-]
-black = [
-    {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"},
-    {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"},
-    {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"},
-    {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"},
-    {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"},
-    {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"},
-    {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"},
-    {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"},
-    {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"},
-    {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"},
-    {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"},
-    {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"},
-    {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"},
-    {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"},
-    {file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"},
-    {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"},
-    {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"},
-    {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"},
-    {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"},
-    {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"},
-    {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"},
-    {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"},
-    {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"},
-]
-blacken-docs = []
-brotli = [
-    {file = "Brotli-1.0.9-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70"},
-    {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b"},
-    {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6"},
-    {file = "Brotli-1.0.9-cp27-cp27m-win32.whl", hash = "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa"},
-    {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452"},
-    {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7"},
-    {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9744a863b489c79a73aba014df554b0e7a0fc44ef3f8a0ef2a52919c7d155031"},
-    {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a72661af47119a80d82fa583b554095308d6a4c356b2a554fdc2799bc19f2a43"},
-    {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c"},
-    {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c"},
-    {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0"},
-    {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91"},
-    {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa"},
-    {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb"},
-    {file = "Brotli-1.0.9-cp310-cp310-win32.whl", hash = "sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181"},
-    {file = "Brotli-1.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2"},
-    {file = "Brotli-1.0.9-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4"},
-    {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296"},
-    {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430"},
-    {file = "Brotli-1.0.9-cp35-cp35m-win32.whl", hash = "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1"},
-    {file = "Brotli-1.0.9-cp35-cp35m-win_amd64.whl", hash = "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea"},
-    {file = "Brotli-1.0.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f"},
-    {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4"},
-    {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a"},
-    {file = "Brotli-1.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b"},
-    {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f"},
-    {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6"},
-    {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b"},
-    {file = "Brotli-1.0.9-cp36-cp36m-win32.whl", hash = "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14"},
-    {file = "Brotli-1.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c"},
-    {file = "Brotli-1.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126"},
-    {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d"},
-    {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12"},
-    {file = "Brotli-1.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130"},
-    {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a"},
-    {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3"},
-    {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d"},
-    {file = "Brotli-1.0.9-cp37-cp37m-win32.whl", hash = "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1"},
-    {file = "Brotli-1.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5"},
-    {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb"},
-    {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8"},
-    {file = "Brotli-1.0.9-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb"},
-    {file = "Brotli-1.0.9-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26"},
-    {file = "Brotli-1.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c"},
-    {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b"},
-    {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17"},
-    {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649"},
-    {file = "Brotli-1.0.9-cp38-cp38-win32.whl", hash = "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429"},
-    {file = "Brotli-1.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f"},
-    {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19"},
-    {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7"},
-    {file = "Brotli-1.0.9-cp39-cp39-manylinux1_i686.whl", hash = "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b"},
-    {file = "Brotli-1.0.9-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389"},
-    {file = "Brotli-1.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7"},
-    {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806"},
-    {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1"},
-    {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c"},
-    {file = "Brotli-1.0.9-cp39-cp39-win32.whl", hash = "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3"},
-    {file = "Brotli-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761"},
-    {file = "Brotli-1.0.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267"},
-    {file = "Brotli-1.0.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d"},
-    {file = "Brotli-1.0.9.zip", hash = "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438"},
-]
-bump2version = [
-    {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"},
-    {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"},
-]
-certifi = [
-    {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
-    {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
-]
-cfgv = [
-    {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
-    {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
-]
-classify-imports = [
-    {file = "classify_imports-4.1.0-py2.py3-none-any.whl", hash = "sha256:45436d3c4c886ca9092a2c90551b392ba120360e7a782574169ddeb866bbc08a"},
-    {file = "classify_imports-4.1.0.tar.gz", hash = "sha256:69ddc4320690c26aa8baa66bf7e0fa0eecfda49d99cf71a59dee0b57dac82616"},
-]
-click = [
-    {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
-    {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
-]
-colorama = [
-    {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
-    {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
-]
-commonmark = [
-    {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"},
-    {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"},
-]
-coverage = []
-deprecated = [
-    {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"},
-    {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"},
-]
-dill = [
-    {file = "dill-0.3.5.1-py2.py3-none-any.whl", hash = "sha256:33501d03270bbe410c72639b350e941882a8b0fd55357580fbc873fba0c59302"},
-    {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"},
-]
-distlib = []
-dnspython = [
-    {file = "dnspython-2.2.1-py3-none-any.whl", hash = "sha256:a851e51367fb93e9e1361732c1d60dab63eff98712e503ea7d92e6eccb109b4f"},
-    {file = "dnspython-2.2.1.tar.gz", hash = "sha256:0f7569a4a6ff151958b64304071d370daa3243d15941a7beedf0c9fe5105603e"},
-]
-email-validator = [
-    {file = "email_validator-1.2.1-py2.py3-none-any.whl", hash = "sha256:c8589e691cf73eb99eed8d10ce0e9cbb05a0886ba920c8bcb7c82873f4c5789c"},
-    {file = "email_validator-1.2.1.tar.gz", hash = "sha256:6757aea012d40516357c0ac2b1a4c31219ab2f899d26831334c5d069e8b6c3d8"},
-]
-exceptiongroup = [
-    {file = "exceptiongroup-1.0.0rc8-py3-none-any.whl", hash = "sha256:ab0a968e1ef769e55d9a596f4a89f7be9ffedbc9fdefdb77cc68cf5c33ce1035"},
-    {file = "exceptiongroup-1.0.0rc8.tar.gz", hash = "sha256:6990c24f06b8d33c8065cfe43e5e8a4bfa384e0358be036af9cc60b6321bd11a"},
-]
-faker = [
-    {file = "Faker-13.15.1-py3-none-any.whl", hash = "sha256:172e45220b7a46743f4fb58cf380adb306d5c3ab1c0b0d97062508474cec5ff8"},
-    {file = "Faker-13.15.1.tar.gz", hash = "sha256:7c3f8ee807d3916415568169a172bf0893ea9cc3371ab55e4e5f5170d2185bea"},
-]
-filelock = [
-    {file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"},
-    {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"},
-]
-flake8 = [
-    {file = "flake8-5.0.0-py2.py3-none-any.whl", hash = "sha256:f44e470195849d0596cb488c7bd769086fcbe987c10cc9daae9a13b4136abb24"},
-    {file = "flake8-5.0.0.tar.gz", hash = "sha256:503b06b6795189e55298a70b695b1eb4f6b8d479fae81352fc97c72ca242509e"},
-]
-flake8-bugbear = []
-flake8-comprehensions = [
-    {file = "flake8-comprehensions-3.10.0.tar.gz", hash = "sha256:181158f7e7aa26a63a0a38e6017cef28c6adee71278ce56ce11f6ec9c4905058"},
-    {file = "flake8_comprehensions-3.10.0-py3-none-any.whl", hash = "sha256:dad454fd3d525039121e98fa1dd90c46bc138708196a4ebbc949ad3c859adedb"},
-]
-flake8-mutable = []
-flake8-print = []
-flake8-simplify = [
-    {file = "flake8_simplify-0.19.3-py3-none-any.whl", hash = "sha256:1057320e9312d75849541fee822900d27bcad05b2405edc84713affee635629e"},
-    {file = "flake8_simplify-0.19.3.tar.gz", hash = "sha256:2fb083bf5142a98d9c9554755cf2f56f8926eb4a33eae30c0809041b1546879e"},
-]
-flake8-type-checking = [
-    {file = "flake8-type-checking-2.1.0.tar.gz", hash = "sha256:2c4835e606086af680826fa741fbc1c5d0364f6be18d3419db7131f19c780479"},
-    {file = "flake8_type_checking-2.1.0-py3-none-any.whl", hash = "sha256:32fa12dca8e6a5489d9ad31f99b3fc3b66697f9685a2a2a39d19f9a8b7df90ed"},
-]
-freezegun = [
-    {file = "freezegun-1.2.1-py3-none-any.whl", hash = "sha256:15103a67dfa868ad809a8f508146e396be2995172d25f927e48ce51c0bf5cb09"},
-    {file = "freezegun-1.2.1.tar.gz", hash = "sha256:b4c64efb275e6bc68dc6e771b17ffe0ff0f90b81a2a5189043550b6519926ba4"},
-]
-ghp-import = [
-    {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"},
-    {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"},
-]
-gitdb = [
-    {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
-    {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"},
-]
-gitpython = [
-    {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"},
-    {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"},
-]
-greenlet = [
-    {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"},
-    {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"},
-    {file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"},
-    {file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"},
-    {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"},
-    {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"},
-    {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"},
-    {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"},
-    {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"},
-    {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"},
-    {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"},
-    {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"},
-    {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"},
-    {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"},
-    {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"},
-    {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"},
-    {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"},
-    {file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"},
-    {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"},
-    {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"},
-    {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"},
-    {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"},
-    {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"},
-    {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"},
-    {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"},
-    {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"},
-    {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"},
-    {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"},
-    {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"},
-    {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"},
-    {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"},
-    {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"},
-    {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"},
-    {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"},
-    {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"},
-    {file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"},
-    {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"},
-    {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"},
-    {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"},
-    {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"},
-    {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"},
-    {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"},
-    {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"},
-    {file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"},
-    {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"},
-    {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"},
-    {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"},
-    {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"},
-    {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"},
-    {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"},
-]
-griffe = []
-gunicorn = [
-    {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"},
-    {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"},
-]
-h11 = [
-    {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"},
-    {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"},
-]
-hiredis = [
-    {file = "hiredis-2.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b4c8b0bc5841e578d5fb32a16e0c305359b987b850a06964bd5a62739d688048"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0adea425b764a08270820531ec2218d0508f8ae15a448568109ffcae050fee26"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3d55e36715ff06cdc0ab62f9591607c4324297b6b6ce5b58cb9928b3defe30ea"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:5d2a48c80cf5a338d58aae3c16872f4d452345e18350143b3bf7216d33ba7b99"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:240ce6dc19835971f38caf94b5738092cb1e641f8150a9ef9251b7825506cb05"},
-    {file = "hiredis-2.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5dc7a94bb11096bc4bffd41a3c4f2b958257085c01522aa81140c68b8bf1630a"},
-    {file = "hiredis-2.0.0-cp36-cp36m-win32.whl", hash = "sha256:139705ce59d94eef2ceae9fd2ad58710b02aee91e7fa0ccb485665ca0ecbec63"},
-    {file = "hiredis-2.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c39c46d9e44447181cd502a35aad2bb178dbf1b1f86cf4db639d7b9614f837c6"},
-    {file = "hiredis-2.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adf4dd19d8875ac147bf926c727215a0faf21490b22c053db464e0bf0deb0485"},
-    {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f41827028901814c709e744060843c77e78a3aca1e0d6875d2562372fcb405a"},
-    {file = "hiredis-2.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:508999bec4422e646b05c95c598b64bdbef1edf0d2b715450a078ba21b385bcc"},
-    {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0d5109337e1db373a892fdcf78eb145ffb6bbd66bb51989ec36117b9f7f9b579"},
-    {file = "hiredis-2.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:04026461eae67fdefa1949b7332e488224eac9e8f2b5c58c98b54d29af22093e"},
-    {file = "hiredis-2.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a00514362df15af041cc06e97aebabf2895e0a7c42c83c21894be12b84402d79"},
-    {file = "hiredis-2.0.0-cp37-cp37m-win32.whl", hash = "sha256:09004096e953d7ebd508cded79f6b21e05dff5d7361771f59269425108e703bc"},
-    {file = "hiredis-2.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f8196f739092a78e4f6b1b2172679ed3343c39c61a3e9d722ce6fcf1dac2824a"},
-    {file = "hiredis-2.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:294a6697dfa41a8cba4c365dd3715abc54d29a86a40ec6405d677ca853307cfb"},
-    {file = "hiredis-2.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3dddf681284fe16d047d3ad37415b2e9ccdc6c8986c8062dbe51ab9a358b50a5"},
-    {file = "hiredis-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:dcef843f8de4e2ff5e35e96ec2a4abbdf403bd0f732ead127bd27e51f38ac298"},
-    {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:87c7c10d186f1743a8fd6a971ab6525d60abd5d5d200f31e073cd5e94d7e7a9d"},
-    {file = "hiredis-2.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:7f0055f1809b911ab347a25d786deff5e10e9cf083c3c3fd2dd04e8612e8d9db"},
-    {file = "hiredis-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11d119507bb54e81f375e638225a2c057dda748f2b1deef05c2b1a5d42686048"},
-    {file = "hiredis-2.0.0-cp38-cp38-win32.whl", hash = "sha256:7492af15f71f75ee93d2a618ca53fea8be85e7b625e323315169977fae752426"},
-    {file = "hiredis-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:65d653df249a2f95673976e4e9dd7ce10de61cfc6e64fa7eeaa6891a9559c581"},
-    {file = "hiredis-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8427a5e9062ba66fc2c62fb19a72276cf12c780e8db2b0956ea909c48acff5"},
-    {file = "hiredis-2.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:3f5f7e3a4ab824e3de1e1700f05ad76ee465f5f11f5db61c4b297ec29e692b2e"},
-    {file = "hiredis-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e3447d9e074abf0e3cd85aef8131e01ab93f9f0e86654db7ac8a3f73c63706ce"},
-    {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:8b42c0dc927b8d7c0eb59f97e6e34408e53bc489f9f90e66e568f329bff3e443"},
-    {file = "hiredis-2.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b84f29971f0ad4adaee391c6364e6f780d5aae7e9226d41964b26b49376071d0"},
-    {file = "hiredis-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0b39ec237459922c6544d071cdcf92cbb5bc6685a30e7c6d985d8a3e3a75326e"},
-    {file = "hiredis-2.0.0-cp39-cp39-win32.whl", hash = "sha256:a7928283143a401e72a4fad43ecc85b35c27ae699cf5d54d39e1e72d97460e1d"},
-    {file = "hiredis-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:a4ee8000454ad4486fb9f28b0cab7fa1cd796fc36d639882d0b34109b5b3aec9"},
-    {file = "hiredis-2.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f03d4dadd595f7a69a75709bc81902673fa31964c75f93af74feac2f134cc54"},
-    {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:04927a4c651a0e9ec11c68e4427d917e44ff101f761cd3b5bc76f86aaa431d27"},
-    {file = "hiredis-2.0.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a39efc3ade8c1fb27c097fd112baf09d7fd70b8cb10ef1de4da6efbe066d381d"},
-    {file = "hiredis-2.0.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:07bbf9bdcb82239f319b1f09e8ef4bdfaec50ed7d7ea51a56438f39193271163"},
-    {file = "hiredis-2.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:807b3096205c7cec861c8803a6738e33ed86c9aae76cac0e19454245a6bbbc0a"},
-    {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:1233e303645f468e399ec906b6b48ab7cd8391aae2d08daadbb5cad6ace4bd87"},
-    {file = "hiredis-2.0.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cb2126603091902767d96bcb74093bd8b14982f41809f85c9b96e519c7e1dc41"},
-    {file = "hiredis-2.0.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:f52010e0a44e3d8530437e7da38d11fb822acfb0d5b12e9cd5ba655509937ca0"},
-    {file = "hiredis-2.0.0.tar.gz", hash = "sha256:81d6d8e39695f2c37954d1011c0480ef7cf444d4e3ae24bc5e89ee5de360139a"},
-]
-httpcore = [
-    {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"},
-    {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"},
-]
-httptools = [
-    {file = "httptools-0.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcddfe70553be717d9745990dfdb194e22ee0f60eb8f48c0794e7bfeda30d2d5"},
-    {file = "httptools-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1ee0b459257e222b878a6c09ccf233957d3a4dcb883b0847640af98d2d9aac23"},
-    {file = "httptools-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceafd5e960b39c7e0d160a1936b68eb87c5e79b3979d66e774f0c77d4d8faaed"},
-    {file = "httptools-0.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fdb9f9ed79bc6f46b021b3319184699ba1a22410a82204e6e89c774530069683"},
-    {file = "httptools-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:abe829275cdd4174b4c4e65ad718715d449e308d59793bf3a931ee1bf7e7b86c"},
-    {file = "httptools-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7af6bdbd21a2a25d6784f6d67f44f5df33ef39b6159543b9f9064d365c01f919"},
-    {file = "httptools-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d1fe6b6661022fd6cac541f54a4237496b246e6f1c0a6b41998ee08a1135afe"},
-    {file = "httptools-0.4.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:48e48530d9b995a84d1d89ae6b3ec4e59ea7d494b150ac3bbc5e2ac4acce92cd"},
-    {file = "httptools-0.4.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a113789e53ac1fa26edf99856a61e4c493868e125ae0dd6354cf518948fbbd5c"},
-    {file = "httptools-0.4.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8e2eb957787cbb614a0f006bfc5798ff1d90ac7c4dd24854c84edbdc8c02369e"},
-    {file = "httptools-0.4.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:7ee9f226acab9085037582c059d66769862706e8e8cd2340470ceb8b3850873d"},
-    {file = "httptools-0.4.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:701e66b59dd21a32a274771238025d58db7e2b6ecebbab64ceff51b8e31527ae"},
-    {file = "httptools-0.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6a1a7dfc1f9c78a833e2c4904757a0f47ce25d08634dd2a52af394eefe5f9777"},
-    {file = "httptools-0.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:903f739c9fb78dab8970b0f3ea51f21955b24b45afa77b22ff0e172fc11ef111"},
-    {file = "httptools-0.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54bbd295f031b866b9799dd39cb45deee81aca036c9bff9f58ca06726f6494f1"},
-    {file = "httptools-0.4.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3194f6d6443befa8d4db16c1946b2fc428a3ceb8ab32eb6f09a59f86104dc1a0"},
-    {file = "httptools-0.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cd1295f52971097f757edfbfce827b6dbbfb0f7a74901ee7d4933dff5ad4c9af"},
-    {file = "httptools-0.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:20a45bcf22452a10fa8d58b7dbdb474381f6946bf5b8933e3662d572bc61bae4"},
-    {file = "httptools-0.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d1f27bb0f75bef722d6e22dc609612bfa2f994541621cd2163f8c943b6463dfe"},
-    {file = "httptools-0.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7f7bfb74718f52d5ed47d608d507bf66d3bc01d4a8b3e6dd7134daaae129357b"},
-    {file = "httptools-0.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a522d12e2ddbc2e91842ffb454a1aeb0d47607972c7d8fc88bd0838d97fb8a2a"},
-    {file = "httptools-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2db44a0b294d317199e9f80123e72c6b005c55b625b57fae36de68670090fa48"},
-    {file = "httptools-0.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c286985b5e194ca0ebb2908d71464b9be8f17cc66d6d3e330e8d5407248f56ad"},
-    {file = "httptools-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3a4e165ca6204f34856b765d515d558dc84f1352033b8721e8d06c3e44930c3"},
-    {file = "httptools-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:72aa3fbe636b16d22e04b5a9d24711b043495e0ecfe58080addf23a1a37f3409"},
-    {file = "httptools-0.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9967d9758df505975913304c434cb9ab21e2c609ad859eb921f2f615a038c8de"},
-    {file = "httptools-0.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f72b5d24d6730035128b238decdc4c0f2104b7056a7ca55cf047c106842ec890"},
-    {file = "httptools-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:29bf97a5c532da9c7a04de2c7a9c31d1d54f3abd65a464119b680206bbbb1055"},
-    {file = "httptools-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98993805f1e3cdb53de4eed02b55dcc953cdf017ba7bbb2fd89226c086a6d855"},
-    {file = "httptools-0.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d9b90bf58f3ba04e60321a23a8723a1ff2a9377502535e70495e5ada8e6e6722"},
-    {file = "httptools-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a99346ebcb801b213c591540837340bdf6fd060a8687518d01c607d338b7424"},
-    {file = "httptools-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:645373c070080e632480a3d251d892cb795be3d3a15f86975d0f1aca56fd230d"},
-    {file = "httptools-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:34d2903dd2a3dd85d33705b6fde40bf91fc44411661283763fd0746723963c83"},
-    {file = "httptools-0.4.0.tar.gz", hash = "sha256:2c9a930c378b3d15d6b695fb95ebcff81a7395b4f9775c4f10a076beb0b2c1ff"},
-]
-httpx = [
-    {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"},
-    {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"},
-]
-hypothesis = [
-    {file = "hypothesis-6.53.0-py3-none-any.whl", hash = "sha256:9c2b073731010cc00bcf060e71bd8f9507f6fe8f6d173f454634d1b91c537412"},
-    {file = "hypothesis-6.53.0.tar.gz", hash = "sha256:39f225530c28fe484a59c39839c8a2fb414f2ecfd4743f75d82f55e0f09268c0"},
-]
-identify = [
-    {file = "identify-2.5.2-py2.py3-none-any.whl", hash = "sha256:feaa9db2dc0ce333b453ce171c0cf1247bbfde2c55fc6bb785022d411a1b78b5"},
-    {file = "identify-2.5.2.tar.gz", hash = "sha256:a3d4c096b384d50d5e6dc5bc8b9bc44f1f61cefebd750a7b3e9f939b53fb214d"},
-]
-idna = [
-    {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
-    {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
-]
-importlib-metadata = [
-    {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"},
-    {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"},
-]
-iniconfig = [
-    {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
-    {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
-]
-isort = [
-    {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
-    {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
-]
-jinja2 = [
-    {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
-    {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
-]
-lazy-object-proxy = [
-    {file = "lazy-object-proxy-1.7.1.tar.gz", hash = "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4"},
-    {file = "lazy_object_proxy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b"},
-    {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36"},
-    {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb"},
-    {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443"},
-    {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b"},
-    {file = "lazy_object_proxy-1.7.1-cp310-cp310-win32.whl", hash = "sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9"},
-    {file = "lazy_object_proxy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd"},
-    {file = "lazy_object_proxy-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442"},
-    {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c"},
-    {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44"},
-    {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1"},
-    {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc"},
-    {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb"},
-    {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35"},
-    {file = "lazy_object_proxy-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0"},
-    {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6"},
-    {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c"},
-    {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42"},
-    {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029"},
-    {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69"},
-    {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28"},
-    {file = "lazy_object_proxy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a"},
-    {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e"},
-    {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38"},
-    {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7"},
-    {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a"},
-    {file = "lazy_object_proxy-1.7.1-cp38-cp38-win32.whl", hash = "sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55"},
-    {file = "lazy_object_proxy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148"},
-    {file = "lazy_object_proxy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de"},
-    {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad"},
-    {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1"},
-    {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8"},
-    {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09"},
-    {file = "lazy_object_proxy-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f"},
-    {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"},
-    {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"},
-]
-mako = [
-    {file = "Mako-1.2.1-py3-none-any.whl", hash = "sha256:df3921c3081b013c8a2d5ff03c18375651684921ae83fd12e64800b7da923257"},
-    {file = "Mako-1.2.1.tar.gz", hash = "sha256:f054a5ff4743492f1aa9ecc47172cb33b42b9d993cffcc146c9de17e717b0307"},
-]
-markdown = [
-    {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"},
-    {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"},
-]
-markupsafe = [
-    {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"},
-    {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"},
-    {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"},
-    {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"},
-    {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"},
-    {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"},
-]
-mccabe = [
-    {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
-    {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
-]
-mergedeep = [
-    {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"},
-    {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"},
-]
-mkdocs = [
-    {file = "mkdocs-1.3.1-py3-none-any.whl", hash = "sha256:fda92466393127d2da830bc6edc3a625a14b436316d1caf347690648e774c4f0"},
-    {file = "mkdocs-1.3.1.tar.gz", hash = "sha256:a41a2ff25ce3bbacc953f9844ba07d106233cd76c88bac1f59cb1564ac0d87ed"},
-]
-mkdocs-autorefs = []
-mkdocs-material = [
-    {file = "mkdocs-material-8.3.9.tar.gz", hash = "sha256:dc82b667d2a83f0de581b46a6d0949732ab77e7638b87ea35b770b33bc02e75a"},
-    {file = "mkdocs_material-8.3.9-py2.py3-none-any.whl", hash = "sha256:263f2721f3abe533b61f7c8bed435a0462620912742c919821ac2d698b4bfe67"},
-]
-mkdocs-material-extensions = [
-    {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"},
-    {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"},
-]
-mkdocstrings = []
-mkdocstrings-python = []
-mypy = [
-    {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"},
-    {file = "mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"},
-    {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"},
-    {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"},
-    {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = "sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"},
-    {file = "mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"},
-    {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"},
-    {file = "mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"},
-    {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"},
-    {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"},
-    {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = "sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"},
-    {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"},
-    {file = "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"},
-    {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"},
-    {file = "mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"},
-    {file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"},
-    {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"},
-    {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"},
-    {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"},
-    {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"},
-    {file = "mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"},
-    {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"},
-    {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"},
-]
-mypy-extensions = [
-    {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
-    {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
-]
-nodeenv = [
-    {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
-    {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
-]
-openapi-schema-pydantic = [
-    {file = "openapi-schema-pydantic-1.2.4.tar.gz", hash = "sha256:3e22cf58b74a69f752cc7e5f1537f6e44164282db2700cbbcd3bb99ddd065196"},
-    {file = "openapi_schema_pydantic-1.2.4-py3-none-any.whl", hash = "sha256:a932ecc5dcbb308950282088956e94dea069c9823c84e507d64f6b622222098c"},
-]
-orjson = [
-    {file = "orjson-3.7.11-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:51e00a59dd6486c40f395da07633718f50b85af414e1add751f007dde6248090"},
-    {file = "orjson-3.7.11-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:c84d096f800d8cf062f8f514bb89baa1f067259ad8f71889b1d204039c2e2dd7"},
-    {file = "orjson-3.7.11-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7051f5259aeef76492763a458d3d05efe820c0d20439aa3d3396b427fb40f85d"},
-    {file = "orjson-3.7.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5b9ed454bf5237ad4bb0ec2170329a9a74dab065eaf2a2c31b84a7eff96c72"},
-    {file = "orjson-3.7.11-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3f8c767331039e4e12324a6af41d3538c503503bdf107f40d4e292bb5542ff90"},
-    {file = "orjson-3.7.11-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:fd9508534ae29b368a60deb7668a65801869bc96635ee64550b7c119205984c0"},
-    {file = "orjson-3.7.11-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7168059c4a02f3cbe2ce3a26908e199e38fe55feb325ee7484c61f15719ec85e"},
-    {file = "orjson-3.7.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:87ab1b07ec863d870e8b2abcbae4da62aae2aed3a5119938a4b6309aa94ec973"},
-    {file = "orjson-3.7.11-cp310-none-win_amd64.whl", hash = "sha256:01863ff99f67afdb1a3a6a777d2de5a81f9b8203db70ef450b25363e7db48442"},
-    {file = "orjson-3.7.11-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a7962f2fb550a11f3e785c0aabfde6c2e7f823995f9d2d71f759708c6117a902"},
-    {file = "orjson-3.7.11-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:4d33d13b0521ddca84b58c9a75c18e854b79480a6a13e6d0c105cfc0d4e8b2a7"},
-    {file = "orjson-3.7.11-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b62b758b220f5deb6c90381baed8afec5d9b72e916886d73e944b78be3524f39"},
-    {file = "orjson-3.7.11-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9390a69422ec12264bf76469c1cbd006a8672a552e7cc393664c66011343da71"},
-    {file = "orjson-3.7.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a48e232130437fdbfc6c025cbf8aaac92c13ba1d9f7bd4445e177aae2f282028"},
-    {file = "orjson-3.7.11-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:da1637f98a5e2ac6fe1a722f990474fbf05ca15a21f8bfbc2d06a14c62f74bfa"},
-    {file = "orjson-3.7.11-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:c2f52563dcb0c500f9c9a028459950e1d14b66f504f8e5cdb50122a2538b38b0"},
-    {file = "orjson-3.7.11-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fbebb207a9d104efbd5e1b3e7dc3b63723ebbcd73f589f01bc7466b36c185e51"},
-    {file = "orjson-3.7.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18026b1b1a0c78e277b07230e2713af79ec4b9a8225a778983fd2f8455ae0e09"},
-    {file = "orjson-3.7.11-cp37-none-win_amd64.whl", hash = "sha256:77dff65c25dffea9e7dd9d41d3b55248dad2f6bf622d89e8ebb19a76780f9cd7"},
-    {file = "orjson-3.7.11-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:f76e9d7a0c1a586999094bbfbed5c17246dc217ffea061356b7056d3805b31b8"},
-    {file = "orjson-3.7.11-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:0479adf8c7f18ba52ce30b64a03de2f1facb85b7a620832a0c8d5e01326f32bd"},
-    {file = "orjson-3.7.11-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:113add34e29ef4a0f8538d67dc4992a950a7b4f49e556525cd8247c82a3d3f6c"},
-    {file = "orjson-3.7.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49cc542d3d2105fb7fb90a445ebe68f38cd846e6d86ea2c6e8724afbb9f052fc"},
-    {file = "orjson-3.7.11-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:42e41ceda915e1602c0c8f5b00b0f852c8c0bb2f9262138e13bf02128de8a0b7"},
-    {file = "orjson-3.7.11-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:df7f9b5d8c35e59c7df137587ebad2ec1d54947bbc6c7b1c4e7083c7012e3bba"},
-    {file = "orjson-3.7.11-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:17b77c2155da5186c18e3fe2ed5dc0d6babde5758fae81934a0a348c26430849"},
-    {file = "orjson-3.7.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7fcbfc44a7fd94f55652e6705e03271c43b2a171220ee31d6447721b690acd9"},
-    {file = "orjson-3.7.11-cp38-none-win_amd64.whl", hash = "sha256:78177a47c186cd6188e624477cbaf91c941a03047afe8b8816091495bc6481ce"},
-    {file = "orjson-3.7.11-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:3a5324f0da7b15df64b6b586608af503c7fa8b0cfb6e2b9f4f4fdc4855af6978"},
-    {file = "orjson-3.7.11-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:5c063c9777b5f795b9d59ba8d58b44548e3f2e9a00a9e3ddddb8145d9eb57b68"},
-    {file = "orjson-3.7.11-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf99de2f61fb8014a755640f9e2768890baf9aa1365742ccc3b9e6a19f528b16"},
-    {file = "orjson-3.7.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71bc8155a08239a655d4cf821f106a0821d4eb566f7c7a0163ccc41763488116"},
-    {file = "orjson-3.7.11-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:02a4875acb6e5f6109c40f7b9e27313bbe67f2c3e4d5ea01390ae9399061d913"},
-    {file = "orjson-3.7.11-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:6fc774923377e8594bf54291854919155e3c785081e95efc6cfcc9d76657a906"},
-    {file = "orjson-3.7.11-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:01c77aab9ed881cc4322aca6ca3c534473f5334e5211b8dbb8622769595439ce"},
-    {file = "orjson-3.7.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee69490145cc7d338a376a342415bba2f0c4d219f213c23fb64948cc40d9f255"},
-    {file = "orjson-3.7.11-cp39-none-win_amd64.whl", hash = "sha256:145367654c236127f59894025a5354bce124bd6ee1d5417c28635969b7628482"},
-    {file = "orjson-3.7.11.tar.gz", hash = "sha256:b4e6517861a397d9a1c72e7f8e8c72d6baf96d732a64637fb090ea49ead6042c"},
-]
-packaging = [
-    {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
-    {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
-]
-pathspec = [
-    {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
-    {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
-]
-pbr = [
-    {file = "pbr-5.9.0-py2.py3-none-any.whl", hash = "sha256:e547125940bcc052856ded43be8e101f63828c2d94239ffbe2b327ba3d5ccf0a"},
-    {file = "pbr-5.9.0.tar.gz", hash = "sha256:e8dca2f4b43560edef58813969f52a56cef023146cbb8931626db80e6c1c4308"},
-]
-picologging = [
-    {file = "picologging-0.6.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:3972954562b77ab40e4c0215c15b08c12cc8af5dc6ae70ba2a2009cd4e1c6f4d"},
-    {file = "picologging-0.6.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:e69833066cdd6146a88141f15d374b067d7415a5ac0aa17d7b0afd075957307f"},
-    {file = "picologging-0.6.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a08d7feea5a33665b7ab3104f956d24eb18bfc045af0c44811a72d349fcc085"},
-    {file = "picologging-0.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83647233aad50b4b77c77752a268b46f344376d0464376a3f5ddf04c060c5df8"},
-    {file = "picologging-0.6.0-cp310-cp310-win32.whl", hash = "sha256:f9c36932d03bbb0207e1ba9f7c3d613554a498ad993bde5b2e26a671e13eb6be"},
-    {file = "picologging-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:69cfb58d14271587e97fbce56af460441feaa00423119808511aa01787949800"},
-    {file = "picologging-0.6.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:aab351781f51202b5a0b82d0f941df0d64f597f6de3bf2366cd8bae553379315"},
-    {file = "picologging-0.6.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:a3b0232a0256d2ea65e97fd068ce12ee9973c5918dac80a2624cdac457fc213a"},
-    {file = "picologging-0.6.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdb0954f82b6749561a9f3870726f75d67b85149342f4ff23fa07a99323d137d"},
-    {file = "picologging-0.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be7eed35e3982e4a495b0ad5b9076204d0faf208c8b8eab3017001c610042bfd"},
-    {file = "picologging-0.6.0-cp311-cp311-win32.whl", hash = "sha256:a543c9ccd5b5bef20b4adb2d1e904fac7e670ffa9414c78300bff18857fb88ba"},
-    {file = "picologging-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:848ffd19b9b15291f06bfaa37f35d6194365f57f26d2424fab9aac81a1d6329a"},
-    {file = "picologging-0.6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:ba291ee71709e9c00cd4406bcff1f936bac398a084b8d4b509bc72a713746f18"},
-    {file = "picologging-0.6.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:957b3034bd7e72269a9996f9d2243efd6bd2b7cd0cfe9a68db4d7171e461db10"},
-    {file = "picologging-0.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49487d3d00046fed9621561116893f3dd115eb6979f7429875887429e5dbcf60"},
-    {file = "picologging-0.6.0-cp37-cp37m-win32.whl", hash = "sha256:e2eca9862d5c12d943cdf70d6d5c59c1a66bc198b7701f16696e895921a967b9"},
-    {file = "picologging-0.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:fb503387045a5a0c7987a550be8c6bddce16df075ef237e543267c1c077f07da"},
-    {file = "picologging-0.6.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:5edb5a1ae75a0fe30785d211645da34fae66984afea5d6791dad9fa85d63d6ec"},
-    {file = "picologging-0.6.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:8cfa79bd019f79f0d2f532dab1ac03e933df71291a10d6edf2d69d383a377fed"},
-    {file = "picologging-0.6.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a07e78b8941facf8df9ac3bd1f78c5284db54404a03f9e382c0f0f5f81c2f4d"},
-    {file = "picologging-0.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c05dc2a507d0a4310a7ff20e197a4b5e3090aa9f2411c49a30ed685f88dbe67"},
-    {file = "picologging-0.6.0-cp38-cp38-win32.whl", hash = "sha256:a3a370e37cb91e32de43f23e4eb7cd82fa5302df53b7889e8fe08f7a3c48e983"},
-    {file = "picologging-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:63fc3a2b2a03cf56beb797818a30d9efb48ffd4b10e08986bfe60e1db68e9c7f"},
-    {file = "picologging-0.6.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:31590a7e800482992239767a016c8b61c29e43b580d4263ed3de606856033d61"},
-    {file = "picologging-0.6.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:96c595d2951e3e5178a48e515547150b94ee66c7c4ece3c17da9dcfbdbc304fe"},
-    {file = "picologging-0.6.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abc6646e4d2e2f3108902711106098502914ad34f5b9c937572c377cd05ebe2d"},
-    {file = "picologging-0.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29f45b2e8c2ce3b2eee86fe25afd361a017722441773a9bede80da1024b8c096"},
-    {file = "picologging-0.6.0-cp39-cp39-win32.whl", hash = "sha256:a02605cb6974a2747b3576547268d0d0caca35841cac6b714c32a18092d738c8"},
-    {file = "picologging-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:be34fb90fac1d5fdc645bcd78b34dd542b98f1967107ba70277b25dd17cc0b06"},
-    {file = "picologging-0.6.0.tar.gz", hash = "sha256:ba9808cbd2c87e69e7679a3b584201854e97679826672c2d4ccd376f8a1387fb"},
-]
-platformdirs = [
-    {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
-    {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
-]
-pluggy = [
-    {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
-    {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
-]
-pre-commit = []
-py = [
-    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
-    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
-]
-pycodestyle = [
-    {file = "pycodestyle-2.9.0-py2.py3-none-any.whl", hash = "sha256:289cdc0969d589d90752582bef6dff57c5fbc6949ee8b013ad6d6449a8ae9437"},
-    {file = "pycodestyle-2.9.0.tar.gz", hash = "sha256:beaba44501f89d785be791c9462553f06958a221d166c64e1f107320f839acc2"},
-]
-pydantic = [
-    {file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"},
-    {file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"},
-    {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"},
-    {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"},
-    {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"},
-    {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"},
-    {file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"},
-    {file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"},
-    {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"},
-    {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"},
-    {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"},
-    {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"},
-    {file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"},
-    {file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"},
-    {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"},
-    {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"},
-    {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"},
-    {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"},
-    {file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"},
-    {file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"},
-    {file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"},
-    {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"},
-    {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"},
-    {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"},
-    {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"},
-    {file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"},
-    {file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"},
-    {file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"},
-    {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"},
-    {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"},
-    {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"},
-    {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"},
-    {file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"},
-    {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"},
-    {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"},
-]
-pydantic-factories = [
-    {file = "pydantic-factories-1.4.1.tar.gz", hash = "sha256:0ac03224ba5677de3d43d304beb4648c0dcbdf0e997e5c6d34af0d09794ccb81"},
-    {file = "pydantic_factories-1.4.1-py3-none-any.whl", hash = "sha256:775e550c359ad32cb23e723dfba7b3bda2043880e5db7dd64d2d2948f87b894f"},
-]
-pyflakes = [
-    {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"},
-    {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"},
-]
-pygments = [
-    {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"},
-    {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"},
-]
-pylint = [
-    {file = "pylint-2.14.5-py3-none-any.whl", hash = "sha256:fabe30000de7d07636d2e82c9a518ad5ad7908590fe135ace169b44839c15f90"},
-    {file = "pylint-2.14.5.tar.gz", hash = "sha256:487ce2192eee48211269a0e976421f334cf94de1806ca9d0a99449adcdf0285e"},
-]
-pymdown-extensions = [
-    {file = "pymdown_extensions-9.5-py3-none-any.whl", hash = "sha256:ec141c0f4983755349f0c8710416348d1a13753976c028186ed14f190c8061c4"},
-    {file = "pymdown_extensions-9.5.tar.gz", hash = "sha256:3ef2d998c0d5fa7eb09291926d90d69391283561cf6306f85cd588a5eb5befa0"},
-]
-pyparsing = [
-    {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
-    {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
-]
-pytest = [
-    {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"},
-    {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"},
-]
-pytest-asyncio = []
-pytest-cov = [
-    {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"},
-    {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"},
-]
-pytest-dotenv = [
-    {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"},
-    {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"},
-]
-pytest-mock = []
-python-dateutil = [
-    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
-    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
-]
-python-dotenv = [
-    {file = "python-dotenv-0.20.0.tar.gz", hash = "sha256:b7e3b04a59693c42c36f9ab1cc2acc46fa5df8c78e178fc33a8d4cd05c8d498f"},
-    {file = "python_dotenv-0.20.0-py3-none-any.whl", hash = "sha256:d92a187be61fe482e4fd675b6d52200e7be63a12b724abbf931a40ce4fa92938"},
-]
-python-multipart = [
-    {file = "python-multipart-0.0.5.tar.gz", hash = "sha256:f7bb5f611fc600d15fa47b3974c8aa16e93724513b49b5f95c81e6624c83fa43"},
-]
-pyupgrade = [
-    {file = "pyupgrade-2.37.3-py2.py3-none-any.whl", hash = "sha256:9746efd064dbf53d7f86d6f88a1d48120f58dbfc378f517768634740ea2225e2"},
-    {file = "pyupgrade-2.37.3.tar.gz", hash = "sha256:1414c7a7c558004cf610e6180716b876814b639b5a5789c3da023c5cdaebcd49"},
-]
-pyyaml = [
-    {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
-    {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
-    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
-    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
-    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
-    {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
-    {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
-    {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
-    {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
-    {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
-    {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
-    {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
-    {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
-    {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
-    {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
-    {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
-    {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
-    {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
-    {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
-    {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
-    {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
-    {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
-    {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
-    {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
-    {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
-    {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
-    {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
-    {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
-    {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
-    {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
-    {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
-    {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
-    {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
-]
-pyyaml-env-tag = [
-    {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"},
-    {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"},
-]
-redis = [
-    {file = "redis-4.3.4-py3-none-any.whl", hash = "sha256:a52d5694c9eb4292770084fa8c863f79367ca19884b329ab574d5cb2036b3e54"},
-    {file = "redis-4.3.4.tar.gz", hash = "sha256:ddf27071df4adf3821c4f2ca59d67525c3a82e5f268bed97b813cb4fabf87880"},
-]
-rfc3986 = [
-    {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
-    {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
-]
-rich = []
-six = [
-    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
-    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
-]
-slotscheck = []
-smmap = [
-    {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"},
-    {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"},
-]
-sniffio = [
-    {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"},
-    {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"},
-]
-sortedcontainers = [
-    {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
-    {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
-]
-SQLAlchemy = []
-starlette = [
-    {file = "starlette-0.20.4-py3-none-any.whl", hash = "sha256:c0414d5a56297d37f3db96a84034d61ce29889b9eaccf65eb98a0b39441fcaa3"},
-    {file = "starlette-0.20.4.tar.gz", hash = "sha256:42fcf3122f998fefce3e2c5ad7e5edbf0f02cf685d646a83a08d404726af5084"},
-]
-starlite = [
-    {file = "starlite-1.7.2-py3-none-any.whl", hash = "sha256:d2a8cf70b8346a4bbb65f4ab05d71f3317d98c2233ca97cfec57377c8487ddc3"},
-    {file = "starlite-1.7.2.tar.gz", hash = "sha256:4af0140a37260386225250a2733effd6de7f3ff45b2ce3b6cd7f5528ba518a98"},
-]
-stevedore = []
-tokenize-rt = [
-    {file = "tokenize_rt-4.2.1-py2.py3-none-any.whl", hash = "sha256:08a27fa032a81cf45e8858d0ac706004fcd523e8463415ddf1442be38e204ea8"},
-    {file = "tokenize_rt-4.2.1.tar.gz", hash = "sha256:0d4f69026fed520f8a1e0103aa36c406ef4661417f20ca643f913e33531b3b94"},
-]
-toml = [
-    {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
-    {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
-]
-tomli = [
-    {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
-    {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
-]
-tomlkit = []
-tox = [
-    {file = "tox-3.25.1-py2.py3-none-any.whl", hash = "sha256:c38e15f4733683a9cc0129fba078633e07eb0961f550a010ada879e95fb32632"},
-    {file = "tox-3.25.1.tar.gz", hash = "sha256:c138327815f53bc6da4fe56baec5f25f00622ae69ef3fe4e1e385720e22486f9"},
-]
-typer = [
-    {file = "typer-0.6.1-py3-none-any.whl", hash = "sha256:54b19e5df18654070a82f8c2aa1da456a4ac16a2a83e6dcd9f170e291c56338e"},
-    {file = "typer-0.6.1.tar.gz", hash = "sha256:2d5720a5e63f73eaf31edaa15f6ab87f35f0690f8ca233017d7d23d743a91d73"},
-]
-types-freezegun = []
-types-pyyaml = [
-    {file = "types-PyYAML-6.0.11.tar.gz", hash = "sha256:7f7da2fd11e9bc1e5e9eb3ea1be84f4849747017a59fc2eee0ea34ed1147c2e0"},
-    {file = "types_PyYAML-6.0.11-py3-none-any.whl", hash = "sha256:8f890028123607379c63550179ddaec4517dc751f4c527a52bb61934bf495989"},
-]
-types-redis = [
-    {file = "types-redis-4.3.12.tar.gz", hash = "sha256:d015eb6b6e10378dfc4fdf0be6aa48649eaa03a8a2aa4bb19ee4dfcef78d58c6"},
-    {file = "types_redis-4.3.12-py3-none-any.whl", hash = "sha256:1f592a503cf11a51c73c1c807af040e0ccb988bb2577d11b18549cb32e86908d"},
-]
-types-requests = [
-    {file = "types-requests-2.28.6.tar.gz", hash = "sha256:cf3383bbd79394bf051a0a9202d6831fa962f186f923c178f7c059e3424bd00e"},
-    {file = "types_requests-2.28.6-py3-none-any.whl", hash = "sha256:d8d7607419cd4b41a7b9497e15e8c0bad78d50df43c48ad25bc526a11518c3a9"},
-]
-types-urllib3 = [
-    {file = "types-urllib3-1.26.19.tar.gz", hash = "sha256:45b307bdb73d2eac0c2fb1386da97e51c9ae7f1474ef35f61024c3084b6bf371"},
-    {file = "types_urllib3-1.26.19-py3-none-any.whl", hash = "sha256:540bf5a42ba09a4a58c406cb2c2c8654b0aadf413f8337fdc184711ab48b900c"},
-]
-typing-extensions = [
-    {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
-    {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
-]
-uvicorn = [
-    {file = "uvicorn-0.18.2-py3-none-any.whl", hash = "sha256:c19a057deb1c5bb060946e2e5c262fc01590c6529c0af2c3d9ce941e89bc30e0"},
-    {file = "uvicorn-0.18.2.tar.gz", hash = "sha256:cade07c403c397f9fe275492a48c1b869efd175d5d8a692df649e6e7e2ed8f4e"},
-]
-uvloop = [
-    {file = "uvloop-0.16.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6224f1401025b748ffecb7a6e2652b17768f30b1a6a3f7b44660e5b5b690b12d"},
-    {file = "uvloop-0.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30ba9dcbd0965f5c812b7c2112a1ddf60cf904c1c160f398e7eed3a6b82dcd9c"},
-    {file = "uvloop-0.16.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bd53f7f5db562f37cd64a3af5012df8cac2c464c97e732ed556800129505bd64"},
-    {file = "uvloop-0.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:772206116b9b57cd625c8a88f2413df2fcfd0b496eb188b82a43bed7af2c2ec9"},
-    {file = "uvloop-0.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b572256409f194521a9895aef274cea88731d14732343da3ecdb175228881638"},
-    {file = "uvloop-0.16.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:04ff57aa137230d8cc968f03481176041ae789308b4d5079118331ab01112450"},
-    {file = "uvloop-0.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a19828c4f15687675ea912cc28bbcb48e9bb907c801873bd1519b96b04fb805"},
-    {file = "uvloop-0.16.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e814ac2c6f9daf4c36eb8e85266859f42174a4ff0d71b99405ed559257750382"},
-    {file = "uvloop-0.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd8f42ea1ea8f4e84d265769089964ddda95eb2bb38b5cbe26712b0616c3edee"},
-    {file = "uvloop-0.16.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:647e481940379eebd314c00440314c81ea547aa636056f554d491e40503c8464"},
-    {file = "uvloop-0.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0d26fa5875d43ddbb0d9d79a447d2ace4180d9e3239788208527c4784f7cab"},
-    {file = "uvloop-0.16.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ccd57ae8db17d677e9e06192e9c9ec4bd2066b77790f9aa7dede2cc4008ee8f"},
-    {file = "uvloop-0.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:089b4834fd299d82d83a25e3335372f12117a7d38525217c2258e9b9f4578897"},
-    {file = "uvloop-0.16.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98d117332cc9e5ea8dfdc2b28b0a23f60370d02e1395f88f40d1effd2cb86c4f"},
-    {file = "uvloop-0.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e5f2e2ff51aefe6c19ee98af12b4ae61f5be456cd24396953244a30880ad861"},
-    {file = "uvloop-0.16.0.tar.gz", hash = "sha256:f74bc20c7b67d1c27c72601c78cf95be99d5c2cdd4514502b4f3eb0933ff1228"},
-]
-virtualenv = [
-    {file = "virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"},
-    {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"},
-]
-watchdog = [
-    {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a735a990a1095f75ca4f36ea2ef2752c99e6ee997c46b0de507ba40a09bf7330"},
-    {file = "watchdog-2.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b17d302850c8d412784d9246cfe8d7e3af6bcd45f958abb2d08a6f8bedf695d"},
-    {file = "watchdog-2.1.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee3e38a6cc050a8830089f79cbec8a3878ec2fe5160cdb2dc8ccb6def8552658"},
-    {file = "watchdog-2.1.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64a27aed691408a6abd83394b38503e8176f69031ca25d64131d8d640a307591"},
-    {file = "watchdog-2.1.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:195fc70c6e41237362ba720e9aaf394f8178bfc7fa68207f112d108edef1af33"},
-    {file = "watchdog-2.1.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bfc4d351e6348d6ec51df007432e6fe80adb53fd41183716017026af03427846"},
-    {file = "watchdog-2.1.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8250546a98388cbc00c3ee3cc5cf96799b5a595270dfcfa855491a64b86ef8c3"},
-    {file = "watchdog-2.1.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:117ffc6ec261639a0209a3252546b12800670d4bf5f84fbd355957a0595fe654"},
-    {file = "watchdog-2.1.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:97f9752208f5154e9e7b76acc8c4f5a58801b338de2af14e7e181ee3b28a5d39"},
-    {file = "watchdog-2.1.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:247dcf1df956daa24828bfea5a138d0e7a7c98b1a47cf1fa5b0c3c16241fcbb7"},
-    {file = "watchdog-2.1.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:226b3c6c468ce72051a4c15a4cc2ef317c32590d82ba0b330403cafd98a62cfd"},
-    {file = "watchdog-2.1.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d9820fe47c20c13e3c9dd544d3706a2a26c02b2b43c993b62fcd8011bcc0adb3"},
-    {file = "watchdog-2.1.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:70af927aa1613ded6a68089a9262a009fbdf819f46d09c1a908d4b36e1ba2b2d"},
-    {file = "watchdog-2.1.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ed80a1628cee19f5cfc6bb74e173f1b4189eb532e705e2a13e3250312a62e0c9"},
-    {file = "watchdog-2.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9f05a5f7c12452f6a27203f76779ae3f46fa30f1dd833037ea8cbc2887c60213"},
-    {file = "watchdog-2.1.9-py3-none-manylinux2014_armv7l.whl", hash = "sha256:255bb5758f7e89b1a13c05a5bceccec2219f8995a3a4c4d6968fe1de6a3b2892"},
-    {file = "watchdog-2.1.9-py3-none-manylinux2014_i686.whl", hash = "sha256:d3dda00aca282b26194bdd0adec21e4c21e916956d972369359ba63ade616153"},
-    {file = "watchdog-2.1.9-py3-none-manylinux2014_ppc64.whl", hash = "sha256:186f6c55abc5e03872ae14c2f294a153ec7292f807af99f57611acc8caa75306"},
-    {file = "watchdog-2.1.9-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:083171652584e1b8829581f965b9b7723ca5f9a2cd7e20271edf264cfd7c1412"},
-    {file = "watchdog-2.1.9-py3-none-manylinux2014_s390x.whl", hash = "sha256:b530ae007a5f5d50b7fbba96634c7ee21abec70dc3e7f0233339c81943848dc1"},
-    {file = "watchdog-2.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:4f4e1c4aa54fb86316a62a87b3378c025e228178d55481d30d857c6c438897d6"},
-    {file = "watchdog-2.1.9-py3-none-win32.whl", hash = "sha256:5952135968519e2447a01875a6f5fc8c03190b24d14ee52b0f4b1682259520b1"},
-    {file = "watchdog-2.1.9-py3-none-win_amd64.whl", hash = "sha256:7a833211f49143c3d336729b0020ffd1274078e94b0ae42e22f596999f50279c"},
-    {file = "watchdog-2.1.9-py3-none-win_ia64.whl", hash = "sha256:ad576a565260d8f99d97f2e64b0f97a48228317095908568a9d5c786c829d428"},
-    {file = "watchdog-2.1.9.tar.gz", hash = "sha256:43ce20ebb36a51f21fa376f76d1d4692452b2527ccd601950d69ed36b9e21609"},
-]
-watchfiles = [
-    {file = "watchfiles-0.16.1-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:1e41c8b4bf3e07c18aa51775b36b718830fa727929529a7d6e5b38cf845a06b4"},
-    {file = "watchfiles-0.16.1-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b2c7ad91a867dd688b9a12097dd6a4f89397b43fccee871152aa67197cc94398"},
-    {file = "watchfiles-0.16.1-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:75a4b9cec1b1c337ea77d4428b29861553d6bf8179923b1bc7e825e217460e2c"},
-    {file = "watchfiles-0.16.1-cp37-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2a3debb19912072799d7ca53e99fc5f090f77948f5601392623b2a416b4c86be"},
-    {file = "watchfiles-0.16.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35f3e411822e14a35f2ef656535aad4e6e79670d6b6ef8e53db958e28916b1fe"},
-    {file = "watchfiles-0.16.1-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9a7a6dc63684ff5ba11f0be0e64f744112c3c7a0baf4ec8f6794f9a6257d21e"},
-    {file = "watchfiles-0.16.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e939a2693404ac11e055f9d1237db8ad7635e2185a6143bde00116e691ea2983"},
-    {file = "watchfiles-0.16.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cd7d2fd9a8f28066edc8db5278f3632eb94d10596af760fa0601631f32b1a41e"},
-    {file = "watchfiles-0.16.1-cp37-abi3-win32.whl", hash = "sha256:f91035a273001390093a09e52274a34695b0d15ee8736183b640bbc3b8a432ab"},
-    {file = "watchfiles-0.16.1-cp37-abi3-win_amd64.whl", hash = "sha256:a8a1809bf910672aa0b7ed6e6045d4fc2cf1e0718b99bc443ef17faa5697b68a"},
-    {file = "watchfiles-0.16.1-cp37-abi3-win_arm64.whl", hash = "sha256:baa6d0c1c5140e1dcf6ff802dd7b09fcd95b358e50d42fabc83d83f719451c54"},
-    {file = "watchfiles-0.16.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5741246ae399a03395aa5ee35480083a4f29d58ffd41dd3395594f8805f8cdbc"},
-    {file = "watchfiles-0.16.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44c6aff58b8a70a26431737e483a54e8e224279b21873388571ed184fe7c91a7"},
-    {file = "watchfiles-0.16.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d1b2d0cf060e5222a930a3e2f40f6577da1d18c085c32741b98a128dc1e72c"},
-    {file = "watchfiles-0.16.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:70159e759f52b65a50c498182dece80364bfd721e839c254c328cbc7a1716616"},
-    {file = "watchfiles-0.16.1-pp39-pypy39_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22af3b915f928ef59d427d7228668f87ac8054ed8200808c73fbcaa4f82d5572"},
-    {file = "watchfiles-0.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a6a1ac96edf5bc3f8e36f4462fc1daad0ec3769ff2adb920571e120e37c91c5"},
-    {file = "watchfiles-0.16.1.tar.gz", hash = "sha256:aed7575e24434c8fec2f2bbb0cecb1521ea1240234d9108db7915a3424d92394"},
-]
-websockets = [
-    {file = "websockets-10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:661f641b44ed315556a2fa630239adfd77bd1b11cb0b9d96ed8ad90b0b1e4978"},
-    {file = "websockets-10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b529fdfa881b69fe563dbd98acce84f3e5a67df13de415e143ef053ff006d500"},
-    {file = "websockets-10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f351c7d7d92f67c0609329ab2735eee0426a03022771b00102816a72715bb00b"},
-    {file = "websockets-10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379e03422178436af4f3abe0aa8f401aa77ae2487843738542a75faf44a31f0c"},
-    {file = "websockets-10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e904c0381c014b914136c492c8fa711ca4cced4e9b3d110e5e7d436d0fc289e8"},
-    {file = "websockets-10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e7e6f2d6fd48422071cc8a6f8542016f350b79cc782752de531577d35e9bd677"},
-    {file = "websockets-10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9c77f0d1436ea4b4dc089ed8335fa141e6a251a92f75f675056dac4ab47a71e"},
-    {file = "websockets-10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e6fa05a680e35d0fcc1470cb070b10e6fe247af54768f488ed93542e71339d6f"},
-    {file = "websockets-10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2f94fa3ae454a63ea3a19f73b95deeebc9f02ba2d5617ca16f0bbdae375cda47"},
-    {file = "websockets-10.3-cp310-cp310-win32.whl", hash = "sha256:6ed1d6f791eabfd9808afea1e068f5e59418e55721db8b7f3bfc39dc831c42ae"},
-    {file = "websockets-10.3-cp310-cp310-win_amd64.whl", hash = "sha256:347974105bbd4ea068106ec65e8e8ebd86f28c19e529d115d89bd8cc5cda3079"},
-    {file = "websockets-10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fab7c640815812ed5f10fbee7abbf58788d602046b7bb3af9b1ac753a6d5e916"},
-    {file = "websockets-10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:994cdb1942a7a4c2e10098d9162948c9e7b235df755de91ca33f6e0481366fdb"},
-    {file = "websockets-10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aad5e300ab32036eb3fdc350ad30877210e2f51bceaca83fb7fef4d2b6c72b79"},
-    {file = "websockets-10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e49ea4c1a9543d2bd8a747ff24411509c29e4bdcde05b5b0895e2120cb1a761d"},
-    {file = "websockets-10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6ea6b300a6bdd782e49922d690e11c3669828fe36fc2471408c58b93b5535a98"},
-    {file = "websockets-10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ef5ce841e102278c1c2e98f043db99d6755b1c58bde475516aef3a008ed7f28e"},
-    {file = "websockets-10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d1655a6fc7aecd333b079d00fb3c8132d18988e47f19740c69303bf02e9883c6"},
-    {file = "websockets-10.3-cp37-cp37m-win32.whl", hash = "sha256:83e5ca0d5b743cde3d29fda74ccab37bdd0911f25bd4cdf09ff8b51b7b4f2fa1"},
-    {file = "websockets-10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:da4377904a3379f0c1b75a965fff23b28315bcd516d27f99a803720dfebd94d4"},
-    {file = "websockets-10.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a1e15b230c3613e8ea82c9fc6941b2093e8eb939dd794c02754d33980ba81e36"},
-    {file = "websockets-10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:31564a67c3e4005f27815634343df688b25705cccb22bc1db621c781ddc64c69"},
-    {file = "websockets-10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c8d1d14aa0f600b5be363077b621b1b4d1eb3fbf90af83f9281cda668e6ff7fd"},
-    {file = "websockets-10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fbd7d77f8aba46d43245e86dd91a8970eac4fb74c473f8e30e9c07581f852b2"},
-    {file = "websockets-10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:210aad7fdd381c52e58777560860c7e6110b6174488ef1d4b681c08b68bf7f8c"},
-    {file = "websockets-10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6075fd24df23133c1b078e08a9b04a3bc40b31a8def4ee0b9f2c8865acce913e"},
-    {file = "websockets-10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f6d96fdb0975044fdd7953b35d003b03f9e2bcf85f2d2cf86285ece53e9f991"},
-    {file = "websockets-10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c7250848ce69559756ad0086a37b82c986cd33c2d344ab87fea596c5ac6d9442"},
-    {file = "websockets-10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:28dd20b938a57c3124028680dc1600c197294da5db4292c76a0b48efb3ed7f76"},
-    {file = "websockets-10.3-cp38-cp38-win32.whl", hash = "sha256:54c000abeaff6d8771a4e2cef40900919908ea7b6b6a30eae72752607c6db559"},
-    {file = "websockets-10.3-cp38-cp38-win_amd64.whl", hash = "sha256:7ab36e17af592eec5747c68ef2722a74c1a4a70f3772bc661079baf4ae30e40d"},
-    {file = "websockets-10.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a141de3d5a92188234afa61653ed0bbd2dde46ad47b15c3042ffb89548e77094"},
-    {file = "websockets-10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:97bc9d41e69a7521a358f9b8e44871f6cdeb42af31815c17aed36372d4eec667"},
-    {file = "websockets-10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d6353ba89cfc657a3f5beabb3b69be226adbb5c6c7a66398e17809b0ce3c4731"},
-    {file = "websockets-10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec2b0ab7edc8cd4b0eb428b38ed89079bdc20c6bdb5f889d353011038caac2f9"},
-    {file = "websockets-10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:85506b3328a9e083cc0a0fb3ba27e33c8db78341b3eb12eb72e8afd166c36680"},
-    {file = "websockets-10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8af75085b4bc0b5c40c4a3c0e113fa95e84c60f4ed6786cbb675aeb1ee128247"},
-    {file = "websockets-10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07cdc0a5b2549bcfbadb585ad8471ebdc7bdf91e32e34ae3889001c1c106a6af"},
-    {file = "websockets-10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5b936bf552e4f6357f5727579072ff1e1324717902127ffe60c92d29b67b7be3"},
-    {file = "websockets-10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e4e08305bfd76ba8edab08dcc6496f40674f44eb9d5e23153efa0a35750337e8"},
-    {file = "websockets-10.3-cp39-cp39-win32.whl", hash = "sha256:bb621ec2dbbbe8df78a27dbd9dd7919f9b7d32a73fafcb4d9252fc4637343582"},
-    {file = "websockets-10.3-cp39-cp39-win_amd64.whl", hash = "sha256:51695d3b199cd03098ae5b42833006a0f43dc5418d3102972addc593a783bc02"},
-    {file = "websockets-10.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:907e8247480f287aa9bbc9391bd6de23c906d48af54c8c421df84655eef66af7"},
-    {file = "websockets-10.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b1359aba0ff810d5830d5ab8e2c4a02bebf98a60aa0124fb29aa78cfdb8031f"},
-    {file = "websockets-10.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:93d5ea0b5da8d66d868b32c614d2b52d14304444e39e13a59566d4acb8d6e2e4"},
-    {file = "websockets-10.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7934e055fd5cd9dee60f11d16c8d79c4567315824bacb1246d0208a47eca9755"},
-    {file = "websockets-10.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3eda1cb7e9da1b22588cefff09f0951771d6ee9fa8dbe66f5ae04cc5f26b2b55"},
-    {file = "websockets-10.3.tar.gz", hash = "sha256:fc06cc8073c8e87072138ba1e431300e2d408f054b27047d047b549455066ff4"},
-]
-wrapt = [
-    {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"},
-    {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"},
-    {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"},
-    {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"},
-    {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"},
-    {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"},
-    {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"},
-    {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"},
-    {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"},
-    {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"},
-    {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"},
-    {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"},
-    {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"},
-    {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"},
-    {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"},
-    {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"},
-    {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"},
-    {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"},
-    {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"},
-    {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"},
-    {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"},
-    {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"},
-    {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"},
-    {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"},
-    {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"},
-    {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"},
-    {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"},
-    {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"},
-    {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"},
-    {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"},
-    {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"},
-    {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"},
-    {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"},
-    {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"},
-    {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"},
-    {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"},
-    {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"},
-    {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"},
-    {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"},
-    {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"},
-    {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"},
-    {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"},
-    {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"},
-    {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"},
-    {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"},
-    {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"},
-    {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"},
-    {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"},
-    {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"},
-    {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"},
-    {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"},
-    {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"},
-    {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"},
-    {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"},
-    {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"},
-    {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"},
-    {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"},
-    {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"},
-    {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"},
-    {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"},
-    {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"},
-    {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"},
-    {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"},
-    {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
-]
-xeger = [
-    {file = "xeger-0.3.5.tar.gz", hash = "sha256:2a91341fc2c814b27917b8bd24e8d212c8a3b904d98e9a6703d27484c2cb0f82"},
-]
-zipp = []
diff --git a/pyproject.toml b/pyproject.toml
index ee3e7a86..f02e80f5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,116 +1,372 @@
-[tool.poetry]
-name = "starlite-spa"
-version = "0.1.0"
-description = "Opinionated template for integrating Starlite with a SPA"
-authors = ["Cody Fincher <cody.fincher@gmail.com>"]
-maintainers = ["Cody Fincher <cody.fincher@gmail.com>"]
-license = "MIT"
-readme = "README.md"
-homepage = "https://github.com/cofin/pyspa"
-repository = "https://github.com/cofin/pyspa"
-documentation = "https://starlite-api.github.io/starlite-spa"
-keywords = ["api", "rest", "http", "asgi", "pydantic", "starlette", "fastapi", "framework", "websocket", "vite", "ssr"]
+[project]
+authors = [{ name = "Shrirang Bhate", email = "shrirang.bhate@gmail.com" }]
 classifiers = [
-    "Development Status :: 5 - Production/Stable",
-    "Environment :: Web Environment",
-    "License :: OSI Approved :: MIT License",
-    "Natural Language :: English",
-    "Operating System :: OS Independent",
-    "Programming Language :: Python :: 3.7",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
-    "Programming Language :: Python :: 3.10",
-    "Programming Language :: Python",
-    "Topic :: Internet :: WWW/HTTP",
-    "Topic :: Software Development :: Libraries",
-    "Topic :: Software Development",
-    "Typing :: Typed",
+  "Development Status :: 3 - Alpha",
+  "Environment :: Web Environment",
+  "License :: Other/Proprietary License",
+  "Natural Language :: English",
+  "Operating System :: OS Independent",
+  "Programming Language :: Python :: 3.11",
+  "Programming Language :: Python :: 3.12",
+  "Programming Language :: Python",
+  "Topic :: Internet :: WWW/HTTP",
+  "Topic :: Software Development",
+  "Typing :: Typed",
 ]
-include = ["CHANGELOG.md"]
-packages = [
-    { include = "pyspa", from = "src" },
+dependencies = [
+  "litestar[jinja,jwt,redis,structlog]>=2.8.3",
+  "jsbeautifier",
+  "advanced-alchemy[uuid]>=0.10.0",
+  "asyncpg>=0.28.0",
+  "python-dotenv>=1.0.0",
+  "passlib[argon2]>=1.7.4",
+  "litestar-saq>=0.1.3",
+  "litestar-vite[nodeenv]>=0.1.21",
+  "litestar-granian>=0.1.4",
+  "aiosqlite>=0.19.0",
+  "httptools",
 ]
-[tool.poetry.scripts]
-pyspa = "pyspa:main"
-pg-ready = "pyspa.utils.postgres_ready:main"
-redis-ready = "pyspa.utils.redis_ready:main"
-
-[tool.poetry.dependencies]
-python = ">=3.10,<4.0"
-starlite = { extras = ["brotli"], version = "*" }
-pydantic = {extras = ["dotenv","email"], version = "*"}
-typer = "*"
-gunicorn = "*"
-uvicorn = {extras = ["standard"], version = "*"}
-sqlalchemy = { git = "https://github.com/sqlalchemy/sqlalchemy.git", branch = "main"}
-alembic = "*"
-redis = "*"
-hiredis = "*"
-asyncpg = "*"
-rich = "*"
-picologging = "*"
-httpx = "*"
-
-[tool.poetry.dev-dependencies]
-hypothesis = { extras = ["cli"], version = "*" }
-mkdocs-material = "*"
-uvicorn = "*"
-sqlalchemy = { git = "https://github.com/sqlalchemy/sqlalchemy.git", branch = "main", extras = ["mypy"] }
-coverage = {extras = ["toml"], version = "*"}
-mypy = "*"
-pre-commit = "*"
-pylint = "*"
-pytest = "*"
-pytest-asyncio = "*"
-pytest-cov = "*"
-pytest-dotenv = "*"
-types-redis = "*"
-types-pyyaml = "*"
-types-requests = "*"
-bump2version = "*"
-flake8 = "*"
-flake8-type-checking = "*"
-pyupgrade = "*"
-blacken-docs ="*"
-bandit = "*"
-flake8-bugbear = "*"
-flake8-comprehensions = "*"
-flake8-mutable = "*"
-flake8-print = "*"
-flake8-simplify = "*"
-slotscheck = "*"
-types-freezegun = "*"
-Jinja2 = "*"
-freezegun = "*"
-pytest-mock = "*"
-tox = "*"
-mkdocs = "*"
-mkdocstrings = "*"
-mkdocstrings-python = "*"
-
-[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"
+description = "Opinionated template for a Litestar application."
+keywords = ["litestar", "sqlalchemy", "alembic", "api", "asgi", "litestar", "spa"]
+license = { text = "Proprietary" }
+name = "app"
+readme = "README.md"
+requires-python = ">=3.10"
+version = "0.2.0"
 
-[tool.black]
-line-length = 88
-include = '\.pyi?$'
+[tool.pdm]
+distribution = true
 
-[tool.isort]
-profile = "black"
-multi_line_output = 3
+[project.scripts]
+app = "app.__main__:run_cli"
 
+[tool.pdm.scripts]
+lint = "pdm run pre-commit run --all-files"
+post_install = "pdm run python scripts/pre-build.py --install-packages"
+pre_build = "pdm run python scripts/pre-build.py --build-assets"
+start-infra = "docker compose -f docker-compose.infra.yml up --force-recreate -d"
+stop-infra = "docker compose -f docker-compose.infra.yml down --remove-orphans"
+test = "pdm run pytest tests"
+test_coverage = { composite = ["pdm run pytest tests", "pdm run coverage html", "pdm run coverage xml"] }
 
-[tool.coverage.run]
-omit = ["*/tests/*"]
+[tool.pdm.dev-dependencies]
+dev = ["nodeenv"]
+docs = [
+  "sphinx>=7.2.6",
+  "sphinx-autobuild>=2021.3.14",
+  "sphinx-copybutton>=0.5.2",
+  "sphinx-toolbox>=3.5.0",
+  "sphinx-design>=0.5.0",
+  "sphinx-click>=5.0.1",
+  "sphinxcontrib-mermaid>=0.9.2",
+  "litestar-sphinx-theme @ git+https://github.com/litestar-org/litestar-sphinx-theme.git",
+]
+linting = [
+  "pre-commit>=3.4.0",
+  "mypy>=1.5.1",
+  "ruff>=0.0.287",
+  "slotscheck",
+  "types-click",
+  "types-passlib",
+  "types-python-jose",
+  "types-pyyaml",
+  "types-redis",
+  "asyncpg-stubs",
+  "pylint",
+]
+test = [
+  "pytest",
+  "pytest-xdist",
+  "pytest-mock",
+  "pytest-cov",
+  "coverage",
+  "pytest-sugar",
+  "pytest-databases[postgres,redis]>=0.1.0",
+]
+
+[tool.black]
+exclude = '''
+/(
+    \.git
+  | \.mypy_cache
+  | \.tox
+  | venv
+  | \.venv
+  | _build
+  | buck-out
+  | build
+  | dist
+)/
+'''
+include = '\.pyi?$'
+line-length = 120
 
 [tool.coverage.report]
 exclude_lines = [
-    'pragma: no cover',
-    'if TYPE_CHECKING:',
-    'except ImportError as e:',
-    '\.\.\.'
+  'if TYPE_CHECKING:',
+  'pragma: no cover',
+  "if __name__ == .__main__.:",
+  'def __repr__',
+  'if self\.debug:',
+  'if settings\.DEBUG',
+  'raise AssertionError',
+  'raise NotImplementedError',
+  'if 0:',
+  'class .*\bProtocol\):',
+  '@(abc\.)?abstractmethod',
 ]
+omit = ["*/tests/*"]
+show_missing = true
+
+[tool.coverage.run]
+branch = true
+omit = ["tests/*", "**/*/migrations/**/*.py", "scripts/*"]
 
 [tool.pytest.ini_options]
-asyncio_mode = "auto"
+addopts = ["-ra", "--ignore", "migrations"]
+# env_files = [".env.testing"]
+# env_override_existing_values = 1
+filterwarnings = [
+  "ignore::DeprecationWarning:pkg_resources",
+  "ignore::DeprecationWarning:google.*",
+  "ignore::DeprecationWarning:passlib.*",
+  "ignore::DeprecationWarning:aiosql.*",
+  "ignore::DeprecationWarning:litestar.constants.*",
+  "ignore::DeprecationWarning:litestar.utils.*",
+  "ignore::DeprecationWarning:litestar.cli.*",
+  "ignore::DeprecationWarning:httpx._client",
+]
+testpaths = ["tests"]
+
+[tool.ruff]
+exclude = [
+  ".bzr",
+  ".direnv",
+  ".eggs",
+  ".git",
+  ".hg",
+  ".mypy_cache",
+  ".nox",
+  ".pants.d",
+  ".ruff_cache",
+  ".svn",
+  ".tox",
+  ".venv",
+  "__pypackages__",
+  "_build",
+  "buck-out",
+  "build",
+  "dist",
+  "node_modules",
+  "venv",
+  '__pycache__',
+  "src/app/db/migrations/versions/*.py",
+]
+fix = true
+line-length = 120
+lint.fixable = ["ALL"]
+lint.ignore = [
+  "E501",    # pycodestyle line too long, handled by black
+  "D100",    # pydocstyle - missing docstring in public module
+  "D101",    # pydocstyle - missing docstring in public class
+  "D102",    # pydocstyle - missing docstring in public method
+  "D103",    # pydocstyle - missing docstring in public function
+  "D104",    # pydocstyle - missing docstring in public package
+  "D105",    # pydocstyle - missing docstring in magic method
+  "D106",    # pydocstyle - missing docstring in public nested class
+  "D107",    # pydocstyle - missing docstring in __init__
+  "D202",    # pydocstyle - no blank lines allowed after function docstring
+  "D205",    # pydocstyle - 1 blank line required between summary line and description
+  "D415",    # pydocstyle - first line should end with a period, question mark, or exclamation point
+  "UP037",   # pyupgrade - removes quotes from type annotation
+  "A003",    # flake8-builtins - class attribute {name} is shadowing a python builtin
+  "B010",    # flake8-bugbear - do not call setattr with a constant attribute value
+  "B008",    #  flake8-bugbear - Do not perform function call `Parameter` in argument defaultsRuff(B008)
+  "RUF012",  # ruff - mutable class attributes should be annotated with `typing.ClassVar`
+  "ANN401",  # ruff - Dynamically typed expressions (typing.Any) are disallowed
+  "ANN102",
+  "ANN101",  # ruff - Missing type annotation for `self` in method
+  "PLR0913", # ruff - Too many arguments to function call
+  "PLR2004", # Magic value used in comparison
+  "FBT001",  # Boolean typed positional argument in function definition
+  "FBT002",  # Boolean default positional argument in function definition
+  "FBT003",  # Boolean Boolean default positional argument in function definition
+  "ARG002",  # Unused method argument
+  "ARG001",  #  Unused function argument
+  "TD002",
+  "TD003",
+  "FIX002",
+  "PGH003",
+  "RUF006",
+  "SLF001",
+  "PT007",
+  'PT004',
+  'PT005',
+  'S603',
+  "E501",    # pycodestyle line too long, handled by black
+  "PLW2901", # pylint - for loop variable overwritten by assignment target
+  "ANN401",
+  "ANN102",
+  "ANN101",
+  "FBT",
+  "PLR0913", # too many arguments
+  "PT",
+  "TD",
+  "PERF203", # ignore for now; investigate
+]
+lint.select = ["ALL"]
+# Allow unused variables when underscore-prefixed.
+lint.dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+src = ["src", "tests/"]
+target-version = "py311"
+
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[tool.ruff.lint.mccabe]
+max-complexity = 12
+
+[tool.ruff.lint.pep8-naming]
+classmethod-decorators = [
+  "classmethod",
+  "sqlalchemy.ext.declarative.declared_attr",
+  "sqlalchemy.orm.declared_attr.directive",
+  "sqlalchemy.orm.declared_attr",
+]
+
+[tool.ruff.lint.isort]
+known-first-party = ['tests', 'app']
+
+[tool.ruff.lint.per-file-ignores]
+"*/migrations/*.py" = ['D104', 'D103', 'D205', 'D212']
+"__init__.py" = ['F401', 'D104']
+"docs/**/*.*" = ["S", "B", "DTZ", "A", "TCH", "ERA", "D", "RET", "E731", "RUF012", "FA100", "ARG001"]
+"docs/conf.py" = ["FIX002", "ARG001", "INP001"]
+"scripts/**/*.*" = ["D", "ARG", "EM", "TRY", "G", "FBT", "INP001"]
+"scripts/*.py" = ["INP001", "ERA001"]
+"src/app/db/migrations/versions/*.py" = ["ANN201", "INP001"]
+"tests/*.py" = ['D103', 'S101', 'D104']
+
+[tool.slotscheck]
+strict-imports = false
+
+[tool.codespell]
+ignore-words-list = "alog"
+skip = 'pdm.lock,package-lock.json'
+
+[tool.mypy]
+disallow_any_generics = false
+disallow_incomplete_defs = true
+disallow_untyped_decorators = true
+disallow_untyped_defs = true
+exclude = '''(?x)(
+^build/
+|^dist/
+|^.venv/
+|^node_modules/
+|^migrations/
+)
+'''
+implicit_reexport = false
+plugins = []
+strict = true
+warn_redundant_casts = true
+warn_return_any = true
+warn_unreachable = true
+warn_unused_configs = true
+warn_unused_ignores = true
+
+[[tool.mypy.overrides]]
+disallow_untyped_decorators = false
+module = ["tests.*"]
+
+[[tool.mypy.overrides]]
+disallow_untyped_calls = false
+disallow_untyped_decorators = false
+module = ["aiosql.*"]
+
+[[tool.mypy.overrides]]
+ignore_missing_imports = true
+module = [
+  "sqlalchemy.*",
+  "saq.*",
+  "uvicorn.*",
+  "aiohttp.*",
+  "jsbeautifier",
+  "jsbeautifier.*",
+  "uvloop",
+  "uvloop.*",
+  "nodeenv",
+]
+
+[[tool.mypy.overrides]]
+ignore_errors = true
+module = ["app.db.migrations.*", "app.lib.dto.*"]
+
+
+[tool.pyright]
+exclude = ["scripts", "docs"]
+include = ["src/app", "tests"]
+
+[tool.git-cliff.changelog]
+body = """
+{% if version %}\
+    `Release [v{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} <https://github.com/litestar-org/litestar-fullstack/releases/tag/v{{ version | trim_start_matches(pat="v") }}>`_
+    ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+    * `See All commits in v{{ version | trim_start_matches(pat="v") }} <https://github.com/litestar-org/litestar-fullstack/commits/v{{ version | trim_start_matches(pat="v") }}>`_
+{% else %}\
+    [unreleased]
+    ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+{% endif %}\
+{% if previous %}\
+    {% if previous.commit_id %}
+        `{{ previous.commit_id | truncate(length=7, end="") }} <https://github.com/litestar-org/litestar-fullstack/commit/{{ previous.commit_id }}>`_ ... \
+            `{{ previous.commit_id | truncate(length=7, end="") }} <https://github.com/litestar-org/litestar-fullstack/commit/{{ commit_id }}>`_ \
+            | `See diff for {{ version }} <https://github.com/litestar-org/litestar-fullstack/compare/{{ previous.commit_id }}...{{ commit_id }}>`_
+    {% endif %}\
+{% endif %}\
+{% for group, commits in commits | group_by(attribute="group") %}
+    {{ group | upper_first }}
+    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+    {% for commit in commits %}
+        * (`{{ commit.id | truncate(length=7, end="") }} <https://github.com/litestar-org/litestar-fullstack/commit/{{ commit.id }}>`_) {% if commit.breaking %}[**breaking**] {% endif %} - {{ commit.message | upper_first }} ({{ commit.author.name }})\
+          {% for footer in commit.footers -%}
+            , {{ footer.token }}{{ footer.separator }}{{ footer.value }}\
+          {% endfor %}\
+    {% endfor %}
+{% endfor %}\n
+"""
+footer = """
+Litestar Fullstack Changelog
+"""
+header = """
+=========
+Changelog
+=========\n
+All commits to this project will be documented in this file.\n
+"""
+trim = true
+
+[tool.git-cliff.git]
+commit_parsers = [
+  { message = "^feat", group = "Features" },
+  { message = "^fix", group = "Bug Fixes" },
+  { message = "^doc", group = "Documentation" },
+  { message = "^perf", group = "Performance" },
+  { message = "^refactor", group = "Refactor" },
+  { message = "^style", group = "Styling" },
+  { message = "^test", group = "Testing" },
+  { message = "^chore\\(release\\): prepare for", skip = true },
+  { message = "^chore", group = "Miscellaneous Tasks" },
+  { body = ".*security", group = "Security" },
+]
+conventional_commits = true
+filter_commits = false
+filter_unconventional = true
+ignore_tags = ""
+protect_breaking_commits = false
+skip_tags = "v0.1.0-beta.1"
+sort_commits = "oldest"
+split_commits = false
+tag_pattern = "v[0-9]*"
+topo_order = false
diff --git a/scripts/build-docs.py b/scripts/build-docs.py
new file mode 100644
index 00000000..96484916
--- /dev/null
+++ b/scripts/build-docs.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+import argparse
+import shutil
+import subprocess
+from contextlib import contextmanager
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Generator
+
+REDIRECT_TEMPLATE = """
+<!DOCTYPE HTML>
+<html lang="en-US">
+    <head>
+        <title>Page Redirection</title>
+        <meta charset="UTF-8">
+        <meta http-equiv="refresh" content="0; url={target}">
+        <script type="text/javascript">window.location.href = "{target}"</script>
+    </head>
+    <body>
+        You are being redirected. If this does not work, click <a href='{target}'>this link</a>
+    </body>
+</html>
+"""
+
+parser = argparse.ArgumentParser()
+parser.add_argument("output")
+
+
+@contextmanager
+def checkout(branch: str) -> Generator[None, None, None]:
+    subprocess.run(["git", "checkout", branch], check=True)  # noqa: S607
+    yield
+    subprocess.run(["git", "checkout", "-"], check=True)  # noqa: S607
+
+
+def build(output_dir: str) -> None:
+    subprocess.run(["make", "docs"], check=True)  # noqa: S607
+
+    output_dir = Path(output_dir)  # type: ignore[assignment]
+    output_dir.mkdir()  # type: ignore[attr-defined]
+    output_dir.joinpath(".nojekyll").touch(exist_ok=True)  # type: ignore[attr-defined]
+    output_dir.joinpath("index.html").write_text(REDIRECT_TEMPLATE.format(target="latest"))  # type: ignore[attr-defined]
+
+    docs_src_path = Path("docs/_build/html")
+    shutil.copytree(docs_src_path, output_dir / "latest", dirs_exist_ok=True)  # type: ignore[operator]
+
+
+def main() -> None:
+    args = parser.parse_args()
+    build(output_dir=args.output)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/convert-docs.sh b/scripts/convert-docs.sh
new file mode 100755
index 00000000..29a2d1e2
--- /dev/null
+++ b/scripts/convert-docs.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+CHANGELOG=docs/changelog.rst
+
+filename="${CHANGELOG%.*}"
+echo "Converting $CHANGELOG to $filename.md"
+pandoc --wrap=preserve "$CHANGELOG" -f rst -t markdown -o "$filename.md"
diff --git a/scripts/post-builds.py b/scripts/post-builds.py
new file mode 100644
index 00000000..0c4882ce
--- /dev/null
+++ b/scripts/post-builds.py
@@ -0,0 +1,113 @@
+from __future__ import annotations
+
+import argparse
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+from pathlib import Path
+
+PYAPP_VERSION = "v0.14.0"
+PYAPP_URL = f"https://github.com/ofek/pyapp/releases/download/{PYAPP_VERSION}/source.tar.gz"
+PROJECT_ROOT = Path(__file__).parent.parent
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(asctime)s [%(levelname)8s] %(message)s",
+    datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger("post-build")
+
+
+def package_standalone_app(options: argparse.Namespace) -> None:
+    subprocess.run(
+        ["/usr/bin/env", "pdm", "export", "--without-hashes", "--prod", "--output", "dist/requirements.txt"],
+        check=False,
+    )
+    with Path(PROJECT_ROOT / "dist/requirements.txt").open("+a") as f:
+        f.writelines([str(os.fspath(Path(options.wheel_file).absolute()))])
+    logger.info("PYAPP_PROJECT_PATH is set to %s", os.fspath(Path(options.wheel_file).absolute()))
+    pyapp_configuration = {
+        "PYAPP_PROJECT_PATH": str(Path(options.wheel_file).absolute()),
+        # "PYAPP_PROJECT_DEPENDENCY_FILE": str(Path(PROJECT_ROOT / "dist/requirements.txt").absolute()),
+        # "PYAPP_PROJECT_NAME": "app",
+        # "PYAPP_PROJECT_VERSION": "0.2.0",
+        # "PYAPP_EXEC_MODULE": "app",
+        "PYAPP_PYTHON_VERSION": "3.11",
+        # "PYAPP_DISTRIBUTION_EMBED": "1",
+        "PYAPP_FULL_ISOLATION": "1",
+        "PYAPP_EXEC_SPEC": "app.__main__:run_cli",
+        "PYAPP_PIP_EXTRA_ARGS": "--only-binary :all:",
+        "RUST_BACKTRACE": "full",
+        "CARGO_PROFILE_RELEASE_BUILD_OVERRIDE_DEBUG": "true",
+        "PATH": os.environ["PATH"],
+    }
+    for env_var, val in pyapp_configuration.items():
+        os.environ[env_var] = val
+    logger.info("Setting the following environment variables %s", pyapp_configuration)
+
+    with tempfile.TemporaryDirectory() as app_temp_dir:
+        subprocess.run(["/usr/bin/env", "wget", PYAPP_URL, "-O", f"{app_temp_dir}/pyapp.tar.gz"], check=False)
+        subprocess.run(
+            [
+                "/usr/bin/env",
+                "tar",
+                "-xvf",
+                f"{app_temp_dir}/pyapp.tar.gz",
+                "-C",
+                app_temp_dir,
+                "--strip-components",
+                "1",
+            ],
+            check=True,
+        )
+
+        subprocess.run(
+            [
+                "/usr/bin/env",
+                "cargo",
+                "build",
+                "--release",
+            ],
+            check=False,
+            cwd=app_temp_dir,
+            # env=pyapp_configuration,
+        )
+        # subprocess.run(
+        #     [
+        #         "/usr/bin/env",
+        #         "cargo",
+        #         "install",
+        #         "--path",
+        #         app_temp_dir,
+        #         # "--git",
+        #         # "https://github.com/ofek/pyapp",
+        #         # "--tag",
+        #         # PYAPP_VERSION,
+        #         "--force",
+        #         "--root",
+        #         app_temp_dir,
+        #     ],
+        #     env=pyapp_configuration,
+        #     check=True,
+        # )
+
+        for suffix in ["", ".exe"]:
+            from_path = Path(app_temp_dir, "bin", "pyapp").with_suffix(suffix)
+            if not from_path.exists():
+                continue
+
+            to_path = Path(options.out_dir, options.name).with_suffix(suffix)
+            to_path.parent.mkdir(parents=True, exist_ok=True)
+            shutil.copy(from_path, to_path)
+
+            break
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser("Manage Package Post-Build Processes")
+    parser.add_argument("--wheel-file", required=True)
+    parser.add_argument("--out-dir", required=True)
+    parser.add_argument("--name", required=True, default="app")
+    args = parser.parse_args()
+    package_standalone_app(args)
diff --git a/scripts/pre-build.py b/scripts/pre-build.py
new file mode 100644
index 00000000..527b58e2
--- /dev/null
+++ b/scripts/pre-build.py
@@ -0,0 +1,46 @@
+from __future__ import annotations
+
+import argparse
+import logging
+import os
+import platform
+import subprocess
+import sys
+from importlib.util import find_spec
+from pathlib import Path
+from typing import Any
+
+NODEENV_INSTALLED = find_spec("nodeenv") is not None
+
+logger = logging.getLogger("pre-build")
+
+PROJECT_ROOT = Path(__file__).parent.parent
+NODEENV = "nodeenv"
+DEFAULT_VENV_PATH = Path(PROJECT_ROOT / ".venv")
+
+
+def manage_resources(setup_kwargs: Any) -> Any:
+    # look for this in the environment and skip this function if it exists, sometimes building here is not needed, eg. when using nixpacks
+    no_nodeenv = os.environ.get("LITESTAR_SKIP_NODEENV_INSTALL") is not None or NODEENV_INSTALLED is False
+    kwargs: dict[str, Any] = {}
+    if no_nodeenv:
+        logger.info("skipping nodeenv configuration")
+    else:
+        found_in_local_venv = Path(DEFAULT_VENV_PATH / "bin" / NODEENV).exists()
+        nodeenv_command = f"{DEFAULT_VENV_PATH}/bin/{NODEENV}" if found_in_local_venv else NODEENV
+        install_dir = DEFAULT_VENV_PATH if found_in_local_venv else os.environ.get("VIRTUAL_ENV", sys.prefix)
+        if platform.system() == "Windows":
+            kwargs["shell"] = True
+        logger.info("Installing Node environment to %s:", install_dir)
+        subprocess.run([nodeenv_command, install_dir, "--force", "--quiet"], **kwargs)  # noqa: PLW1510
+
+    return setup_kwargs
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser("Manage Package Resources")
+    parser.add_argument("--build-assets", action="store_true", help="Build assets for static hosting.", default=None)
+    parser.add_argument("--install-packages", action="store_true", help="Install NPM packages.", default=None)
+    args = parser.parse_args()
+    setup_kwargs = {"build_assets": args.build_assets, "install_packages": args.install_packages}
+    manage_resources(setup_kwargs)
diff --git a/src/.DS_Store b/src/.DS_Store
deleted file mode 100644
index 5008ddfc..00000000
Binary files a/src/.DS_Store and /dev/null differ
diff --git a/src/app/__about__.py b/src/app/__about__.py
new file mode 100644
index 00000000..0c5f132b
--- /dev/null
+++ b/src/app/__about__.py
@@ -0,0 +1,4 @@
+# SPDX-FileCopyrightText: 2023-present Cody Fincher <cody.fincher@gmail.com>
+#
+# SPDX-License-Identifier: MIT
+__version__ = "0.2.0"
diff --git a/src/app/__init__.py b/src/app/__init__.py
new file mode 100644
index 00000000..d7829645
--- /dev/null
+++ b/src/app/__init__.py
@@ -0,0 +1,8 @@
+# SPDX-FileCopyrightText: 2023-present Cody Fincher <cody.fincher@gmail.com>
+#
+# SPDX-License-Identifier: MIT
+import multiprocessing
+import platform
+
+if platform.system() == "Darwin":
+    multiprocessing.set_start_method("fork", force=True)
diff --git a/src/app/__main__.py b/src/app/__main__.py
new file mode 100644
index 00000000..c551d159
--- /dev/null
+++ b/src/app/__main__.py
@@ -0,0 +1,30 @@
+# SPDX-FileCopyrightText: 2023-present Cody Fincher <cody.fincher@gmail.com>
+#
+# SPDX-License-Identifier: MIT
+from __future__ import annotations
+
+
+def run_cli() -> None:
+    """Application Entrypoint."""
+    import os
+    import sys
+    from pathlib import Path
+
+    current_path = Path(__file__).parent.parent.resolve()
+    sys.path.append(str(current_path))
+    os.environ.setdefault("LITESTAR_APP", "app.asgi:app")
+    try:
+        from litestar.__main__ import run_cli as run_litestar_cli
+
+    except ImportError as exc:
+        print(  # noqa: T201
+            "Could not load required libraries.  ",
+            "Please check your installation and make sure you activated any necessary virtual environment",
+        )
+        print(exc)  # noqa: T201
+        sys.exit(1)
+    run_litestar_cli()
+
+
+if __name__ == "__main__":
+    run_cli()
diff --git a/src/app/asgi.py b/src/app/asgi.py
new file mode 100644
index 00000000..ba2b1c37
--- /dev/null
+++ b/src/app/asgi.py
@@ -0,0 +1,52 @@
+# pylint: disable=[invalid-name,import-outside-toplevel]
+# SPDX-FileCopyrightText: 2023-present Cody Fincher <cody.fincher@gmail.com>
+#
+# SPDX-License-Identifier: MIT
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from litestar import Litestar
+
+
+def create_app() -> Litestar:
+    """Create ASGI application."""
+
+    from litestar import Litestar
+    from litestar.di import Provide
+
+    from app.config import app as config
+    from app.config import constants
+    from app.config.base import get_settings
+    from app.domain.accounts import signals as account_signals
+    from app.domain.accounts.dependencies import provide_user
+    from app.domain.accounts.guards import auth
+    from app.domain.teams import signals as team_signals
+    from app.lib.dependencies import create_collection_dependencies
+    from app.server import openapi, plugins, routers
+
+    dependencies = {constants.USER_DEPENDENCY_KEY: Provide(provide_user)}
+    dependencies.update(create_collection_dependencies())
+    settings = get_settings()
+
+    return Litestar(
+        cors_config=config.cors,
+        dependencies=dependencies,
+        debug=settings.app.DEBUG,
+        openapi_config=openapi.config,
+        route_handlers=routers.route_handlers,
+        plugins=[
+            plugins.app_config,
+            plugins.structlog,
+            plugins.alchemy,
+            plugins.vite,
+            plugins.saq,
+            plugins.granian,
+        ],
+        on_app_init=[auth.on_app_init],
+        listeners=[account_signals.user_created_event_handler, team_signals.team_created_event_handler],
+    )
+
+
+app = create_app()
diff --git a/src/pyspa/core/__init__.py b/src/app/cli/__init__.py
similarity index 100%
rename from src/pyspa/core/__init__.py
rename to src/app/cli/__init__.py
diff --git a/src/app/cli/commands.py b/src/app/cli/commands.py
new file mode 100644
index 00000000..81067b5b
--- /dev/null
+++ b/src/app/cli/commands.py
@@ -0,0 +1,195 @@
+from __future__ import annotations
+
+from typing import Any
+
+import click
+
+
+@click.group(name="users", invoke_without_command=False, help="Manage application users and roles.")
+@click.pass_context
+def user_management_app(_: dict[str, Any]) -> None:
+    """Manage application users."""
+
+
+async def load_database_fixtures() -> None:
+    """Import/Synchronize Database Fixtures."""
+
+    from pathlib import Path
+
+    from advanced_alchemy.utils.fixtures import open_fixture_async
+    from sqlalchemy import select
+    from sqlalchemy.orm import load_only
+    from structlog import get_logger
+
+    from app.config import get_settings
+    from app.config.app import alchemy
+    from app.db.models import Role
+    from app.domain.accounts.services import RoleService
+
+    settings = get_settings()
+    logger = get_logger()
+    fixtures_path = Path(settings.db.FIXTURE_PATH)
+    async with RoleService.new(
+        statement=select(Role).options(load_only(Role.id, Role.slug, Role.name, Role.description)),
+        config=alchemy,
+    ) as service:
+        fixture_data = await open_fixture_async(fixtures_path, "role")
+        await service.upsert_many(match_fields=["name"], data=fixture_data, auto_commit=True)
+        await logger.ainfo("loaded roles")
+
+
+@user_management_app.command(name="create-user", help="Create a user")
+@click.option(
+    "--email",
+    help="Email of the new user",
+    type=click.STRING,
+    required=False,
+    show_default=False,
+)
+@click.option(
+    "--name",
+    help="Full name of the new user",
+    type=click.STRING,
+    required=False,
+    show_default=False,
+)
+@click.option(
+    "--password",
+    help="Password",
+    type=click.STRING,
+    required=False,
+    show_default=False,
+)
+@click.option(
+    "--superuser",
+    help="Is a superuser",
+    type=click.BOOL,
+    default=False,
+    required=False,
+    show_default=False,
+    is_flag=True,
+)
+def create_user(
+    email: str | None,
+    name: str | None,
+    password: str | None,
+    superuser: bool | None,
+) -> None:
+    """Create a user."""
+    import anyio
+    import click
+    from rich import get_console
+
+    from app.config.app import alchemy
+    from app.domain.accounts.dependencies import provide_users_service
+    from app.domain.accounts.schemas import UserCreate
+
+    console = get_console()
+
+    async def _create_user(
+        email: str,
+        name: str,
+        password: str,
+        superuser: bool = False,
+    ) -> None:
+        obj_in = UserCreate(
+            email=email,
+            name=name,
+            password=password,
+            is_superuser=superuser,
+        )
+        async with alchemy.get_session() as db_session:
+            users_service = await anext(provide_users_service(db_session))
+            user = await users_service.create(data=obj_in.to_dict(), auto_commit=True)
+            console.print(f"User created: {user.email}")
+
+    console.rule("Create a new application user.")
+    email = email or click.prompt("Email")
+    name = name or click.prompt("Full Name", show_default=False)
+    password = password or click.prompt("Password", hide_input=True, confirmation_prompt=True)
+    superuser = superuser or click.prompt("Create as superuser?", show_default=True, type=click.BOOL)
+
+    anyio.run(_create_user, email, name, password, superuser)
+
+
+@user_management_app.command(name="promote-to-superuser", help="Promotes a user to application superuser")
+@click.option(
+    "--email",
+    help="Email of the user",
+    type=click.STRING,
+    required=False,
+    show_default=False,
+)
+def promote_to_superuser(email: str) -> None:
+    """Promote to Superuser.
+
+    Args:
+        email (str): The email address of the user to promote.
+    """
+    import anyio
+    from rich import get_console
+
+    from app.config.app import alchemy
+    from app.domain.accounts.schemas import UserUpdate
+    from app.domain.accounts.services import UserService
+
+    console = get_console()
+
+    async def _promote_to_superuser(email: str) -> None:
+        async with UserService.new(config=alchemy) as users_service:
+            user = await users_service.get_one_or_none(email=email)
+            if user:
+                console.print(f"Promoting user: {user.email}")
+                user_in = UserUpdate(
+                    email=user.email,
+                    is_superuser=True,
+                )
+                user = await users_service.update(
+                    item_id=user.id,
+                    data=user_in.to_dict(),
+                    auto_commit=True,
+                )
+                console.print(f"Upgraded {email} to superuser")
+            else:
+                console.print(f"User not found: {email}")
+
+    console.rule("Promote user to superuser.")
+    anyio.run(_promote_to_superuser, email)
+
+
+@user_management_app.command(name="create-roles", help="Create pre-configured application roles and assign to users.")
+def create_default_roles() -> None:
+    """Create the default Roles for the system
+
+    Loads the role fixtures and assigns the default role to
+    every active user that does not already have it.
+    """
+    import anyio
+    from advanced_alchemy.utils.text import slugify
+    from rich import get_console
+
+    from app.config.app import alchemy
+    from app.db.models import UserRole
+    from app.domain.accounts.dependencies import provide_roles_service, provide_users_service
+
+    console = get_console()
+
+    async def _create_default_roles() -> None:
+        await load_database_fixtures()
+        async with alchemy.get_session() as db_session:
+            users_service = await anext(provide_users_service(db_session))
+            roles_service = await anext(provide_roles_service(db_session))
+            default_role = await roles_service.get_one_or_none(slug=slugify(users_service.default_role))
+            if default_role:
+                all_active_users = await users_service.list(is_active=True)
+                for user in all_active_users:
+                    if any(r.role_id == default_role.id for r in user.roles):
+                        console.print(f"User {user.email} already has default role")
+                    else:
+                        user.roles.append(UserRole(role_id=default_role.id))
+                        console.print(f"Assigned {user.email} default role")
+                        await users_service.repository.update(user)
+            await db_session.commit()
+
+    console.rule("Creating default roles.")
+    anyio.run(_create_default_roles)
diff --git a/src/app/config/__init__.py b/src/app/config/__init__.py
new file mode 100644
index 00000000..0cebc9da
--- /dev/null
+++ b/src/app/config/__init__.py
@@ -0,0 +1,14 @@
"""Public surface of the application configuration package."""

from __future__ import annotations

from . import app as plugin_configs
from . import constants
from .base import BASE_DIR, DEFAULT_MODULE_NAME, Settings, get_settings

__all__ = (
    "BASE_DIR",
    "DEFAULT_MODULE_NAME",
    "Settings",
    "constants",
    "get_settings",
    "plugin_configs",
)
diff --git a/src/app/config/app.py b/src/app/config/app.py
new file mode 100644
index 00000000..9763c5e0
--- /dev/null
+++ b/src/app/config/app.py
@@ -0,0 +1,132 @@
import logging
from typing import cast

from advanced_alchemy.extensions.litestar import (
    AlembicAsyncConfig,
    AsyncSessionConfig,
    SQLAlchemyAsyncConfig,
    async_autocommit_before_send_handler,
)
from litestar.config.compression import CompressionConfig
from litestar.config.cors import CORSConfig
from litestar.config.csrf import CSRFConfig
from litestar.logging.config import LoggingConfig, StructLoggingConfig
from litestar.middleware.logging import LoggingMiddlewareConfig
from litestar.plugins.structlog import StructlogConfig
from litestar_saq import CronJob, QueueConfig, SAQConfig
from litestar_vite import ViteConfig

from .base import get_settings

# Module-level plugin configuration objects, built once from the cached Settings.
settings = get_settings()

# Gzip compression for HTTP responses.
compression = CompressionConfig(backend="gzip")
# CSRF protection; secret and cookie behavior come from AppSettings.
csrf = CSRFConfig(
    secret=settings.app.SECRET_KEY,
    cookie_secure=settings.app.CSRF_COOKIE_SECURE,
    cookie_name=settings.app.CSRF_COOKIE_NAME,
)
# AppSettings.__post_init__ normalizes ALLOWED_CORS_ORIGINS to a list, hence the cast.
cors = CORSConfig(allow_origins=cast("list[str]", settings.app.ALLOWED_CORS_ORIGINS))
# SQLAlchemy plugin: async engine, commit-before-send session handling, Alembic wiring.
alchemy = SQLAlchemyAsyncConfig(
    engine_instance=settings.db.get_engine(),
    before_send_handler=async_autocommit_before_send_handler,
    session_config=AsyncSessionConfig(expire_on_commit=False),
    alembic_config=AlembicAsyncConfig(
        version_table_name=settings.db.MIGRATION_DDL_VERSION_TABLE,
        script_config=settings.db.MIGRATION_CONFIG,
        script_location=settings.db.MIGRATION_PATH,
    ),
)
# Vite integration for frontend asset bundling and dev-mode HMR.
vite = ViteConfig(
    bundle_dir=settings.vite.BUNDLE_DIR,
    resource_dir=settings.vite.RESOURCE_DIR,
    template_dir=settings.vite.TEMPLATE_DIR,
    use_server_lifespan=settings.vite.USE_SERVER_LIFESPAN,
    dev_mode=settings.vite.DEV_MODE,
    hot_reload=settings.vite.HOT_RELOAD,
    is_react=settings.vite.ENABLE_REACT_HELPERS,
    port=settings.vite.PORT,
    host=settings.vite.HOST,
)
# SAQ queues: hourly "system-tasks" upkeep and a per-minute "background-tasks" worker.
saq = SAQConfig(
    redis=settings.redis.client,
    web_enabled=settings.saq.WEB_ENABLED,
    worker_processes=settings.saq.PROCESSES,
    use_server_lifespan=settings.saq.USE_SERVER_LIFESPAN,
    queue_configs=[
        QueueConfig(
            name="system-tasks",
            tasks=["app.domain.system.tasks.system_task", "app.domain.system.tasks.system_upkeep"],
            scheduled_tasks=[
                CronJob(
                    function="app.domain.system.tasks.system_upkeep",
                    unique=True,
                    cron="0 * * * *",
                    timeout=500,
                ),
            ],
        ),
        QueueConfig(
            name="background-tasks",
            tasks=["app.domain.system.tasks.background_worker_task"],
            scheduled_tasks=[
                CronJob(
                    function="app.domain.system.tasks.background_worker_task",
                    unique=True,
                    cron="* * * * *",
                    timeout=300,
                ),
            ],
        ),
    ],
)

# Structlog setup: route well-known stdlib loggers (uvicorn/granian/saq/sqlalchemy)
# through the queue listener at the levels configured in LogSettings, and log
# request/response metadata via the middleware config.
log = StructlogConfig(
    structlog_logging_config=StructLoggingConfig(
        log_exceptions="always",
        standard_lib_logging_config=LoggingConfig(
            root={"level": logging.getLevelName(settings.log.LEVEL), "handlers": ["queue_listener"]},
            loggers={
                "uvicorn.access": {
                    "propagate": False,
                    "level": settings.log.UVICORN_ACCESS_LEVEL,
                    "handlers": ["queue_listener"],
                },
                "uvicorn.error": {
                    "propagate": False,
                    "level": settings.log.UVICORN_ERROR_LEVEL,
                    "handlers": ["queue_listener"],
                },
                "granian.access": {
                    "propagate": False,
                    "level": settings.log.GRANIAN_ACCESS_LEVEL,
                    "handlers": ["queue_listener"],
                },
                "granian.error": {
                    "propagate": False,
                    "level": settings.log.GRANIAN_ERROR_LEVEL,
                    "handlers": ["queue_listener"],
                },
                "saq": {
                    "propagate": False,
                    "level": settings.log.SAQ_LEVEL,
                    "handlers": ["queue_listener"],
                },
                "sqlalchemy.engine": {
                    "propagate": False,
                    "level": settings.log.SQLALCHEMY_LEVEL,
                    "handlers": ["queue_listener"],
                },
                "sqlalchemy.pool": {
                    "propagate": False,
                    "level": settings.log.SQLALCHEMY_LEVEL,
                    "handlers": ["queue_listener"],
                },
            },
        ),
    ),
    middleware_logging_config=LoggingMiddlewareConfig(
        request_log_fields=["method", "path", "path_params", "query"],
        response_log_fields=["status_code"],
    ),
)
diff --git a/src/app/config/base.py b/src/app/config/base.py
new file mode 100644
index 00000000..3f53059c
--- /dev/null
+++ b/src/app/config/base.py
@@ -0,0 +1,470 @@
from __future__ import annotations

import binascii
import json
import os
import enum
from dataclasses import dataclass, field
from functools import lru_cache
from pathlib import Path
from typing import TYPE_CHECKING, Any, Final
from datetime import datetime, date

from advanced_alchemy.utils.text import slugify
from litestar.serialization import decode_json, encode_json
from litestar.utils.module_loader import module_to_os_path
from redis.asyncio import Redis
from sqlalchemy import event
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.pool import NullPool

if TYPE_CHECKING:
    from litestar.data_extractors import RequestExtractorField, ResponseExtractorField

# Importable name of the application package and its on-disk location,
# used to derive migration/fixture/template paths below.
DEFAULT_MODULE_NAME = "app"
BASE_DIR: Final[Path] = module_to_os_path(DEFAULT_MODULE_NAME)

# Environment-variable values treated as boolean True (everything else is False).
TRUE_VALUES = {"True", "true", "1", "yes", "Y", "T"}
+
+
def custom_json_serializer(obj: Any) -> Any:
    """Recursively convert *obj* into JSON-serializable primitives.

    Dates/datetimes become ISO-8601 strings, enums become their ``.value``,
    dicts and lists are converted element-wise, and objects exposing a
    ``to_dict()`` method are converted via that dict. Anything else is
    returned unchanged for the downstream JSON encoder to handle.

    Args:
        obj: Any Python object destined for a JSON column.

    Returns:
        A structure of JSON-friendly values mirroring *obj*.
    """
    # datetime is a subclass of date, so a single check covers both and the
    # separate datetime branch in the original was unreachable.
    if isinstance(obj, date):
        return obj.isoformat()
    if isinstance(obj, enum.Enum):
        return obj.value
    if isinstance(obj, dict):
        return {key: custom_json_serializer(value) for key, value in obj.items()}
    if isinstance(obj, list):
        return [custom_json_serializer(item) for item in obj]
    if hasattr(obj, "to_dict"):
        return {key: custom_json_serializer(value) for key, value in obj.to_dict().items()}
    return obj
+
+
@dataclass
class DatabaseSettings:
    """Database connection, pooling, and migration configuration.

    Values come from ``DATABASE_*`` environment variables. Where the variable
    name previously read by the code disagreed with the documented
    ``.env`` examples, both spellings are accepted (documented name wins).
    """

    ECHO: bool = field(
        default_factory=lambda: os.getenv("DATABASE_ECHO", "False") in TRUE_VALUES,
    )
    """Enable SQLAlchemy engine logs."""
    ECHO_POOL: bool = field(
        default_factory=lambda: os.getenv("DATABASE_ECHO_POOL", "False") in TRUE_VALUES,
    )
    """Enable SQLAlchemy connection pool logs."""
    POOL_DISABLED: bool = field(
        # .env.docker.example documents DATABASE_POOL_DISABLE; keep reading the
        # DATABASE_POOL_DISABLED spelling for backward compatibility.
        default_factory=lambda: (
            os.getenv("DATABASE_POOL_DISABLED", os.getenv("DATABASE_POOL_DISABLE", "False")) in TRUE_VALUES
        ),
    )
    """Disable SQLAlchemy pool configuration."""
    POOL_MAX_OVERFLOW: int = field(
        # DATABASE_POOL_MAX_OVERFLOW matches the field name and env examples;
        # DATABASE_MAX_POOL_OVERFLOW is the legacy spelling previously read.
        default_factory=lambda: int(
            os.getenv("DATABASE_POOL_MAX_OVERFLOW", os.getenv("DATABASE_MAX_POOL_OVERFLOW", "10")),
        ),
    )
    """Max overflow for SQLAlchemy connection pool"""
    POOL_SIZE: int = field(default_factory=lambda: int(os.getenv("DATABASE_POOL_SIZE", "5")))
    """Pool size for SQLAlchemy connection pool"""
    POOL_TIMEOUT: int = field(default_factory=lambda: int(os.getenv("DATABASE_POOL_TIMEOUT", "30")))
    """Time in seconds for timing connections out of the connection pool."""
    POOL_RECYCLE: int = field(default_factory=lambda: int(os.getenv("DATABASE_POOL_RECYCLE", "300")))
    """Amount of time to wait before recycling connections."""
    POOL_PRE_PING: bool = field(
        # Prefer DATABASE_POOL_PRE_PING; DATABASE_PRE_POOL_PING is the legacy
        # (likely transposed) spelling previously read.
        default_factory=lambda: (
            os.getenv("DATABASE_POOL_PRE_PING", os.getenv("DATABASE_PRE_POOL_PING", "False")) in TRUE_VALUES
        ),
    )
    """Optionally ping database before fetching a session from the connection pool."""
    URL: str = field(default_factory=lambda: os.getenv("DATABASE_URL", "sqlite+aiosqlite:///db.sqlite3"))
    """SQLAlchemy Database URL."""
    MIGRATION_CONFIG: str = f"{BASE_DIR}/db/migrations/alembic.ini"
    """The path to the `alembic.ini` configuration file."""
    MIGRATION_PATH: str = f"{BASE_DIR}/db/migrations"
    """The path to the `alembic` database migrations."""
    MIGRATION_DDL_VERSION_TABLE: str = "ddl_version"
    """The name to use for the `alembic` versions table name."""
    FIXTURE_PATH: str = f"{BASE_DIR}/db/fixtures"
    """The path to JSON fixture files to load into tables."""
    _engine_instance: AsyncEngine | None = None
    """SQLAlchemy engine instance generated from settings."""

    @property
    def engine(self) -> AsyncEngine:
        """The lazily-created (and cached) SQLAlchemy async engine."""
        return self.get_engine()

    def get_engine(self) -> AsyncEngine:
        """Create, cache, and return the async engine for ``self.URL``.

        The dialect determines construction: ``postgresql+asyncpg`` installs
        binary JSON/JSONB codecs on connect, ``sqlite+aiosqlite`` overrides
        transaction BEGIN handling, and any other URL gets a generic pooled
        engine.

        Returns:
            The shared :class:`AsyncEngine` instance.
        """
        if self._engine_instance is not None:
            return self._engine_instance
        if self.URL.startswith("postgresql+asyncpg"):
            engine = create_async_engine(
                url=self.URL,
                future=True,
                json_serializer=custom_json_serializer,
                json_deserializer=decode_json,
                echo=self.ECHO,
                echo_pool=self.ECHO_POOL,
                max_overflow=self.POOL_MAX_OVERFLOW,
                pool_size=self.POOL_SIZE,
                pool_timeout=self.POOL_TIMEOUT,
                pool_recycle=self.POOL_RECYCLE,
                pool_pre_ping=self.POOL_PRE_PING,
                pool_use_lifo=True,  # use lifo to reduce the number of idle connections
                poolclass=NullPool if self.POOL_DISABLED else None,
            )

            @event.listens_for(engine.sync_engine, "connect")
            def _sqla_on_connect(dbapi_connection: Any, _: Any) -> Any:  # pragma: no cover
                """Using msgspec for serialization of the json column values means that the
                output is binary, not `str` like `json.dumps` would output.
                SQLAlchemy expects that the json serializer returns `str` and calls `.encode()` on the value to
                turn it to bytes before writing to the JSONB column. I'd need to either wrap `serialization.to_json` to
                return a `str` so that SQLAlchemy could then convert it to binary, or do the following, which
                changes the behaviour of the dialect to expect a binary value from the serializer.
                See Also https://github.com/sqlalchemy/sqlalchemy/blob/14bfbadfdf9260a1c40f63b31641b27fe9de12a0/lib/sqlalchemy/dialects/postgresql/asyncpg.py#L934  pylint: disable=line-too-long
                """

                def encoder(bin_value: bytes) -> bytes:
                    return b"\x01" + encode_json(bin_value)

                def decoder(bin_value: bytes) -> Any:
                    # the byte is the \x01 prefix for jsonb used by PostgreSQL.
                    # asyncpg returns it when format='binary'
                    return decode_json(bin_value[1:])

                dbapi_connection.await_(
                    dbapi_connection.driver_connection.set_type_codec(
                        "jsonb",
                        encoder=encoder,
                        decoder=decoder,
                        schema="pg_catalog",
                        format="binary",
                    ),
                )
                dbapi_connection.await_(
                    dbapi_connection.driver_connection.set_type_codec(
                        "json",
                        encoder=encoder,
                        decoder=decoder,
                        schema="pg_catalog",
                        format="binary",
                    ),
                )
        elif self.URL.startswith("sqlite+aiosqlite"):
            engine = create_async_engine(
                url=self.URL,
                future=True,
                json_serializer=encode_json,
                json_deserializer=decode_json,
                echo=self.ECHO,
                echo_pool=self.ECHO_POOL,
                pool_recycle=self.POOL_RECYCLE,
                pool_pre_ping=self.POOL_PRE_PING,
            )

            @event.listens_for(engine.sync_engine, "connect")
            def _sqla_on_connect(dbapi_connection: Any, _: Any) -> Any:  # pragma: no cover
                """Override the default begin statement.  The disables the built in begin execution."""
                dbapi_connection.isolation_level = None

            @event.listens_for(engine.sync_engine, "begin")
            def _sqla_on_begin(dbapi_connection: Any) -> Any:  # pragma: no cover
                """Emits a custom begin"""
                dbapi_connection.exec_driver_sql("BEGIN")
        else:
            engine = create_async_engine(
                url=self.URL,
                future=True,
                json_serializer=encode_json,
                json_deserializer=decode_json,
                echo=self.ECHO,
                echo_pool=self.ECHO_POOL,
                max_overflow=self.POOL_MAX_OVERFLOW,
                pool_size=self.POOL_SIZE,
                pool_timeout=self.POOL_TIMEOUT,
                pool_recycle=self.POOL_RECYCLE,
                pool_pre_ping=self.POOL_PRE_PING,
            )
        self._engine_instance = engine
        return self._engine_instance
+
+
@dataclass
class ViteSettings:
    """Vite development-server and asset-bundling configuration."""

    DEV_MODE: bool = field(
        default_factory=lambda: os.getenv("VITE_DEV_MODE", "False") in TRUE_VALUES,
    )
    """Start `vite` development server."""
    USE_SERVER_LIFESPAN: bool = field(
        default_factory=lambda: os.getenv("VITE_USE_SERVER_LIFESPAN", "True") in TRUE_VALUES,
    )
    """Auto start and stop `vite` processes when running in development mode."""
    HOST: str = field(default_factory=lambda: os.getenv("VITE_HOST", "0.0.0.0"))  # noqa: S104
    """The host the `vite` process will listen on.  Defaults to `0.0.0.0`"""
    PORT: int = field(default_factory=lambda: int(os.getenv("VITE_PORT", "5173")))
    """The port to start vite on.  Default to `5173`"""
    HOT_RELOAD: bool = field(
        default_factory=lambda: os.getenv("VITE_HOT_RELOAD", "True") in TRUE_VALUES,
    )
    """Start `vite` with HMR enabled."""
    ENABLE_REACT_HELPERS: bool = field(
        default_factory=lambda: os.getenv("VITE_ENABLE_REACT_HELPERS", "True") in TRUE_VALUES,
    )
    """Enable React support in HMR."""
    BUNDLE_DIR: Path = field(default_factory=lambda: Path(f"{BASE_DIR}/domain/web/public"))
    """Bundle directory"""
    RESOURCE_DIR: Path = field(default_factory=lambda: Path("resources"))
    """Resource directory"""
    TEMPLATE_DIR: Path = field(default_factory=lambda: Path(f"{BASE_DIR}/domain/web/templates"))
    """Template directory."""
    ASSET_URL: str = field(default_factory=lambda: os.getenv("ASSET_URL", "/static/"))
    """Base URL for assets"""

    @property
    def set_static_files(self) -> bool:
        """Whether the app should serve static assets itself (relative asset URL)."""
        return self.ASSET_URL.startswith("/")
+
+
@dataclass
class ServerSettings:
    """ASGI server (uvicorn/granian) configuration."""

    APP_LOC: str = "app.asgi:app"
    """Path to app executable, or factory."""
    APP_LOC_IS_FACTORY: bool = False
    """Indicate if APP_LOC points to an executable or factory."""
    HOST: str = field(default_factory=lambda: os.getenv("LITESTAR_HOST", "0.0.0.0"))  # noqa: S104
    """Server network host."""
    PORT: int = field(default_factory=lambda: int(os.getenv("LITESTAR_PORT", "8000")))
    """Server port."""
    KEEPALIVE: int = field(default_factory=lambda: int(os.getenv("LITESTAR_KEEPALIVE", "65")))
    """Seconds to hold connections open (65 is > AWS lb idle timeout)."""
    RELOAD: bool = field(
        default_factory=lambda: os.getenv("LITESTAR_RELOAD", "False") in TRUE_VALUES,
    )
    """Turn on hot reloading."""
    RELOAD_DIRS: list[str] = field(default_factory=lambda: [f"{BASE_DIR}"])
    """Directories to watch for reloading."""
    HTTP_WORKERS: int | None = field(
        # Read WEB_CONCURRENCY once via the walrus; unset OR empty yields None
        # ("let the server decide"). This removes the double getenv() call and
        # the `int(None)` type-ignore, and avoids crashing on WEB_CONCURRENCY="".
        default_factory=lambda: int(workers) if (workers := os.getenv("WEB_CONCURRENCY")) else None,
    )
    """Number of HTTP Worker processes to be spawned by Uvicorn."""
+
+
@dataclass
class SaqSettings:
    """SAQ background-worker configuration."""

    PROCESSES: int = field(default_factory=lambda: int(os.getenv("SAQ_PROCESSES", "1")))
    """The number of worker processes to start.

    Default is set to 1.
    """
    CONCURRENCY: int = field(default_factory=lambda: int(os.getenv("SAQ_CONCURRENCY", "10")))
    """The number of concurrent jobs allowed to execute per worker process.

    Default is set to 10.
    """
    WEB_ENABLED: bool = field(
        default_factory=lambda: os.getenv("SAQ_WEB_ENABLED", "True") in TRUE_VALUES,
    )
    """If true, the worker admin UI is hosted on worker startup."""
    USE_SERVER_LIFESPAN: bool = field(
        default_factory=lambda: os.getenv("SAQ_USE_SERVER_LIFESPAN", "True") in TRUE_VALUES,
    )
    """Auto start and stop `saq` processes when starting the Litestar application."""
+
+
@dataclass
class LogSettings:
    """Logger configuration"""

    # https://stackoverflow.com/a/1845097/6560549
    EXCLUDE_PATHS: str = r"\A(?!x)x"
    """Regex to exclude paths from logging."""
    HTTP_EVENT: str = "HTTP"
    """Log event name for logs from Litestar handlers."""
    INCLUDE_COMPRESSED_BODY: bool = False
    """Include 'body' of compressed responses in log output."""
    LEVEL: int = field(default_factory=lambda: int(os.getenv("LOG_LEVEL", "10")))
    """Stdlib log levels.

    Only emit logs at this level, or higher.
    """
    OBFUSCATE_COOKIES: set[str] = field(default_factory=lambda: {"session"})
    """Request cookie keys to obfuscate."""
    OBFUSCATE_HEADERS: set[str] = field(default_factory=lambda: {"Authorization", "X-API-KEY"})
    """Request header keys to obfuscate."""
    JOB_FIELDS: list[str] = field(
        default_factory=lambda: [
            "function",
            "kwargs",
            "key",
            "scheduled",
            "attempts",
            "completed",
            "queued",
            "started",
            "result",
            "error",
        ],
    )
    """Attributes of the SAQ.

    [`Job`](https://github.com/tobymao/saq/blob/master/saq/job.py) to be
    logged.
    """
    REQUEST_FIELDS: list[RequestExtractorField] = field(
        default_factory=lambda: [
            "path",
            "method",
            "headers",
            "cookies",
            "query",
            "path_params",
            "body",
        ],
    )
    """Attributes of the [Request][litestar.connection.request.Request] to be
    logged."""
    RESPONSE_FIELDS: list[ResponseExtractorField] = field(
        default_factory=lambda: [
            "status_code",
            "cookies",
            "headers",
            "body",
        ],
    )
    """Attributes of the [Response][litestar.response.Response] to be
    logged."""
    WORKER_EVENT: str = "Worker"
    """Log event name for logs from SAQ worker."""
    SAQ_LEVEL: int = 20
    """Level to log SAQ logs."""
    SQLALCHEMY_LEVEL: int = 20
    """Level to log SQLAlchemy logs."""
    UVICORN_ACCESS_LEVEL: int = 20
    """Level to log uvicorn access logs."""
    UVICORN_ERROR_LEVEL: int = 20
    """Level to log uvicorn error logs."""
    GRANIAN_ACCESS_LEVEL: int = 30
    """Level to log granian access logs."""
    GRANIAN_ERROR_LEVEL: int = 20
    """Level to log granian error logs."""
+
+
@dataclass
class RedisSettings:
    """Redis connection configuration."""

    URL: str = field(default_factory=lambda: os.getenv("REDIS_URL", "redis://localhost:6379/0"))
    """A Redis connection URL."""
    SOCKET_CONNECT_TIMEOUT: int = field(default_factory=lambda: int(os.getenv("REDIS_CONNECT_TIMEOUT", "5")))
    """Length of time to wait (in seconds) for a connection to become
    active."""
    HEALTH_CHECK_INTERVAL: int = field(default_factory=lambda: int(os.getenv("REDIS_HEALTH_CHECK_INTERVAL", "5")))
    """Length of time to wait (in seconds) before testing connection health."""
    SOCKET_KEEPALIVE: bool = field(
        default_factory=lambda: os.getenv("REDIS_SOCKET_KEEPALIVE", "True") in TRUE_VALUES,
    )
    """Enable TCP keepalive on the Redis connection socket."""
    _redis_instance: Redis | None = None
    """Redis instance generated from settings."""

    @property
    def client(self) -> Redis:
        """The lazily-created (and cached) async Redis client."""
        return self.get_client()

    def get_client(self) -> Redis:
        """Create, cache, and return the async Redis client for ``self.URL``."""
        if self._redis_instance is not None:
            return self._redis_instance
        # decode_responses=False: callers handle raw bytes (e.g. msgspec payloads).
        self._redis_instance = Redis.from_url(
            url=self.URL,
            encoding="utf-8",
            decode_responses=False,
            socket_connect_timeout=self.SOCKET_CONNECT_TIMEOUT,
            socket_keepalive=self.SOCKET_KEEPALIVE,
            health_check_interval=self.HEALTH_CHECK_INTERVAL,
        )
        return self._redis_instance
+
+
@dataclass
class AppSettings:
    """Application configuration"""

    URL: str = field(default_factory=lambda: os.getenv("APP_URL", "http://localhost:8000"))
    """The frontend base URL"""
    DEBUG: bool = field(default_factory=lambda: os.getenv("LITESTAR_DEBUG", "False") in TRUE_VALUES)
    """Run `Litestar` with `debug=True`."""
    SECRET_KEY: str = field(
        # Falls back to a fresh random key per process when SECRET_KEY is unset.
        default_factory=lambda: os.getenv("SECRET_KEY", binascii.hexlify(os.urandom(32)).decode(encoding="utf-8")),
    )
    """Application secret key."""
    NAME: str = field(default_factory=lambda: "app")
    """Application name."""
    ALLOWED_CORS_ORIGINS: list[str] | str = field(default_factory=lambda: os.getenv("ALLOWED_CORS_ORIGINS", '["*"]'))
    """Allowed CORS Origins"""
    CSRF_COOKIE_NAME: str = field(default_factory=lambda: "csrftoken")
    """CSRF Cookie Name"""
    CSRF_COOKIE_SECURE: bool = field(default_factory=lambda: False)
    """CSRF Secure Cookie"""
    JWT_ENCRYPTION_ALGORITHM: str = field(default_factory=lambda: "HS256")
    """JWT Encryption Algorithm"""

    @property
    def slug(self) -> str:
        """Return a slugified name.

        Returns:
            `self.NAME`, all lowercase and hyphens instead of spaces.
        """
        return slugify(self.NAME)

    def __post_init__(self) -> None:
        """Normalize ``ALLOWED_CORS_ORIGINS`` from its env-string form to a list.

        Accepts either a JSON-encoded list (e.g. ``'["a","b"]'``) or a plain
        comma-separated string of hosts.

        Raises:
            ValueError: If a bracketed value is not a valid JSON list.
        """
        if isinstance(self.ALLOWED_CORS_ORIGINS, str):
            # A bracketed value is expected to be a JSON-encoded list.
            if self.ALLOWED_CORS_ORIGINS.startswith("[") and self.ALLOWED_CORS_ORIGINS.endswith("]"):
                try:
                    # json.loads raises json.JSONDecodeError (a ValueError
                    # subclass) on bad input; SyntaxError can never occur here.
                    self.ALLOWED_CORS_ORIGINS = json.loads(self.ALLOWED_CORS_ORIGINS)
                except ValueError:
                    msg = "ALLOWED_CORS_ORIGINS is not a valid list representation."
                    raise ValueError(msg) from None
            else:
                # Split the string by commas into a list if it is not meant to be a list representation.
                self.ALLOWED_CORS_ORIGINS = [host.strip() for host in self.ALLOWED_CORS_ORIGINS.split(",")]
+
+
@dataclass
class Settings:
    """Aggregate of all application setting groups."""

    app: AppSettings = field(default_factory=AppSettings)
    db: DatabaseSettings = field(default_factory=DatabaseSettings)
    vite: ViteSettings = field(default_factory=ViteSettings)
    server: ServerSettings = field(default_factory=ServerSettings)
    log: LogSettings = field(default_factory=LogSettings)
    redis: RedisSettings = field(default_factory=RedisSettings)
    saq: SaqSettings = field(default_factory=SaqSettings)

    @classmethod
    def from_env(cls, dotenv_filename: str = ".env") -> Settings:
        """Build a fresh Settings, loading *dotenv_filename* first when present."""
        from litestar.cli._utils import console

        dotenv_path = Path(f"{os.curdir}/{dotenv_filename}")
        if not dotenv_path.is_file():
            # Nothing to load; settings come from the current process environment.
            return Settings()

        from dotenv import load_dotenv

        console.print(f"[yellow]Loading environment configuration from {dotenv_filename}[/]")

        load_dotenv(dotenv_path)
        return Settings()
+
+
@lru_cache(maxsize=1, typed=True)
def get_settings() -> Settings:
    """Return the application :class:`Settings` singleton.

    The first call builds the instance via ``Settings.from_env`` (loading
    ``.env`` if present); subsequent calls return the cached instance.
    """
    return Settings.from_env()
diff --git a/src/app/config/constants.py b/src/app/config/constants.py
new file mode 100644
index 00000000..fc806d9c
--- /dev/null
+++ b/src/app/config/constants.py
@@ -0,0 +1,27 @@
from __future__ import annotations

DB_SESSION_DEPENDENCY_KEY = "db_session"
"""The name of the key used for dependency injection of the database
session."""
USER_DEPENDENCY_KEY = "current_user"
"""The name of the key used for dependency injection of the
currently-authenticated user."""
DTO_INFO_KEY = "info"
"""The name of the key used for storing DTO information."""
DEFAULT_PAGINATION_SIZE = 20
"""Default page size to use."""
CACHE_EXPIRATION: int = 60
"""Default cache key expiration in seconds."""
DEFAULT_USER_ROLE = "Application Access"
"""The name of the default role assigned to all users."""
SUPERUSER_ACCESS_ROLE = "Superuser"
"""The name of the super user role."""
HEALTH_ENDPOINT = "/health"
"""The endpoint to use for the service health check."""
SITE_INDEX = "/"
"""The site index URL."""
OPENAPI_SCHEMA = "/schema"
"""The URL path to use for the OpenAPI documentation."""
diff --git a/cloudbuild.yml b/src/app/db/__init__.py
similarity index 100%
rename from cloudbuild.yml
rename to src/app/db/__init__.py
diff --git a/src/app/db/fixtures/role.json b/src/app/db/fixtures/role.json
new file mode 100644
index 00000000..1fd96ff7
--- /dev/null
+++ b/src/app/db/fixtures/role.json
@@ -0,0 +1,12 @@
+[
+    {
+       "slug": "application-access",
+      "name": "Application Access",
+      "description": "Default role required for access.  This role allows you to query and access the application."
+    },
+    {
+       "slug": "superuser",
+      "name": "Superuser",
+      "description": "Allows superuser access to the application."
+    }
+  ]
diff --git a/docs/.gitkeep b/src/app/db/migrations/__init__.py
similarity index 100%
rename from docs/.gitkeep
rename to src/app/db/migrations/__init__.py
diff --git a/src/app/db/migrations/alembic.ini b/src/app/db/migrations/alembic.ini
new file mode 100644
index 00000000..e2e46c9d
--- /dev/null
+++ b/src/app/db/migrations/alembic.ini
@@ -0,0 +1,73 @@
+# Advanced Alchemy Alembic Asyncio Config
+
+[alembic]
+prepend_sys_path = src:.
+# path to migration scripts
script_location = src/app/db/migrations
+
+# template used to generate migration files
+file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(slug)s_%%(rev)s
+
+# This is not required to be set when running through `advanced_alchemy`
+# sqlalchemy.url = driver://user:pass@localhost/dbname
+
+# timezone to use when rendering the date
+# within the migration file as well as the filename.
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+timezone = UTC
+
+# max length of characters to apply to the
+# "slug" field
+truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; this defaults
+# to alembic/versions.  When using multiple version
+# directories, initial revisions must be specified with --version-path
+# version_locations = %(here)s/bar %(here)s/bat alembic/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+# Valid values for version_path_separator are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+output_encoding = utf-8
+
+# [post_write_hooks]
+# This section defines scripts or Python functions that are run
+# on newly generated revision scripts.  See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner,
+# against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 120 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
diff --git a/src/app/db/migrations/env.py b/src/app/db/migrations/env.py
new file mode 100644
index 00000000..660a4f9a
--- /dev/null
+++ b/src/app/db/migrations/env.py
@@ -0,0 +1,143 @@
+from __future__ import annotations
+
+import asyncio
+from typing import TYPE_CHECKING, cast
+
+from advanced_alchemy.base import orm_registry
+from alembic import context
+from alembic.autogenerate import rewriter
+from alembic.operations import ops
+from sqlalchemy import Column, pool
+from sqlalchemy.ext.asyncio import AsyncEngine, async_engine_from_config
+
+if TYPE_CHECKING:
+    from advanced_alchemy.alembic.commands import AlembicCommandConfig
+    from alembic.runtime.environment import EnvironmentContext
+    from sqlalchemy.engine import Connection
+
+__all__ = ["do_run_migrations", "run_migrations_offline", "run_migrations_online"]
+
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config: AlembicCommandConfig = context.config  # type: ignore
+
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+
+# All application models register against this shared registry's metadata,
+# so autogenerate compares the live database against the full model set.
+target_metadata = orm_registry.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# ... etc.
+
+# Rewriter hook used to post-process autogenerated revision directives
+# (registered below to reorder columns in CREATE TABLE operations).
+writer = rewriter.Rewriter()
+
+
+@writer.rewrites(ops.CreateTableOp)
+def order_columns(
+    context: EnvironmentContext,
+    revision: tuple[str, ...],
+    op: ops.CreateTableOp,
+) -> ops.CreateTableOp:
+    """Orders ID first and the audit columns at the end.
+
+    Args:
+        context: The active Alembic environment context (unused here).
+        revision: Identifier tuple of the revision being generated (unused here).
+        op: The autogenerated CREATE TABLE operation to rewrite.
+
+    Returns:
+        A new ``CreateTableOp`` with the same columns reordered.
+    """
+    # Sort weights: "id" (-100) sorts before everything; the ORM sentinel and
+    # audit timestamps (>3000) sort after everything; all other columns keep
+    # their original relative position via their enumerate() index. Python's
+    # sort is stable, so created_at/updated_at (both 3002) keep their order.
+    special_names = {"id": -100, "sa_orm_sentinel": 3001, "created_at": 3002, "updated_at": 3002}
+    # Non-Column entries get a fixed weight of 2000 — presumably non-column
+    # schema items that should land after regular columns; confirm intent.
+    cols_by_key = [
+        (
+            special_names.get(col.key, index) if isinstance(col, Column) else 2000,
+            col.copy(),  # type: ignore[attr-defined]
+        )
+        for index, col in enumerate(op.columns)
+    ]
+    columns = [col for _, col in sorted(cols_by_key, key=lambda entry: entry[0])]
+    # Rebuild the op rather than mutating it, preserving table name, schema,
+    # and any extra keyword options from the original directive.
+    return ops.CreateTableOp(
+        op.table_name,
+        columns,
+        schema=op.schema,
+        # Remove when https://github.com/sqlalchemy/alembic/issues/1193 is fixed
+        _namespace_metadata=op._namespace_metadata,
+        **op.kw,
+    )
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well.  By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+    """
+    context.configure(
+        # The URL comes from the application's Alembic command config, not
+        # from alembic.ini, so offline SQL targets the configured database.
+        url=config.db_url,
+        target_metadata=target_metadata,
+        literal_binds=True,  # render bound parameters inline in the emitted SQL
+        dialect_opts={"paramstyle": "named"},
+        compare_type=config.compare_type,
+        version_table=config.version_table_name,
+        version_table_pk=config.version_table_pk,
+        user_module_prefix=config.user_module_prefix,
+        render_as_batch=config.render_as_batch,
+        # Route autogenerated directives through the column-ordering rewriter.
+        process_revision_directives=writer,  # type: ignore[arg-type]
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def do_run_migrations(connection: Connection) -> None:
+    """Run migrations against an already-established connection.
+
+    Invoked via ``AsyncConnection.run_sync`` from ``run_migrations_online``,
+    so it receives a synchronous ``Connection`` facade.
+
+    Args:
+        connection: Open database connection to bind the migration context to.
+    """
+    # Same configuration as offline mode, minus the URL/literal-binds options,
+    # since we execute directly over the provided connection.
+    context.configure(
+        connection=connection,
+        target_metadata=target_metadata,
+        compare_type=config.compare_type,
+        version_table=config.version_table_name,
+        version_table_pk=config.version_table_pk,
+        user_module_prefix=config.user_module_prefix,
+        render_as_batch=config.render_as_batch,
+        # Route autogenerated directives through the column-ordering rewriter.
+        process_revision_directives=writer,  # type: ignore[arg-type]
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+async def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine and associate a
+    connection with the context.
+    """
+    configuration = config.get_section(config.config_ini_section) or {}
+    configuration["sqlalchemy.url"] = config.db_url
+
+    connectable = cast(
+        "AsyncEngine",
+        config.engine
+        or async_engine_from_config(
+            configuration,
+            prefix="sqlalchemy.",
+            poolclass=pool.NullPool,
+            future=True,
+        ),
+    )
+    if connectable is None:
+        msg = "Could not get engine from config.  Please ensure your `alembic.ini` according to the official Alembic documentation."  # type: ignore[unreachable]
+        raise RuntimeError(
+            msg,
+        )
+
+    async with connectable.connect() as connection:
+        await connection.run_sync(do_run_migrations)
+
+    await connectable.dispose()
+
+
+# Module entry point: Alembic imports env.py and we dispatch on mode here.
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    # Online migrations use an async engine, so drive them with asyncio.run().
+    asyncio.run(run_migrations_online())
diff --git a/src/app/db/migrations/script.py.mako b/src/app/db/migrations/script.py.mako
new file mode 100644
index 00000000..72f173ce
--- /dev/null
+++ b/src/app/db/migrations/script.py.mako
@@ -0,0 +1,63 @@
+# type: ignore
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from __future__ import annotations
+
+import warnings
+from typing import TYPE_CHECKING
+
+import sqlalchemy as sa
+from alembic import op
+from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC
+from sqlalchemy import Text  # noqa: F401
+${imports if imports else ""}
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
+
+sa.GUID = GUID
+sa.DateTimeUTC = DateTimeUTC
+sa.ORA_JSONB = ORA_JSONB
+sa.EncryptedString = EncryptedString
+sa.EncryptedText = EncryptedText
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    """Apply this revision: schema changes first, then data changes."""
+    with warnings.catch_warnings():
+        # UserWarnings are suppressed — presumably emitted by the custom
+        # column types during DDL rendering; confirm before removing.
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            schema_upgrades()
+            data_upgrades()
+
+def downgrade() -> None:
+    """Revert this revision: data changes first, then schema changes."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            # Reverse order of upgrade(): undo data before dropping schema.
+            data_downgrades()
+            schema_downgrades()
+
+def schema_upgrades() -> None:
+    """schema upgrade migrations go here."""
+    ${upgrades if upgrades else "pass"}
+
+def schema_downgrades() -> None:
+    """schema downgrade migrations go here."""
+    ${downgrades if downgrades else "pass"}
+
+def data_upgrades() -> None:
+    """Add any optional data upgrade migrations here!"""
+
+def data_downgrades() -> None:
+    """Add any optional data downgrade migrations here!"""
diff --git a/src/app/db/migrations/versions/2024-07-25_add_tenant_and_link_to_user_b6ffcfae703a.py b/src/app/db/migrations/versions/2024-07-25_add_tenant_and_link_to_user_b6ffcfae703a.py
new file mode 100644
index 00000000..87e9222f
--- /dev/null
+++ b/src/app/db/migrations/versions/2024-07-25_add_tenant_and_link_to_user_b6ffcfae703a.py
@@ -0,0 +1,142 @@
+# type: ignore
+"""Add tenant and link to user
+
+Revision ID: b6ffcfae703a
+Revises: a22cc7704d14
+Create Date: 2024-07-25 10:01:59.090568+00:00
+
+"""
+from __future__ import annotations
+
+import warnings
+from typing import TYPE_CHECKING
+
+import sqlalchemy as sa
+from alembic import op
+from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC
+from sqlalchemy import Text  # noqa: F401
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
+
+sa.GUID = GUID
+sa.DateTimeUTC = DateTimeUTC
+sa.ORA_JSONB = ORA_JSONB
+sa.EncryptedString = EncryptedString
+sa.EncryptedText = EncryptedText
+
+# revision identifiers, used by Alembic.
+revision = 'b6ffcfae703a'
+down_revision = 'a22cc7704d14'
+branch_labels = None
+depends_on = None
+
+
+# NOTE(review): committed migration — code must stay byte-identical; comments only.
+def upgrade() -> None:
+    """Apply revision b6ffcfae703a: schema first, then data."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            schema_upgrades()
+            data_upgrades()
+
+def downgrade() -> None:
+    """Revert revision b6ffcfae703a: data first, then schema."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            data_downgrades()
+            schema_downgrades()
+
+def schema_upgrades() -> None:
+    """schema upgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    # New top-level "tenant" table; slug uniqueness enforced by both a
+    # constraint and a dedicated unique index below.
+    op.create_table('tenant',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('name', sa.String(), nullable=False),
+    sa.Column('description', sa.String(length=500), nullable=True),
+    sa.Column('url', sa.String(), nullable=True),
+    sa.Column('is_active', sa.Boolean(), nullable=False),
+    sa.Column('slug', sa.String(length=100), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_tenant')),
+    sa.UniqueConstraint('slug', name='uq_tenant_slug')
+    )
+    with op.batch_alter_table('tenant', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_tenant_name'), ['name'], unique=False)
+        batch_op.create_index('ix_tenant_slug_unique', ['slug'], unique=True)
+
+    # Backfill unique slug indexes on pre-existing tables.
+    with op.batch_alter_table('role', schema=None) as batch_op:
+        batch_op.create_index('ix_role_slug_unique', ['slug'], unique=True)
+
+    with op.batch_alter_table('tag', schema=None) as batch_op:
+        batch_op.create_index('ix_tag_slug_unique', ['slug'], unique=True)
+
+    with op.batch_alter_table('team', schema=None) as batch_op:
+        batch_op.create_index('ix_team_slug_unique', ['slug'], unique=True)
+
+    with op.batch_alter_table('user_account', schema=None) as batch_op:
+        # NOTE(review): tenant_id is added NOT NULL with no server default —
+        # this fails if user_account already has rows; confirm the table was
+        # empty when this migration was applied.
+        batch_op.add_column(sa.Column('tenant_id', sa.GUID(length=16), nullable=False))
+        batch_op.create_foreign_key(batch_op.f('fk_user_account_tenant_id_tenant'), 'tenant', ['tenant_id'], ['id'])
+        batch_op.create_table_comment(
+        'User accounts for application access',
+        existing_comment=None
+    )
+
+    with op.batch_alter_table('user_account_oauth', schema=None) as batch_op:
+        batch_op.create_table_comment(
+        'Registered OAUTH2 Accounts for Users',
+        existing_comment=None
+    )
+
+    with op.batch_alter_table('user_account_role', schema=None) as batch_op:
+        batch_op.create_table_comment(
+        'Links a user to a specific role.',
+        existing_comment=None
+    )
+
+    # ### end Alembic commands ###
+
+def schema_downgrades() -> None:
+    """schema downgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    # Exact reverse of schema_upgrades(): drop comments/FKs/indexes before
+    # finally dropping the tenant table itself.
+    with op.batch_alter_table('user_account_role', schema=None) as batch_op:
+        batch_op.drop_table_comment(
+        existing_comment='Links a user to a specific role.'
+    )
+
+    with op.batch_alter_table('user_account_oauth', schema=None) as batch_op:
+        batch_op.drop_table_comment(
+        existing_comment='Registered OAUTH2 Accounts for Users'
+    )
+
+    with op.batch_alter_table('user_account', schema=None) as batch_op:
+        batch_op.drop_table_comment(
+        existing_comment='User accounts for application access'
+    )
+        batch_op.drop_constraint(batch_op.f('fk_user_account_tenant_id_tenant'), type_='foreignkey')
+        batch_op.drop_column('tenant_id')
+
+    with op.batch_alter_table('team', schema=None) as batch_op:
+        batch_op.drop_index('ix_team_slug_unique')
+
+    with op.batch_alter_table('tag', schema=None) as batch_op:
+        batch_op.drop_index('ix_tag_slug_unique')
+
+    with op.batch_alter_table('role', schema=None) as batch_op:
+        batch_op.drop_index('ix_role_slug_unique')
+
+    with op.batch_alter_table('tenant', schema=None) as batch_op:
+        batch_op.drop_index('ix_tenant_slug_unique')
+        batch_op.drop_index(batch_op.f('ix_tenant_name'))
+
+    op.drop_table('tenant')
+    # ### end Alembic commands ###
+
+def data_upgrades() -> None:
+    """Add any optional data upgrade migrations here!"""
+
+def data_downgrades() -> None:
+    """Add any optional data downgrade migrations here!"""
diff --git a/src/app/db/migrations/versions/2024-07-30_make_tenant_id_mandatory_in_user_and__a05c476c0ae9.py b/src/app/db/migrations/versions/2024-07-30_make_tenant_id_mandatory_in_user_and__a05c476c0ae9.py
new file mode 100644
index 00000000..a0d5c3ef
--- /dev/null
+++ b/src/app/db/migrations/versions/2024-07-30_make_tenant_id_mandatory_in_user_and__a05c476c0ae9.py
@@ -0,0 +1,142 @@
+# type: ignore
+"""Make tenant_id mandatory in user and add models company and job post
+
+Revision ID: a05c476c0ae9
+Revises: b6ffcfae703a
+Create Date: 2024-07-30 12:21:24.309890+00:00
+
+"""
+from __future__ import annotations
+
+import warnings
+from typing import TYPE_CHECKING
+
+import sqlalchemy as sa
+from alembic import op
+from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC
+from sqlalchemy import Text  # noqa: F401
+from sqlalchemy.dialects import postgresql
+
+from app.db.models.custom_types import LocationType, FundingType
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
+
+sa.GUID = GUID
+sa.DateTimeUTC = DateTimeUTC
+sa.ORA_JSONB = ORA_JSONB
+sa.EncryptedString = EncryptedString
+sa.EncryptedText = EncryptedText
+
+# revision identifiers, used by Alembic.
+revision = "a05c476c0ae9"
+down_revision = "b6ffcfae703a"
+branch_labels = None
+depends_on = None
+
+
+# NOTE(review): committed migration — code must stay byte-identical; comments only.
+def upgrade() -> None:
+    """Apply revision a05c476c0ae9: schema first, then data."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            schema_upgrades()
+            data_upgrades()
+
+
+def downgrade() -> None:
+    """Revert revision a05c476c0ae9: data first, then schema."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            data_downgrades()
+            schema_downgrades()
+
+
+def schema_upgrades() -> None:
+    """schema upgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    # New "company" table with several lookup indexes and a unique slug.
+    op.create_table(
+        "company",
+        sa.Column("id", sa.GUID(length=16), nullable=False),
+        sa.Column("name", sa.String(), nullable=False),
+        sa.Column("description", sa.String(length=500), nullable=True),
+        sa.Column("type", sa.String(), nullable=True),
+        sa.Column("industry", sa.String(), nullable=True),
+        sa.Column("headcount", sa.Integer(), nullable=True),
+        sa.Column("founded_year", sa.Integer(), nullable=True),
+        sa.Column("url", sa.String(length=2083), nullable=True),
+        sa.Column("profile_pic_url", sa.String(), nullable=True),
+        sa.Column("linkedin_profile_url", sa.String(), nullable=True),
+        sa.Column("hq_location", LocationType(), nullable=True),
+        sa.Column("last_funding", FundingType(), nullable=True),
+        sa.Column("slug", sa.String(length=100), nullable=False),
+        sa.Column("sa_orm_sentinel", sa.Integer(), nullable=True),
+        sa.Column("created_at", sa.DateTimeUTC(timezone=True), nullable=False),
+        sa.Column("updated_at", sa.DateTimeUTC(timezone=True), nullable=False),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_company")),
+        sa.UniqueConstraint("slug", name="uq_company_slug"),
+    )
+    with op.batch_alter_table("company", schema=None) as batch_op:
+        batch_op.create_index(batch_op.f("ix_company_headcount"), ["headcount"], unique=False)
+        batch_op.create_index(batch_op.f("ix_company_industry"), ["industry"], unique=False)
+        batch_op.create_index(batch_op.f("ix_company_name"), ["name"], unique=False)
+        batch_op.create_index("ix_company_slug_unique", ["slug"], unique=True)
+        batch_op.create_index(batch_op.f("ix_company_type"), ["type"], unique=False)
+
+    # New "job_post" table with an optional FK to company.
+    op.create_table(
+        "job_post",
+        sa.Column("id", sa.GUID(length=16), nullable=False),
+        sa.Column("title", sa.String(), nullable=False),
+        sa.Column("body", sa.Text(), nullable=True),
+        sa.Column("location", LocationType(), nullable=True),
+        sa.Column("seniority_level", sa.String(), nullable=True),
+        sa.Column("employment_type", sa.String(), nullable=True),
+        sa.Column("job_functions", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+        sa.Column("total_applicants", sa.Integer(), nullable=True),
+        sa.Column("url", sa.String(length=2083), nullable=True),
+        sa.Column("apply_url", sa.String(length=2083), nullable=True),
+        sa.Column("external_id", sa.String(), nullable=True),
+        sa.Column("company_id", sa.GUID(length=16), nullable=True),
+        sa.Column("sa_orm_sentinel", sa.Integer(), nullable=True),
+        sa.Column("created_at", sa.DateTimeUTC(timezone=True), nullable=False),
+        sa.Column("updated_at", sa.DateTimeUTC(timezone=True), nullable=False),
+        sa.ForeignKeyConstraint(["company_id"], ["company.id"], name=op.f("fk_job_post_company_id_company")),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_job_post")),
+    )
+    with op.batch_alter_table("job_post", schema=None) as batch_op:
+        batch_op.create_index(batch_op.f("ix_job_post_title"), ["title"], unique=False)
+
+    with op.batch_alter_table("user_account", schema=None) as batch_op:
+        # NOTE(review): b6ffcfae703a already created tenant_id NOT NULL, so
+        # this alter looks redundant — presumably reconciling autogen state.
+        batch_op.alter_column("tenant_id", existing_type=sa.UUID(), nullable=False)
+
+    # ### end Alembic commands ###
+
+
+def schema_downgrades() -> None:
+    """schema downgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    # Reverse order of schema_upgrades(): relax tenant_id, then drop job_post
+    # (which references company), then drop company.
+    with op.batch_alter_table("user_account", schema=None) as batch_op:
+        batch_op.alter_column("tenant_id", existing_type=sa.UUID(), nullable=True)
+
+    with op.batch_alter_table("job_post", schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f("ix_job_post_title"))
+
+    op.drop_table("job_post")
+    with op.batch_alter_table("company", schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f("ix_company_type"))
+        batch_op.drop_index("ix_company_slug_unique")
+        batch_op.drop_index(batch_op.f("ix_company_name"))
+        batch_op.drop_index(batch_op.f("ix_company_industry"))
+        batch_op.drop_index(batch_op.f("ix_company_headcount"))
+
+    op.drop_table("company")
+    # ### end Alembic commands ###
+
+
+def data_upgrades() -> None:
+    """Add any optional data upgrade migrations here!"""
+
+
+def data_downgrades() -> None:
+    """Add any optional data downgrade migrations here!"""
diff --git a/src/app/db/migrations/versions/2024-07-31_add_a_company_person_relation_table_269b038e5545.py b/src/app/db/migrations/versions/2024-07-31_add_a_company_person_relation_table_269b038e5545.py
new file mode 100644
index 00000000..4ec484bd
--- /dev/null
+++ b/src/app/db/migrations/versions/2024-07-31_add_a_company_person_relation_table_269b038e5545.py
@@ -0,0 +1,86 @@
+# type: ignore
+"""Add a company person relation table
+
+Revision ID: 269b038e5545
+Revises: 698ddcfa9900
+Create Date: 2024-07-31 12:07:19.041712+00:00
+
+"""
+from __future__ import annotations
+
+import warnings
+from typing import TYPE_CHECKING
+
+import sqlalchemy as sa
+from alembic import op
+from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC
+from sqlalchemy import Text  # noqa: F401
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
+
+sa.GUID = GUID
+sa.DateTimeUTC = DateTimeUTC
+sa.ORA_JSONB = ORA_JSONB
+sa.EncryptedString = EncryptedString
+sa.EncryptedText = EncryptedText
+
+# revision identifiers, used by Alembic.
+revision = '269b038e5545'
+down_revision = '698ddcfa9900'
+branch_labels = None
+depends_on = None
+
+
+# NOTE(review): committed migration — code must stay byte-identical; comments only.
+def upgrade() -> None:
+    """Apply revision 269b038e5545: schema first, then data."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            schema_upgrades()
+            data_upgrades()
+
+def downgrade() -> None:
+    """Revert revision 269b038e5545: data first, then schema."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            data_downgrades()
+            schema_downgrades()
+
+def schema_upgrades() -> None:
+    """schema upgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    # Association table linking company and person; composite PK includes its
+    # own id column, and both FKs cascade on delete.
+    op.create_table('company_person_relation',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('title', sa.String(), nullable=False),
+    sa.Column('company_id', sa.GUID(length=16), nullable=False),
+    sa.Column('person_id', sa.GUID(length=16), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.ForeignKeyConstraint(['company_id'], ['company.id'], name=op.f('fk_company_person_relation_company_id_company'), ondelete='CASCADE'),
+    sa.ForeignKeyConstraint(['person_id'], ['person.id'], name=op.f('fk_company_person_relation_person_id_person'), ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('company_id', 'person_id', 'id', name=op.f('pk_company_person_relation'))
+    )
+    with op.batch_alter_table('company_person_relation', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_company_person_relation_company_id'), ['company_id'], unique=False)
+        batch_op.create_index(batch_op.f('ix_company_person_relation_title'), ['title'], unique=False)
+
+    # ### end Alembic commands ###
+
+def schema_downgrades() -> None:
+    """schema downgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('company_person_relation', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_company_person_relation_title'))
+        batch_op.drop_index(batch_op.f('ix_company_person_relation_company_id'))
+
+    op.drop_table('company_person_relation')
+    # ### end Alembic commands ###
+
+def data_upgrades() -> None:
+    """Add any optional data upgrade migrations here!"""
+
+def data_downgrades() -> None:
+    """Add any optional data downgrade migrations here!"""
diff --git a/src/app/db/migrations/versions/2024-07-31_add_people_model_698ddcfa9900.py b/src/app/db/migrations/versions/2024-07-31_add_people_model_698ddcfa9900.py
new file mode 100644
index 00000000..9ac7839d
--- /dev/null
+++ b/src/app/db/migrations/versions/2024-07-31_add_people_model_698ddcfa9900.py
@@ -0,0 +1,105 @@
+# type: ignore
+"""Add people model
+
+Revision ID: 698ddcfa9900
+Revises: a05c476c0ae9
+Create Date: 2024-07-31 09:48:08.467309+00:00
+
+"""
+from __future__ import annotations
+
+import warnings
+from typing import TYPE_CHECKING
+
+import sqlalchemy as sa
+from alembic import op
+from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC
+from sqlalchemy import Text  # noqa: F401
+from sqlalchemy.dialects import postgresql
+from app.db.models.custom_types import LocationType, WorkExperienceType, SocialActivityType
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
+
+sa.GUID = GUID
+sa.DateTimeUTC = DateTimeUTC
+sa.ORA_JSONB = ORA_JSONB
+sa.EncryptedString = EncryptedString
+sa.EncryptedText = EncryptedText
+
+# revision identifiers, used by Alembic.
+revision = '698ddcfa9900'
+down_revision = 'a05c476c0ae9'
+branch_labels = None
+depends_on = None
+
+
+# NOTE(review): committed migration — code must stay byte-identical; comments only.
+def upgrade() -> None:
+    """Apply revision 698ddcfa9900: schema first, then data."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            schema_upgrades()
+            data_upgrades()
+
+def downgrade() -> None:
+    """Revert revision 698ddcfa9900: data first, then schema."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            data_downgrades()
+            schema_downgrades()
+
+def schema_upgrades() -> None:
+    """schema upgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    # New "person" table; contact fields stored as JSONB, rich fields use the
+    # project's custom types, and slug is unique (constraint + index below).
+    op.create_table('person',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('first_name', sa.String(), nullable=True),
+    sa.Column('last_name', sa.String(), nullable=True),
+    sa.Column('full_name', sa.String(), nullable=True),
+    sa.Column('headline', sa.String(length=500), nullable=True),
+    sa.Column('summary', sa.String(length=2000), nullable=True),
+    sa.Column('occupation', sa.String(), nullable=True),
+    sa.Column('industry', sa.String(), nullable=True),
+    sa.Column('profile_pic_url', sa.String(length=2083), nullable=True),
+    sa.Column('url', sa.String(length=2083), nullable=True),
+    sa.Column('linkedin_profile_url', sa.String(length=2083), nullable=True),
+    sa.Column('twitter_profile_url', sa.String(length=2083), nullable=True),
+    sa.Column('github_profile_url', sa.String(length=2083), nullable=True),
+    sa.Column('location', LocationType(), nullable=True),
+    sa.Column('personal_emails', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+    sa.Column('work_emails', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+    sa.Column('personal_numbers', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+    sa.Column('birth_date', sa.Date(), nullable=True),
+    sa.Column('gender', sa.String(), nullable=True),
+    sa.Column('languages', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+    sa.Column('work_experiences', WorkExperienceType(), nullable=True),
+    sa.Column('social_activities', SocialActivityType(), nullable=True),
+    sa.Column('slug', sa.String(length=100), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_person')),
+    sa.UniqueConstraint('slug', name='uq_person_slug')
+    )
+    with op.batch_alter_table('person', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_person_industry'), ['industry'], unique=False)
+        batch_op.create_index('ix_person_slug_unique', ['slug'], unique=True)
+
+    # ### end Alembic commands ###
+
+def schema_downgrades() -> None:
+    """schema downgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('person', schema=None) as batch_op:
+        batch_op.drop_index('ix_person_slug_unique')
+        batch_op.drop_index(batch_op.f('ix_person_industry'))
+
+    op.drop_table('person')
+    # ### end Alembic commands ###
+
+def data_upgrades() -> None:
+    """Add any optional data upgrade migrations here!"""
+
+def data_downgrades() -> None:
+    """Add any optional data downgrade migrations here!"""
diff --git a/src/app/db/migrations/versions/2024-08-02_add_association_opportunity_job_post_992fa69e02cb.py b/src/app/db/migrations/versions/2024-08-02_add_association_opportunity_job_post_992fa69e02cb.py
new file mode 100644
index 00000000..b0e2d208
--- /dev/null
+++ b/src/app/db/migrations/versions/2024-08-02_add_association_opportunity_job_post_992fa69e02cb.py
@@ -0,0 +1,73 @@
+# type: ignore
+"""Add association opportunity_job_post
+
+Revision ID: 992fa69e02cb
+Revises: 44ace4dcae8c
+Create Date: 2024-08-02 13:55:31.388468+00:00
+
+"""
+from __future__ import annotations
+
+import warnings
+from typing import TYPE_CHECKING
+
+import sqlalchemy as sa
+from alembic import op
+from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC
+from sqlalchemy import Text  # noqa: F401
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
+
+sa.GUID = GUID
+sa.DateTimeUTC = DateTimeUTC
+sa.ORA_JSONB = ORA_JSONB
+sa.EncryptedString = EncryptedString
+sa.EncryptedText = EncryptedText
+
+# revision identifiers, used by Alembic.
+revision = '992fa69e02cb'
+down_revision = '44ace4dcae8c'
+branch_labels = None
+depends_on = None
+
+
+# NOTE(review): committed migration — code must stay byte-identical; comments only.
+def upgrade() -> None:
+    """Apply revision 992fa69e02cb: schema first, then data."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            schema_upgrades()
+            data_upgrades()
+
+def downgrade() -> None:
+    """Revert revision 992fa69e02cb: data first, then schema."""
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            data_downgrades()
+            schema_downgrades()
+
+def schema_upgrades() -> None:
+    """schema upgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    # Pure many-to-many link table (no surrogate id); composite PK over both
+    # FK columns, each cascading on delete.
+    op.create_table('opportunity_job_post_relation',
+    sa.Column('opportunity_id', sa.GUID(length=16), nullable=False),
+    sa.Column('job_post_id', sa.GUID(length=16), nullable=False),
+    sa.ForeignKeyConstraint(['job_post_id'], ['job_post.id'], name=op.f('fk_opportunity_job_post_relation_job_post_id_job_post'), ondelete='CASCADE'),
+    sa.ForeignKeyConstraint(['opportunity_id'], ['opportunity.id'], name=op.f('fk_opportunity_job_post_relation_opportunity_id_opportunity'), ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('opportunity_id', 'job_post_id', name=op.f('pk_opportunity_job_post_relation'))
+    )
+    # ### end Alembic commands ###
+
+def schema_downgrades() -> None:
+    """schema downgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('opportunity_job_post_relation')
+    # ### end Alembic commands ###
+
+def data_upgrades() -> None:
+    """Add any optional data upgrade migrations here!"""
+
+def data_downgrades() -> None:
+    """Add any optional data downgrade migrations here!"""
diff --git a/src/app/db/migrations/versions/2024-08-02_add_opportunities_domain_along_with_the__44ace4dcae8c.py b/src/app/db/migrations/versions/2024-08-02_add_opportunities_domain_along_with_the__44ace4dcae8c.py
new file mode 100644
index 00000000..8d44b601
--- /dev/null
+++ b/src/app/db/migrations/versions/2024-08-02_add_opportunities_domain_along_with_the__44ace4dcae8c.py
@@ -0,0 +1,129 @@
+# type: ignore
+"""Add opportunities domain along with the model
+
+Revision ID: 44ace4dcae8c
+Revises: 269b038e5545
+Create Date: 2024-08-02 12:32:53.948339+00:00
+
+"""
+from __future__ import annotations
+
+import warnings
+from typing import TYPE_CHECKING
+
+import sqlalchemy as sa
+from alembic import op
+from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC
+from sqlalchemy import Text  # noqa: F401
+from sqlalchemy.dialects import postgresql
+
+from app.db.models.custom_types import OpportunityStageType
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
+
+sa.GUID = GUID
+sa.DateTimeUTC = DateTimeUTC
+sa.ORA_JSONB = ORA_JSONB
+sa.EncryptedString = EncryptedString
+sa.EncryptedText = EncryptedText
+
+# revision identifiers, used by Alembic.
+revision = '44ace4dcae8c'
+down_revision = '269b038e5545'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            schema_upgrades()
+            data_upgrades()
+
+def downgrade() -> None:
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            data_downgrades()
+            schema_downgrades()
+
+def schema_upgrades() -> None:
+    """schema upgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('opportunity',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('name', sa.String(), nullable=False),
+    sa.Column('stage', OpportunityStageType(), nullable=False),
+    sa.Column('notes', sa.Text(), nullable=False),
+    sa.Column('tenant_id', sa.GUID(length=16), nullable=False),
+    sa.Column('owner_id', sa.GUID(length=16), nullable=True),
+    sa.Column('company_id', sa.GUID(length=16), nullable=True),
+    sa.Column('slug', sa.String(length=100), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.ForeignKeyConstraint(['company_id'], ['company.id'], name=op.f('fk_opportunity_company_id_company')),
+    sa.ForeignKeyConstraint(['owner_id'], ['user_account.id'], name=op.f('fk_opportunity_owner_id_user_account')),
+    sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], name=op.f('fk_opportunity_tenant_id_tenant')),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_opportunity'))
+    )
+    with op.batch_alter_table('opportunity', schema=None) as batch_op:
+        batch_op.create_index('ix_opportunity_id_tenant_id', ['id', 'tenant_id'], unique=False)
+        batch_op.create_index(batch_op.f('ix_opportunity_name'), ['name'], unique=False)
+        batch_op.create_index(batch_op.f('ix_opportunity_stage'), ['stage'], unique=False)
+        batch_op.create_index(batch_op.f('ix_opportunity_tenant_id'), ['tenant_id'], unique=False)
+
+    op.create_table('opportunity_audit_log',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('operation', sa.String(), nullable=False),
+    sa.Column('diff', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
+    sa.Column('user_id', sa.GUID(length=16), nullable=False),
+    sa.Column('tenant_id', sa.GUID(length=16), nullable=False),
+    sa.Column('opportunity_id', sa.GUID(length=16), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.ForeignKeyConstraint(['opportunity_id'], ['opportunity.id'], name=op.f('fk_opportunity_audit_log_opportunity_id_opportunity')),
+    sa.ForeignKeyConstraint(['tenant_id'], ['tenant.id'], name=op.f('fk_opportunity_audit_log_tenant_id_tenant')),
+    sa.ForeignKeyConstraint(['user_id'], ['user_account.id'], name=op.f('fk_opportunity_audit_log_user_id_user_account')),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_opportunity_audit_log'))
+    )
+    with op.batch_alter_table('opportunity_audit_log', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_opportunity_audit_log_opportunity_id'), ['opportunity_id'], unique=False)
+        batch_op.create_index('ix_opportunity_audit_log_opportunity_id_tenant_id', ['opportunity_id', 'tenant_id'], unique=False)
+
+    op.create_table('opportunity_person_relation',
+    sa.Column('opportunity_id', sa.GUID(length=16), nullable=False),
+    sa.Column('person_id', sa.GUID(length=16), nullable=False),
+    sa.ForeignKeyConstraint(['opportunity_id'], ['opportunity.id'], name=op.f('fk_opportunity_person_relation_opportunity_id_opportunity'), ondelete='CASCADE'),
+    sa.ForeignKeyConstraint(['person_id'], ['person.id'], name=op.f('fk_opportunity_person_relation_person_id_person'), ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('opportunity_id', 'person_id', name=op.f('pk_opportunity_person_relation'))
+    )
+    # ### end Alembic commands ###
+
+def schema_downgrades() -> None:
+    """schema downgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('opportunity_person_relation')
+    with op.batch_alter_table('opportunity_audit_log', schema=None) as batch_op:
+        batch_op.drop_index('ix_opportunity_audit_log_opportunity_id_tenant_id')
+        batch_op.drop_index(batch_op.f('ix_opportunity_audit_log_opportunity_id'))
+
+    op.drop_table('opportunity_audit_log')
+    with op.batch_alter_table('opportunity', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_opportunity_tenant_id'))
+        batch_op.drop_index(batch_op.f('ix_opportunity_stage'))
+        batch_op.drop_index(batch_op.f('ix_opportunity_name'))
+        batch_op.drop_index('ix_opportunity_id_tenant_id')
+
+    op.drop_table('opportunity')
+    # ### end Alembic commands ###
+
+def data_upgrades() -> None:
+    """Add any optional data upgrade migrations here!"""
+
+def data_downgrades() -> None:
+    """Add any optional data downgrade migrations here!"""
diff --git a/src/pyspa/core/prestart.py b/src/app/db/migrations/versions/__init__.py
similarity index 100%
rename from src/pyspa/core/prestart.py
rename to src/app/db/migrations/versions/__init__.py
diff --git a/src/app/db/migrations/versions/a22cc7704d14_initial_revision.py b/src/app/db/migrations/versions/a22cc7704d14_initial_revision.py
new file mode 100644
index 00000000..b39413e4
--- /dev/null
+++ b/src/app/db/migrations/versions/a22cc7704d14_initial_revision.py
@@ -0,0 +1,225 @@
+# type: ignore
+"""Initial revision
+
+Revision ID: a22cc7704d14
+Revises:
+Create Date: 2024-01-14 14:59:07.826121
+
+"""
+from __future__ import annotations
+
+import warnings
+from typing import TYPE_CHECKING
+
+import sqlalchemy as sa
+from alembic import op
+from advanced_alchemy.types import EncryptedString, EncryptedText, GUID, ORA_JSONB, DateTimeUTC
+from sqlalchemy import Text  # noqa: F401
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+__all__ = ["downgrade", "upgrade", "schema_upgrades", "schema_downgrades", "data_upgrades", "data_downgrades"]
+
+sa.GUID = GUID
+sa.DateTimeUTC = DateTimeUTC
+sa.ORA_JSONB = ORA_JSONB
+sa.EncryptedString = EncryptedString
+sa.EncryptedText = EncryptedText
+
+# revision identifiers, used by Alembic.
+revision = 'a22cc7704d14'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            schema_upgrades()
+            data_upgrades()
+
+def downgrade() -> None:
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=UserWarning)
+        with op.get_context().autocommit_block():
+            data_downgrades()
+            schema_downgrades()
+
+def schema_upgrades() -> None:
+    """schema upgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('role',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('slug', sa.String(length=100), nullable=False),
+    sa.Column('name', sa.String(), nullable=False),
+    sa.Column('description', sa.String(), nullable=True),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_role')),
+    sa.UniqueConstraint('name'),
+    sa.UniqueConstraint('name', name=op.f('uq_role_name')),
+    sa.UniqueConstraint('slug'),
+    sa.UniqueConstraint('slug', name=op.f('uq_role_slug'))
+    )
+    op.create_table('tag',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('slug', sa.String(length=100), nullable=False),
+    sa.Column('name', sa.String(), nullable=False),
+    sa.Column('description', sa.String(length=255), nullable=True),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_tag')),
+    sa.UniqueConstraint('slug'),
+    sa.UniqueConstraint('slug', name=op.f('uq_tag_slug'))
+    )
+    op.create_table('team',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('slug', sa.String(length=100), nullable=False),
+    sa.Column('name', sa.String(), nullable=False),
+    sa.Column('description', sa.String(length=500), nullable=True),
+    sa.Column('is_active', sa.Boolean(), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_team')),
+    sa.UniqueConstraint('slug'),
+    sa.UniqueConstraint('slug', name=op.f('uq_team_slug'))
+    )
+    with op.batch_alter_table('team', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_team_name'), ['name'], unique=False)
+
+    op.create_table('user_account',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('email', sa.String(), nullable=False),
+    sa.Column('name', sa.String(), nullable=True),
+    sa.Column('hashed_password', sa.String(length=255), nullable=True),
+    sa.Column('avatar_url', sa.String(length=500), nullable=True),
+    sa.Column('is_active', sa.Boolean(), nullable=False),
+    sa.Column('is_superuser', sa.Boolean(), nullable=False),
+    sa.Column('is_verified', sa.Boolean(), nullable=False),
+    sa.Column('verified_at', sa.Date(), nullable=True),
+    sa.Column('joined_at', sa.Date(), nullable=False),
+    sa.Column('login_count', sa.Integer(), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_user_account'))
+    )
+    with op.batch_alter_table('user_account', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_user_account_email'), ['email'], unique=True)
+
+    op.create_table('team_invitation',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('team_id', sa.GUID(length=16), nullable=False),
+    sa.Column('email', sa.String(), nullable=False),
+    sa.Column('role', sa.String(length=50), nullable=False),
+    sa.Column('is_accepted', sa.Boolean(), nullable=False),
+    sa.Column('invited_by_id', sa.GUID(length=16), nullable=True),
+    sa.Column('invited_by_email', sa.String(), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.ForeignKeyConstraint(['invited_by_id'], ['user_account.id'], name=op.f('fk_team_invitation_invited_by_id_user_account'), ondelete='set null'),
+    sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_team_invitation_team_id_team'), ondelete='cascade'),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_team_invitation'))
+    )
+    with op.batch_alter_table('team_invitation', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_team_invitation_email'), ['email'], unique=False)
+
+    op.create_table('team_member',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('user_id', sa.GUID(length=16), nullable=False),
+    sa.Column('team_id', sa.GUID(length=16), nullable=False),
+    sa.Column('role', sa.String(length=50), nullable=False),
+    sa.Column('is_owner', sa.Boolean(), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_team_member_team_id_team'), ondelete='cascade'),
+    sa.ForeignKeyConstraint(['user_id'], ['user_account.id'], name=op.f('fk_team_member_user_id_user_account'), ondelete='cascade'),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_team_member')),
+    sa.UniqueConstraint('user_id', 'team_id', name=op.f('uq_team_member_user_id'))
+    )
+    with op.batch_alter_table('team_member', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_team_member_role'), ['role'], unique=False)
+
+    op.create_table('team_tag',
+    sa.Column('team_id', sa.GUID(length=16), nullable=False),
+    sa.Column('tag_id', sa.GUID(length=16), nullable=False),
+    sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], name=op.f('fk_team_tag_tag_id_tag'), ondelete='CASCADE'),
+    sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_team_tag_team_id_team'), ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('team_id', 'tag_id', name=op.f('pk_team_tag'))
+    )
+    op.create_table('user_account_oauth',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('user_id', sa.GUID(length=16), nullable=False),
+    sa.Column('oauth_name', sa.String(length=100), nullable=False),
+    sa.Column('access_token', sa.String(length=1024), nullable=False),
+    sa.Column('expires_at', sa.Integer(), nullable=True),
+    sa.Column('refresh_token', sa.String(length=1024), nullable=True),
+    sa.Column('account_id', sa.String(length=320), nullable=False),
+    sa.Column('account_email', sa.String(length=320), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.ForeignKeyConstraint(['user_id'], ['user_account.id'], name=op.f('fk_user_account_oauth_user_id_user_account'), ondelete='cascade'),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_user_account_oauth'))
+    )
+    with op.batch_alter_table('user_account_oauth', schema=None) as batch_op:
+        batch_op.create_index(batch_op.f('ix_user_account_oauth_account_id'), ['account_id'], unique=False)
+        batch_op.create_index(batch_op.f('ix_user_account_oauth_oauth_name'), ['oauth_name'], unique=False)
+
+    op.create_table('user_account_role',
+    sa.Column('id', sa.GUID(length=16), nullable=False),
+    sa.Column('user_id', sa.GUID(length=16), nullable=False),
+    sa.Column('role_id', sa.GUID(length=16), nullable=False),
+    sa.Column('assigned_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('sa_orm_sentinel', sa.Integer(), nullable=True),
+    sa.Column('created_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.Column('updated_at', sa.DateTimeUTC(timezone=True), nullable=False),
+    sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_user_account_role_role_id_role'), ondelete='cascade'),
+    sa.ForeignKeyConstraint(['user_id'], ['user_account.id'], name=op.f('fk_user_account_role_user_id_user_account'), ondelete='cascade'),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_user_account_role'))
+    )
+    # ### end Alembic commands ###
+
+def schema_downgrades() -> None:
+    """schema downgrade migrations go here."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_table('user_account_role')
+    with op.batch_alter_table('user_account_oauth', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_user_account_oauth_oauth_name'))
+        batch_op.drop_index(batch_op.f('ix_user_account_oauth_account_id'))
+
+    op.drop_table('user_account_oauth')
+    op.drop_table('team_tag')
+    with op.batch_alter_table('team_member', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_team_member_role'))
+
+    op.drop_table('team_member')
+    with op.batch_alter_table('team_invitation', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_team_invitation_email'))
+
+    op.drop_table('team_invitation')
+    with op.batch_alter_table('user_account', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_user_account_email'))
+
+    op.drop_table('user_account')
+    with op.batch_alter_table('team', schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f('ix_team_name'))
+
+    op.drop_table('team')
+    op.drop_table('tag')
+    op.drop_table('role')
+    # ### end Alembic commands ###
+
+def data_upgrades() -> None:
+    """Add any optional data upgrade migrations here!"""
+
+def data_downgrades() -> None:
+    """Add any optional data downgrade migrations here!"""
diff --git a/src/app/db/models/__init__.py b/src/app/db/models/__init__.py
new file mode 100644
index 00000000..b674d4f3
--- /dev/null
+++ b/src/app/db/models/__init__.py
@@ -0,0 +1,37 @@
+from .oauth_account import UserOauthAccount
+from .role import Role
+from .tag import Tag
+from .team import Team
+from .team_invitation import TeamInvitation
+from .team_member import TeamMember
+from .team_roles import TeamRoles
+from .team_tag import team_tag
+from .user import User
+from .user_role import UserRole
+from .tenant import Tenant
+from .company import Company, CompanyOrg
+from .job_post import JobPost
+from .person import Person
+from .opportunity import Opportunity, OpportunityAuditLog, opportunity_person_relation, opportunity_job_post_relation
+
+__all__ = (
+    "User",
+    "UserOauthAccount",
+    "Role",
+    "UserRole",
+    "Tag",
+    "team_tag",
+    "Team",
+    "TeamInvitation",
+    "TeamMember",
+    "TeamRoles",
+    "Tenant",
+    "Company",
+    "CompanyOrg",
+    "JobPost",
+    "Person",
+    "Opportunity",
+    "OpportunityAuditLog",
+    "opportunity_person_relation",
+    "opportunity_job_post_relation",
+)
diff --git a/src/app/db/models/company.py b/src/app/db/models/company.py
new file mode 100644
index 00000000..c00a5bb3
--- /dev/null
+++ b/src/app/db/models/company.py
@@ -0,0 +1,43 @@
+from __future__ import annotations
+
+from uuid import UUID
+from datetime import date
+
+from advanced_alchemy.base import SlugKey, UUIDAuditBase
+from sqlalchemy import String, ForeignKey
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+from app.lib.schema import Location, Funding
+from .custom_types import LocationType, FundingType
+
+
+class CompanyOrg(UUIDAuditBase):
+    """A company people org."""
+
+    __tablename__ = "company_person_relation"
+    __pii_columns__ = {}
+    title: Mapped[str] = mapped_column(nullable=False, index=True)
+    company_id: Mapped[UUID] = mapped_column(ForeignKey("company.id", ondelete="CASCADE"), primary_key=True, index=True)
+    person_id: Mapped[UUID] = mapped_column(ForeignKey("person.id", ondelete="CASCADE"), primary_key=True)
+
+
+class Company(UUIDAuditBase, SlugKey):
+    """A company or an organization."""
+
+    __tablename__ = "company"
+    __pii_columns__ = {"name", "description", "url", "linkedin_profile_url", "profile_pic_url"}
+    name: Mapped[str] = mapped_column(nullable=False, index=True)
+    description: Mapped[str | None] = mapped_column(String(length=500), nullable=True, default=None)
+    type: Mapped[str | None] = mapped_column(nullable=True, default=None, index=True)
+    industry: Mapped[str | None] = mapped_column(nullable=True, default=None, index=True)
+    headcount: Mapped[int | None] = mapped_column(nullable=True, default=None, index=True)
+    founded_year: Mapped[int | None] = mapped_column(nullable=True, default=None)
+    url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    profile_pic_url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    linkedin_profile_url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    hq_location: Mapped[Location | None] = mapped_column(LocationType, nullable=True, default=None)
+    last_funding: Mapped[Funding | None] = mapped_column(FundingType, nullable=True, default=None)
+    # -----------
+    # ORM Relationships
+    # ------------
+    people: Mapped[list[CompanyOrg]] = relationship(cascade="all, delete")
diff --git a/src/app/db/models/custom_types.py b/src/app/db/models/custom_types.py
new file mode 100644
index 00000000..c2e1b249
--- /dev/null
+++ b/src/app/db/models/custom_types.py
@@ -0,0 +1,72 @@
+from __future__ import annotations
+
+import json
+from dataclasses import dataclass, asdict
+
+from sqlalchemy.types import TypeDecorator, String
+from sqlalchemy.dialects.postgresql import JSONB
+
+from app.lib.schema import Location, Funding, WorkExperience, SocialActivity, OpportunityStage
+
+
+class JSONBType(TypeDecorator):
+    impl = JSONB  # Use the PostgreSQL JSONB type as base
+
+    def process_bind_param(self, value, dialect):
+        """Convert Python object to JSON format before storing it in the database."""
+        if isinstance(value, dict):
+            return value
+        elif hasattr(value, 'to_dict'):
+            return value.to_dict()
+        return value
+
+    def process_result_value(self, value, dialect):
+        """Convert JSON format to Python object when reading from the database."""
+        if isinstance(value, str):
+            return json.loads(value)
+        return value
+
+
+class LocationType(JSONBType):
+    def process_result_value(self, value, dialect):
+        """Convert JSON format to Python object when reading from the database."""
+        if value and isinstance(value, dict):
+            return Location.from_dict(value)
+        return None
+
+
+class FundingType(JSONBType):
+    def process_result_value(self, value, dialect):
+        """Convert JSON format to Python object when reading from the database."""
+        if value and isinstance(value, dict):
+            return Funding.from_dict(value)
+        return None
+
+
+class WorkExperienceType(JSONBType):
+    def process_result_value(self, value, dialect):
+        """Convert JSON format to Python object when reading from the database."""
+        if value and isinstance(value, dict):
+            return WorkExperience.from_dict(value)
+        return None
+
+class SocialActivityType(JSONBType):
+    def process_result_value(self, value, dialect):
+        """Convert JSON format to Python object when reading from the database."""
+        if value and isinstance(value, dict):
+            return SocialActivity.from_dict(value)
+        return None
+
+
+class OpportunityStageType(TypeDecorator):
+    impl = String
+
+    def process_bind_param(self, value, dialect):
+        if isinstance(value, OpportunityStage):
+            return value.value
+        return value
+
+    def process_result_value(self, value, dialect):
+        if value is not None:
+            return OpportunityStage(value)
+        return value
diff --git a/src/app/db/models/job_post.py b/src/app/db/models/job_post.py
new file mode 100644
index 00000000..159d82be
--- /dev/null
+++ b/src/app/db/models/job_post.py
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from uuid import UUID
+from advanced_alchemy.base import SlugKey, UUIDAuditBase
+from sqlalchemy import String, Text, ForeignKey
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+from app.lib.schema import Location, Funding
+from .custom_types import LocationType, FundingType
+from .company import Company
+
+
+class JobPost(UUIDAuditBase):
+    """A job post."""
+
+    __tablename__ = "job_post"
+    __pii_columns__ = {}
+    title: Mapped[str] = mapped_column(nullable=False, index=True)
+    body: Mapped[str | None] = mapped_column(Text, nullable=True, default=None)
+    location: Mapped[Location | None] = mapped_column(LocationType, nullable=True, default=None)
+    seniority_level: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    employment_type: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    job_functions: Mapped[list[str] | None] = mapped_column(JSONB, nullable=True, default=None)
+    total_applicants: Mapped[int | None] = mapped_column(nullable=True, default=None)
+    url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    apply_url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    external_id: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    company_id: Mapped[UUID] = mapped_column(ForeignKey("company.id"), nullable=True)
+    # -----------
+    # ORM Relationships
+    # ------------
+    company: Mapped[Company] = relationship(
+        lazy="joined",
+    )
diff --git a/src/app/db/models/oauth_account.py b/src/app/db/models/oauth_account.py
new file mode 100644
index 00000000..bb50e298
--- /dev/null
+++ b/src/app/db/models/oauth_account.py
@@ -0,0 +1,43 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.base import UUIDAuditBase
+from sqlalchemy import ForeignKey, Integer, String
+from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+if TYPE_CHECKING:
+    from .user import User
+
+
+class UserOauthAccount(UUIDAuditBase):
+    """User Oauth Account"""
+
+    __tablename__ = "user_account_oauth"
+    __table_args__ = {"comment": "Registered OAUTH2 Accounts for Users"}
+    __pii_columns__ = {"oauth_name", "account_email", "account_id"}
+
+    user_id: Mapped[UUID] = mapped_column(
+        ForeignKey("user_account.id", ondelete="cascade"),
+        nullable=False,
+    )
+    oauth_name: Mapped[str] = mapped_column(String(length=100), index=True, nullable=False)
+    access_token: Mapped[str] = mapped_column(String(length=1024), nullable=False)
+    expires_at: Mapped[int | None] = mapped_column(Integer, nullable=True)
+    refresh_token: Mapped[str | None] = mapped_column(String(length=1024), nullable=True)
+    account_id: Mapped[str] = mapped_column(String(length=320), index=True, nullable=False)
+    account_email: Mapped[str] = mapped_column(String(length=320), nullable=False)
+
+    # -----------
+    # ORM Relationships
+    # ------------
+    user_name: AssociationProxy[str] = association_proxy("user", "name")
+    user_email: AssociationProxy[str] = association_proxy("user", "email")
+    user: Mapped[User] = relationship(
+        back_populates="oauth_accounts",
+        viewonly=True,
+        innerjoin=True,
+        lazy="joined",
+    )
diff --git a/src/app/db/models/opportunity.py b/src/app/db/models/opportunity.py
new file mode 100644
index 00000000..ea6e8c62
--- /dev/null
+++ b/src/app/db/models/opportunity.py
@@ -0,0 +1,94 @@
+from __future__ import annotations
+
+from uuid import UUID
+from datetime import date
+from typing import Any, Final, TYPE_CHECKING
+
+from advanced_alchemy.base import SlugKey, UUIDAuditBase, orm_registry
+from sqlalchemy import String, Text, ForeignKey, Index, Column, Table
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+from sqlalchemy.dialects.postgresql import JSONB
+
+from app.lib.schema import OpportunityStage
+from .company import Company
+from .person import Person
+from .job_post import JobPost
+from .custom_types import OpportunityStageType
+
+if TYPE_CHECKING:
+    from .user import User
+
+
+opportunity_person_relation: Final[Table] = Table(
+    "opportunity_person_relation",
+    orm_registry.metadata,
+    Column("opportunity_id", ForeignKey("opportunity.id", ondelete="CASCADE"), primary_key=True),
+    Column("person_id", ForeignKey("person.id", ondelete="CASCADE"), primary_key=True),
+)
+
+opportunity_job_post_relation: Final[Table] = Table(
+    "opportunity_job_post_relation",
+    orm_registry.metadata,
+    Column("opportunity_id", ForeignKey("opportunity.id", ondelete="CASCADE"), primary_key=True),
+    Column("job_post_id", ForeignKey("job_post.id", ondelete="CASCADE"), primary_key=True),
+)
+
+class OpportunityAuditLog(UUIDAuditBase):
+    """An audit log for opportunity."""
+
+    __tablename__ = "opportunity_audit_log"
+    __table_args__ = (
+        Index('ix_opportunity_audit_log_opportunity_id_tenant_id', 'opportunity_id', 'tenant_id'),
+    )
+    operation: Mapped[str] = mapped_column(nullable=False)
+    diff: Mapped[dict[str, Any] | None] = mapped_column(JSONB, nullable=True, default=None)
+    user_id: Mapped[UUID] = mapped_column(ForeignKey("user_account.id"), nullable=False)
+    tenant_id: Mapped[UUID] = mapped_column(ForeignKey("tenant.id"), nullable=False)
+    opportunity_id: Mapped[UUID] = mapped_column(ForeignKey("opportunity.id"), nullable=False, index=True)
+    # -----------
+    # ORM Relationships
+    # ------------
+    user: Mapped[User] = relationship(
+        innerjoin=True,
+        lazy="joined",
+    )
+
+
+class Opportunity(UUIDAuditBase, SlugKey):
+    """An opportunity."""
+
+    __tablename__ = "opportunity"
+    __pii_columns__ = {}
+    __table_args__ = (
+        Index('ix_opportunity_id_tenant_id', 'id', 'tenant_id'),
+    )
+    name: Mapped[str] = mapped_column(nullable=False, index=True)
+    stage: Mapped[OpportunityStage] = mapped_column(OpportunityStageType, nullable=False, default="identified", index=True)
+    notes: Mapped[str] = mapped_column(Text, nullable=False, default="")
+    tenant_id: Mapped[UUID] = mapped_column(ForeignKey("tenant.id"), nullable=False, index=True)
+    owner_id: Mapped[UUID] = mapped_column(ForeignKey("user_account.id"), nullable=True, default=None)
+    company_id: Mapped[UUID] = mapped_column(ForeignKey("company.id"), nullable=True)
+    # -----------
+    # ORM Relationships
+    # ------------
+    owner: Mapped[User] = relationship(
+        lazy="joined",
+    )
+    company: Mapped[Company] = relationship(
+        lazy="joined",
+    )
+    contacts: Mapped[list[Person]] = relationship(
+        secondary=lambda: opportunity_person_relation,
+        cascade="all, delete",
+        passive_deletes=True,
+        lazy="selectin",
+    )
+    job_posts: Mapped[list[JobPost]] = relationship(
+        secondary=lambda: opportunity_job_post_relation,
+        cascade="all, delete",
+        passive_deletes=True,
+        lazy="selectin",
+    )
+    logs: Mapped[list[OpportunityAuditLog]] = relationship(
+        lazy="selectin",
+    )
diff --git a/src/app/db/models/person.py b/src/app/db/models/person.py
new file mode 100644
index 00000000..9511442e
--- /dev/null
+++ b/src/app/db/models/person.py
@@ -0,0 +1,43 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from datetime import date
+
+from advanced_alchemy.base import SlugKey, UUIDAuditBase
+from sqlalchemy import String
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+from app.lib.schema import Location, WorkExperience, SocialActivity
+from .custom_types import LocationType, WorkExperienceType, SocialActivityType
+
+
+class Person(UUIDAuditBase, SlugKey):
+    """A person (e.g. a contact or lead) with profile and social data.
+
+    Most columns are nullable because records are typically filled in
+    incrementally from enrichment sources.
+    """
+
+    __tablename__ = "person"
+    # Columns containing personally identifiable information.
+    # NOTE(review): "linkedin_url" does not match any column below (the
+    # column is named linkedin_profile_url) — confirm the intended key.
+    __pii_columns__ = {"first_name", "last_name", "full_name", "linkedin_url", "profile_pic_url", "personal_emails", "work_emails", "personal_numbers", "social_activities"}
+    first_name: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    last_name: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    full_name: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    headline: Mapped[str | None] = mapped_column(String(length=500), nullable=True, default=None)
+    summary: Mapped[str | None] = mapped_column(String(length=2000), nullable=True, default=None)
+    occupation: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    industry: Mapped[str | None] = mapped_column(nullable=True, default=None, index=True)
+    # length=2083: the conventional maximum URL length.
+    profile_pic_url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    linkedin_profile_url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    twitter_profile_url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    github_profile_url: Mapped[str | None] = mapped_column(String(length=2083), nullable=True, default=None)
+    # Structured values persisted through custom column types / JSONB.
+    location: Mapped[Location | None] = mapped_column(LocationType, nullable=True, default=None)
+    personal_emails: Mapped[list[str] | None] = mapped_column(JSONB, nullable=True, default=None)
+    work_emails: Mapped[list[str] | None] = mapped_column(JSONB, nullable=True, default=None)
+    personal_numbers: Mapped[list[str] | None] = mapped_column(JSONB, nullable=True, default=None)
+    birth_date: Mapped[date | None] = mapped_column(nullable=True, default=None)
+    gender: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    languages: Mapped[list[str] | None] = mapped_column(JSONB, nullable=True, default=None)
+    work_experiences: Mapped[list[WorkExperience] | None] = mapped_column(WorkExperienceType, nullable=True, default=None)
+    social_activities: Mapped[list[SocialActivity] | None] = mapped_column(SocialActivityType, nullable=True, default=None)
+    # -----------
+    # ORM Relationships
+    # ------------
diff --git a/src/app/db/models/role.py b/src/app/db/models/role.py
new file mode 100644
index 00000000..ae028285
--- /dev/null
+++ b/src/app/db/models/role.py
@@ -0,0 +1,27 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from advanced_alchemy.base import SlugKey, UUIDAuditBase
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+if TYPE_CHECKING:
+    from .user_role import UserRole
+
+
+class Role(UUIDAuditBase, SlugKey):
+    """A named permission role that can be assigned to user accounts."""
+
+    __tablename__ = "role"
+
+    name: Mapped[str] = mapped_column(unique=True)
+    description: Mapped[str | None]
+    # -----------
+    # ORM Relationships
+    # ------------
+    # Read-only view of the user/role association rows; not loaded unless
+    # explicitly requested ("noload").
+    users: Mapped[list[UserRole]] = relationship(
+        back_populates="role",
+        cascade="all, delete",
+        lazy="noload",
+        viewonly=True,
+    )
diff --git a/src/app/db/models/tag.py b/src/app/db/models/tag.py
new file mode 100644
index 00000000..24967718
--- /dev/null
+++ b/src/app/db/models/tag.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from advanced_alchemy.base import SlugKey, UUIDAuditBase
+from advanced_alchemy.mixins import UniqueMixin
+from advanced_alchemy.utils.text import slugify
+from sqlalchemy import (
+    ColumnElement,
+    String,
+    Table,
+)
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+if TYPE_CHECKING:
+    from collections.abc import Hashable
+
+    from .team import Team
+
+
+class Tag(UUIDAuditBase, SlugKey, UniqueMixin):
+    """Tag.
+
+    ``UniqueMixin`` deduplicates tags: two names that slugify to the same
+    value resolve to the same row (see ``unique_hash``/``unique_filter``).
+    """
+
+    __tablename__ = "tag"
+    name: Mapped[str] = mapped_column(index=False)
+    description: Mapped[str | None] = mapped_column(String(length=255), index=False, nullable=True)
+
+    # -----------
+    # ORM Relationships
+    # ------------
+    # Many-to-many with Team via the team_tag association table; the table
+    # is resolved lazily to avoid a circular import.
+    teams: Mapped[list[Team]] = relationship(
+        secondary=lambda: _team_tag(),
+        back_populates="tags",
+    )
+
+    @classmethod
+    def unique_hash(cls, name: str, slug: str | None = None) -> Hashable:  # noqa: ARG003
+        """Return the in-memory uniqueness key for a tag: the slugified name."""
+        return slugify(name)
+
+    @classmethod
+    def unique_filter(
+        cls,
+        name: str,
+        slug: str | None = None,  # noqa: ARG003
+    ) -> ColumnElement[bool]:
+        """Return a SQL filter matching the existing row for ``name``."""
+        return cls.slug == slugify(name)
+
+
+def _team_tag() -> Table:
+    from .team_tag import team_tag
+
+    return team_tag
diff --git a/src/app/db/models/team.py b/src/app/db/models/team.py
new file mode 100644
index 00000000..bc7ea72f
--- /dev/null
+++ b/src/app/db/models/team.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from advanced_alchemy.base import SlugKey, UUIDAuditBase
+from sqlalchemy import String
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+from .team_tag import team_tag
+
+if TYPE_CHECKING:
+    from .tag import Tag
+    from .team_invitation import TeamInvitation
+    from .team_member import TeamMember
+
+
+class Team(UUIDAuditBase, SlugKey):
+    """A group of users with common permissions.
+
+    Users can create and invite users to a team.
+    """
+
+    __tablename__ = "team"
+    __pii_columns__ = {"name", "description"}
+    name: Mapped[str] = mapped_column(nullable=False, index=True)
+    description: Mapped[str | None] = mapped_column(String(length=500), nullable=True, default=None)
+    is_active: Mapped[bool] = mapped_column(default=True, nullable=False)
+    # -----------
+    # ORM Relationships
+    # ------------
+    # Membership rows, eagerly loaded via a second SELECT; deleted with the
+    # team (DB-side via passive_deletes).
+    members: Mapped[list[TeamMember]] = relationship(
+        back_populates="team",
+        cascade="all, delete",
+        passive_deletes=True,
+        lazy="selectin",
+    )
+    invitations: Mapped[list[TeamInvitation]] = relationship(
+        back_populates="team",
+        cascade="all, delete",
+    )
+    # Read-only convenience view over invitations not yet accepted.
+    pending_invitations: Mapped[list[TeamInvitation]] = relationship(
+        primaryjoin="and_(TeamInvitation.team_id==Team.id, TeamInvitation.is_accepted == False)",
+        viewonly=True,
+    )
+    tags: Mapped[list[Tag]] = relationship(
+        secondary=lambda: team_tag,
+        back_populates="teams",
+        cascade="all, delete",
+        passive_deletes=True,
+    )
diff --git a/src/app/db/models/team_invitation.py b/src/app/db/models/team_invitation.py
new file mode 100644
index 00000000..1e34e42a
--- /dev/null
+++ b/src/app/db/models/team_invitation.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.base import UUIDAuditBase
+from sqlalchemy import ForeignKey, String
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+from app.db.models.team_roles import TeamRoles
+
+if TYPE_CHECKING:
+    from .team import Team
+    from .user import User
+
+
+class TeamInvitation(UUIDAuditBase):
+    """An invitation for an email address to join a team."""
+
+    __tablename__ = "team_invitation"
+    team_id: Mapped[UUID] = mapped_column(ForeignKey("team.id", ondelete="cascade"))
+    email: Mapped[str] = mapped_column(index=True)
+    # Role the invitee receives on acceptance; persisted as a 50-char string.
+    role: Mapped[TeamRoles] = mapped_column(String(length=50), default=TeamRoles.MEMBER)
+    is_accepted: Mapped[bool] = mapped_column(default=False)
+    # Nulled out (not cascaded) if the inviting account is deleted.
+    invited_by_id: Mapped[UUID | None] = mapped_column(ForeignKey("user_account.id", ondelete="set null"))
+    # Denormalized copy of the inviter's email, preserved even after the
+    # inviting account is removed.
+    invited_by_email: Mapped[str]
+    # -----------
+    # ORM Relationships
+    # ------------
+    team: Mapped[Team] = relationship(foreign_keys="TeamInvitation.team_id", lazy="noload")
+    # May be None because invited_by_id is nullable.
+    invited_by: Mapped[User | None] = relationship(foreign_keys="TeamInvitation.invited_by_id", lazy="noload", uselist=False)
diff --git a/src/app/db/models/team_member.py b/src/app/db/models/team_member.py
new file mode 100644
index 00000000..b8fe5605
--- /dev/null
+++ b/src/app/db/models/team_member.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.base import UUIDAuditBase
+from sqlalchemy import ForeignKey, String, UniqueConstraint
+from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+from .team_roles import TeamRoles
+
+if TYPE_CHECKING:
+    from .team import Team
+    from .user import User
+
+
+class TeamMember(UUIDAuditBase):
+    """Membership of a user in a team, with a per-team role.
+
+    A user may belong to a given team at most once (unique
+    ``user_id``/``team_id`` pair).
+    """
+
+    __tablename__ = "team_member"
+    __table_args__ = (UniqueConstraint("user_id", "team_id"),)
+    user_id: Mapped[UUID] = mapped_column(ForeignKey("user_account.id", ondelete="cascade"), nullable=False)
+    team_id: Mapped[UUID] = mapped_column(ForeignKey("team.id", ondelete="cascade"), nullable=False)
+    # Role within this team; persisted as a 50-char string (see TeamRoles).
+    role: Mapped[TeamRoles] = mapped_column(
+        String(length=50),
+        default=TeamRoles.MEMBER,
+        nullable=False,
+        index=True,
+    )
+    is_owner: Mapped[bool] = mapped_column(default=False, nullable=False)
+
+    # -----------
+    # ORM Relationships
+    # ------------
+    user: Mapped[User] = relationship(
+        back_populates="teams",
+        foreign_keys="TeamMember.user_id",
+        innerjoin=True,
+        uselist=False,
+        lazy="joined",
+    )
+    # Read-through proxies onto the eagerly-joined user/team rows.
+    name: AssociationProxy[str] = association_proxy("user", "name")
+    email: AssociationProxy[str] = association_proxy("user", "email")
+    team: Mapped[Team] = relationship(
+        back_populates="members",
+        foreign_keys="TeamMember.team_id",
+        innerjoin=True,
+        uselist=False,
+        lazy="joined",
+    )
+    team_name: AssociationProxy[str] = association_proxy("team", "name")
diff --git a/src/app/db/models/team_roles.py b/src/app/db/models/team_roles.py
new file mode 100644
index 00000000..628a4659
--- /dev/null
+++ b/src/app/db/models/team_roles.py
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from enum import Enum
+
+
+class TeamRoles(str, Enum):
+    """Valid Values for Team Roles."""
+
+    # Full administrative rights within the team.
+    ADMIN = "ADMIN"
+    # Regular membership.
+    MEMBER = "MEMBER"
diff --git a/src/app/db/models/team_tag.py b/src/app/db/models/team_tag.py
new file mode 100644
index 00000000..50f3a859
--- /dev/null
+++ b/src/app/db/models/team_tag.py
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from typing import Final
+
+from advanced_alchemy.base import orm_registry
+from sqlalchemy import Column, ForeignKey, Table
+
+# Association table implementing the many-to-many link between teams and
+# tags; rows are removed by the database when either side is deleted.
+team_tag: Final[Table] = Table(
+    "team_tag",
+    orm_registry.metadata,
+    Column("team_id", ForeignKey("team.id", ondelete="CASCADE"), primary_key=True),
+    Column("tag_id", ForeignKey("tag.id", ondelete="CASCADE"), primary_key=True),
+)
diff --git a/src/app/db/models/tenant.py b/src/app/db/models/tenant.py
new file mode 100644
index 00000000..ae828abc
--- /dev/null
+++ b/src/app/db/models/tenant.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from advanced_alchemy.base import SlugKey, UUIDAuditBase
+from sqlalchemy import String
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+if TYPE_CHECKING:
+    from .user import User
+
+
+class Tenant(UUIDAuditBase, SlugKey):
+    """A group of tenants i.e. a company or an organization."""
+
+    __tablename__ = "tenant"
+    __pii_columns__ = {"name", "description", "url"}
+    name: Mapped[str] = mapped_column(nullable=False, index=True)
+    description: Mapped[str | None] = mapped_column(String(length=500), nullable=True, default=None)
+    url: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    is_active: Mapped[bool] = mapped_column(default=True, nullable=False)
+    # -----------
+    # ORM Relationships
+    # ------------
+    # All user accounts belonging to this tenant, loaded with a second
+    # SELECT ("selectin").
+    users: Mapped[list[User]] = relationship(
+        back_populates="tenant",
+        innerjoin=True,
+        lazy="selectin",
+    )
diff --git a/src/app/db/models/user.py b/src/app/db/models/user.py
new file mode 100644
index 00000000..602e362e
--- /dev/null
+++ b/src/app/db/models/user.py
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+from datetime import date, datetime
+from typing import TYPE_CHECKING
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.base import UUIDAuditBase
+from sqlalchemy import String, ForeignKey
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+if TYPE_CHECKING:
+    from .oauth_account import UserOauthAccount
+    from .team_member import TeamMember
+    from .user_role import UserRole
+    from .tenant import Tenant
+
+
+class User(UUIDAuditBase):
+    """A user account; every account belongs to exactly one tenant."""
+
+    __tablename__ = "user_account"
+    __table_args__ = {"comment": "User accounts for application access"}
+    __pii_columns__ = {"name", "email", "avatar_url"}
+
+    email: Mapped[str] = mapped_column(unique=True, index=True, nullable=False)
+    name: Mapped[str | None] = mapped_column(nullable=True, default=None)
+    # None for accounts that authenticate only via OAuth.
+    hashed_password: Mapped[str | None] = mapped_column(String(length=255), nullable=True, default=None)
+    avatar_url: Mapped[str | None] = mapped_column(String(length=500), nullable=True, default=None)
+    is_active: Mapped[bool] = mapped_column(default=True, nullable=False)
+    is_superuser: Mapped[bool] = mapped_column(default=False, nullable=False)
+    is_verified: Mapped[bool] = mapped_column(default=False, nullable=False)
+    # Set once the account is verified; nullable until then.
+    verified_at: Mapped[date | None] = mapped_column(nullable=True, default=None)
+    # NOTE(review): default uses naive local time (datetime.now) — confirm
+    # whether a timezone-aware UTC timestamp was intended.
+    joined_at: Mapped[date] = mapped_column(default=datetime.now)
+    login_count: Mapped[int] = mapped_column(default=0)
+    tenant_id: Mapped[UUID] = mapped_column(ForeignKey("tenant.id"), nullable=False)
+    # -----------
+    # ORM Relationships
+    # ------------
+    #
+    # Owning tenant, eagerly joined on every user load.
+    tenant: Mapped[Tenant] = relationship(
+        back_populates="users",
+        innerjoin=True,
+        uselist=False,
+        lazy="joined",
+    )
+    roles: Mapped[list[UserRole]] = relationship(
+        back_populates="user",
+        lazy="selectin",
+        uselist=True,
+        cascade="all, delete",
+    )
+    # Read-only view of team memberships (TeamMember owns the association).
+    teams: Mapped[list[TeamMember]] = relationship(
+        back_populates="user",
+        lazy="selectin",
+        uselist=True,
+        cascade="all, delete",
+        viewonly=True,
+    )
+    oauth_accounts: Mapped[list[UserOauthAccount]] = relationship(
+        back_populates="user",
+        lazy="noload",
+        cascade="all, delete",
+        uselist=True,
+    )
diff --git a/src/app/db/models/user_role.py b/src/app/db/models/user_role.py
new file mode 100644
index 00000000..44855bae
--- /dev/null
+++ b/src/app/db/models/user_role.py
@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+from datetime import UTC, datetime
+from typing import TYPE_CHECKING
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.base import UUIDAuditBase
+from sqlalchemy import ForeignKey
+from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy
+from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+if TYPE_CHECKING:
+    from .role import Role
+    from .user import User
+
+
+class UserRole(UUIDAuditBase):
+    """User Role."""
+
+    __tablename__ = "user_account_role"
+    __table_args__ = {"comment": "Links a user to a specific role."}
+    user_id: Mapped[UUID] = mapped_column(ForeignKey("user_account.id", ondelete="cascade"), nullable=False)
+    role_id: Mapped[UUID] = mapped_column(ForeignKey("role.id", ondelete="cascade"), nullable=False)
+    assigned_at: Mapped[datetime] = mapped_column(default=datetime.now(UTC))
+
+    # -----------
+    # ORM Relationships
+    # ------------
+    user: Mapped[User] = relationship(back_populates="roles", innerjoin=True, uselist=False, lazy="joined")
+    user_name: AssociationProxy[str] = association_proxy("user", "name")
+    user_email: AssociationProxy[str] = association_proxy("user", "email")
+    role: Mapped[Role] = relationship(back_populates="users", innerjoin=True, uselist=False, lazy="joined")
+    role_name: AssociationProxy[str] = association_proxy("role", "name")
+    role_slug: AssociationProxy[str] = association_proxy("role", "slug")
diff --git a/src/app/domain/__init__.py b/src/app/domain/__init__.py
new file mode 100644
index 00000000..812601e8
--- /dev/null
+++ b/src/app/domain/__init__.py
@@ -0,0 +1,2 @@
+"""Application Modules."""
+from __future__ import annotations
diff --git a/src/app/domain/accounts/__init__.py b/src/app/domain/accounts/__init__.py
new file mode 100644
index 00000000..e754be18
--- /dev/null
+++ b/src/app/domain/accounts/__init__.py
@@ -0,0 +1,4 @@
+"""User Account domain logic."""
+from app.domain.accounts import controllers, dependencies, guards, schemas, services, signals, urls
+
+__all__ = ["guards", "services", "controllers", "dependencies", "schemas", "signals", "urls"]
diff --git a/src/app/domain/accounts/controllers/__init__.py b/src/app/domain/accounts/controllers/__init__.py
new file mode 100644
index 00000000..41195f42
--- /dev/null
+++ b/src/app/domain/accounts/controllers/__init__.py
@@ -0,0 +1,7 @@
+from .access import AccessController
+from .roles import RoleController
+from .user_role import UserRoleController
+from .users import UserController
+from .tenant import TenantController
+
+__all__ = ["AccessController", "UserController", "UserRoleController", "RoleController", "TenantController"]
diff --git a/src/app/domain/accounts/controllers/access.py b/src/app/domain/accounts/controllers/access.py
new file mode 100644
index 00000000..1cbed0b3
--- /dev/null
+++ b/src/app/domain/accounts/controllers/access.py
@@ -0,0 +1,111 @@
+"""User Account Controllers."""
+
+from __future__ import annotations
+
+from typing import Annotated
+
+from advanced_alchemy.utils.text import slugify
+from litestar import Controller, Request, Response, get, post
+from litestar.di import Provide
+from litestar.enums import RequestEncodingType
+from litestar.params import Body
+from litestar.security.jwt import OAuth2Login
+
+from app.db.models import User as UserModel  # noqa: TCH001
+from app.domain.accounts import urls
+from app.domain.accounts.dependencies import provide_roles_service, provide_users_service
+from app.domain.accounts.guards import auth, requires_active_user
+from app.domain.accounts.schemas import AccountLogin, AccountRegister, User
+from app.domain.accounts.services import RoleService, UserService
+
+
+class AccessController(Controller):
+    """User login, logout, registration, and profile endpoints."""
+
+    tags = ["Access"]
+    dependencies = {"users_service": Provide(provide_users_service), "roles_service": Provide(provide_roles_service)}
+    # Names Litestar needs to resolve stringified annotations in handlers.
+    signature_namespace = {
+        "UserService": UserService,
+        "RoleService": RoleService,
+        "OAuth2Login": OAuth2Login,
+        "RequestEncodingType": RequestEncodingType,
+        "Body": Body,
+        "User": User,
+    }
+
+    @post(
+        operation_id="AccountLogin",
+        name="account:login",
+        path=urls.ACCOUNT_LOGIN,
+        cache=False,
+        summary="Login",
+        exclude_from_auth=True,
+    )
+    async def login(
+        self,
+        users_service: UserService,
+        data: Annotated[AccountLogin, Body(title="OAuth2 Login", media_type=RequestEncodingType.URL_ENCODED)],
+    ) -> Response[OAuth2Login]:
+        """Authenticate a user.
+
+        Verifies the credentials and returns the auth login response keyed
+        by the user's email.
+        """
+        user = await users_service.authenticate(data.username, data.password)
+        return auth.login(user.email)
+
+    @post(
+        operation_id="AccountLogout",
+        name="account:logout",
+        path=urls.ACCOUNT_LOGOUT,
+        cache=False,
+        summary="Logout",
+        exclude_from_auth=True,
+    )
+    async def logout(
+        self,
+        request: Request,
+    ) -> Response:
+        """Log the current user out.
+
+        Drops the auth cookie, clears the server-side session, and tells
+        the client to delete the cookie as well.
+        """
+        request.cookies.pop(auth.key, None)
+        request.clear_session()
+
+        response = Response(
+            {"message": "OK"},
+            status_code=200,
+        )
+        response.delete_cookie(auth.key)
+
+        return response
+
+    @post(
+        operation_id="AccountRegister",
+        name="account:register",
+        path=urls.ACCOUNT_REGISTER,
+        cache=False,
+        summary="Create User",
+        description="Register a new account.",
+    )
+    async def signup(
+        self,
+        request: Request,
+        users_service: UserService,
+        roles_service: RoleService,
+        data: AccountRegister,
+    ) -> User:
+        """Register a new account.
+
+        Assigns the service's default role when it exists and emits a
+        ``user_created`` application event after the user is persisted.
+        """
+        user_data = data.to_dict()
+        role_obj = await roles_service.get_one_or_none(slug=slugify(users_service.default_role))
+        if role_obj is not None:
+            user_data.update({"role_id": role_obj.id})
+        user = await users_service.create(user_data)
+        request.app.emit(event_id="user_created", user_id=user.id)
+        return users_service.to_schema(user, schema_type=User)
+
+    @get(
+        operation_id="AccountProfile",
+        name="account:profile",
+        path=urls.ACCOUNT_PROFILE,
+        guards=[requires_active_user],
+        summary="User Profile",
+        description="User profile information.",
+    )
+    async def profile(self, request: Request, current_user: UserModel, users_service: UserService) -> User:
+        """Return the authenticated user's profile.
+
+        ``request`` is injected but unused here; ``current_user`` is
+        resolved by the auth layer.
+        """
+        return users_service.to_schema(current_user, schema_type=User)
diff --git a/src/app/domain/accounts/controllers/roles.py b/src/app/domain/accounts/controllers/roles.py
new file mode 100644
index 00000000..d6e298a8
--- /dev/null
+++ b/src/app/domain/accounts/controllers/roles.py
@@ -0,0 +1,20 @@
+"""Role Routes."""
+from __future__ import annotations
+
+from litestar import Controller
+from litestar.di import Provide
+
+from app.domain.accounts.dependencies import provide_roles_service
+from app.domain.accounts.guards import requires_superuser
+from app.domain.accounts.services import RoleService
+
+
+class RoleController(Controller):
+    """Handles the adding and removing of new Roles.
+
+    Superuser-only; currently declares only shared dependencies and no
+    route handlers.
+    """
+
+    tags = ["Roles"]
+    guards = [requires_superuser]
+    dependencies = {
+        "roles_service": Provide(provide_roles_service),
+    }
+    signature_namespace = {"RoleService": RoleService}
diff --git a/src/app/domain/accounts/controllers/tenant.py b/src/app/domain/accounts/controllers/tenant.py
new file mode 100644
index 00000000..b7e72985
--- /dev/null
+++ b/src/app/domain/accounts/controllers/tenant.py
@@ -0,0 +1,124 @@
+"""Tenant Controllers."""
+
+from __future__ import annotations
+
+from typing import Annotated, TYPE_CHECKING
+
+from advanced_alchemy.utils.text import slugify
+from litestar import Controller, Request, Response, get, post, patch, delete
+from litestar.di import Provide
+from litestar.enums import RequestEncodingType
+from litestar.params import Body
+from litestar.security.jwt import OAuth2Login
+
+from app.db.models import User as UserModel  # noqa: TCH001
+from app.domain.accounts import urls
+from app.domain.accounts.dependencies import provide_tenants_service
+from app.domain.accounts.guards import auth, requires_active_user, requires_superuser
+from app.domain.accounts.schemas import Tenant, TenantCreate, TenantUpdate
+from app.domain.accounts.services import TenantService
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from advanced_alchemy.service.pagination import OffsetPagination
+    from litestar.params import Dependency, Parameter
+
+    from app.lib.dependencies import FilterTypes
+
+
+class TenantController(Controller):
+    """CRUD operations for tenants.
+
+    Reads require an active user; create/update/delete additionally
+    require superuser privileges.
+    """
+
+    tags = ["Tenant"]
+    dependencies = {"tenants_service": Provide(provide_tenants_service)}
+    guards = [requires_active_user]
+    # Names Litestar needs to resolve stringified annotations in handlers.
+    signature_namespace = {
+        "TenantService": TenantService,
+        "RequestEncodingType": RequestEncodingType,
+        "Body": Body,
+    }
+
+    @post(
+        operation_id="CreateTenant",
+        name="tenant:create",
+        path=urls.ACCOUNT_TENANT_CREATE,
+        guards=[requires_superuser],
+        cache=False,
+        summary="Create Tenant",
+        description="Create a new tenant.",
+    )
+    async def create_tenant(
+        self,
+        tenants_service: TenantService,
+        data: TenantCreate,
+    ) -> Tenant:
+        """Create a new tenant and return its serialized representation."""
+        obj = data.to_dict()
+        db_obj = await tenants_service.create(obj)
+        return tenants_service.to_schema(schema_type=Tenant, data=db_obj)
+
+
+    @get(
+        operation_id="GetTenant",
+        name="tenant:get",
+        summary="Retrieve the details of a tenant.",
+        path=urls.ACCOUNT_TENANT_DETAIL,
+    )
+    async def get_tenant(
+        self,
+        tenants_service: TenantService,
+        tenant_id: Annotated[
+            UUID,
+            Parameter(
+                title="Tenant ID",
+                description="The tenant to retrieve.",
+            ),
+        ],
+    ) -> Tenant:
+        """Get details about a tenant by its primary key."""
+        db_obj = await tenants_service.get(tenant_id)
+        return tenants_service.to_schema(schema_type=Tenant, data=db_obj)
+
+    @patch(
+        operation_id="UpdateTenant",
+        name="tenants:update",
+        path=urls.ACCOUNT_TENANT_UPDATE,
+        guards=[requires_superuser],
+    )
+    async def update_tenant(
+        self,
+        data: TenantUpdate,
+        tenants_service: TenantService,
+        tenant_id: Annotated[
+            UUID,
+            Parameter(
+                title="Tenant ID",
+                description="The tenant to update.",
+            ),
+        ],
+    ) -> Tenant:
+        """Apply a partial update to a tenant and return the result."""
+        db_obj = await tenants_service.update(
+            item_id=tenant_id,
+            data=data.to_dict(),
+        )
+        return tenants_service.to_schema(schema_type=Tenant, data=db_obj)
+
+    @delete(
+        operation_id="DeleteTenant",
+        name="tenants:delete",
+        summary="Remove Tenant",
+        path=urls.ACCOUNT_TENANT_DELETE,
+        guards=[requires_superuser],
+    )
+    async def delete_tenant(
+        self,
+        tenants_service: TenantService,
+        tenant_id: Annotated[
+            UUID,
+            Parameter(title="Tenant ID", description="The tenant to delete."),
+        ],
+    ) -> None:
+        """Delete a tenant by its primary key."""
+        _ = await tenants_service.delete(tenant_id)
diff --git a/src/app/domain/accounts/controllers/user_role.py b/src/app/domain/accounts/controllers/user_role.py
new file mode 100644
index 00000000..fb794318
--- /dev/null
+++ b/src/app/domain/accounts/controllers/user_role.py
@@ -0,0 +1,80 @@
+"""User Routes."""
+from __future__ import annotations
+
+from litestar import Controller, post
+from litestar.di import Provide
+from litestar.params import Parameter
+from litestar.repository.exceptions import ConflictError
+
+from app.domain.accounts import schemas, urls
+from app.domain.accounts.dependencies import provide_roles_service, provide_user_roles_service, provide_users_service
+from app.domain.accounts.guards import requires_superuser
+from app.domain.accounts.services import RoleService, UserRoleService, UserService
+from app.lib.schema import Message
+
+
+class UserRoleController(Controller):
+    """Handles the adding and removing of User Role records."""
+
+    tags = ["User Account Roles"]
+    guards = [requires_superuser]
+    dependencies = {
+        "users_service": Provide(provide_users_service),
+        "roles_service": Provide(provide_roles_service),
+        "user_roles_service": Provide(provide_user_roles_service),
+    }
+    signature_namespace = {"UserService": UserService, "RoleService": RoleService, "UserRoleService": UserRoleService}
+
+    @post(
+        operation_id="AssignUserRole",
+        name="users:assign-role",
+        path=urls.ACCOUNT_ASSIGN_ROLE,
+    )
+    async def assign_role(
+        self,
+        roles_service: RoleService,
+        users_service: UserService,
+        user_roles_service: UserRoleService,
+        data: schemas.UserRoleAdd,
+        role_slug: str = Parameter(
+            title="Role Slug",
+            description="The role to grant.",
+        ),
+    ) -> Message:
+        """Create a new migration role."""
+        role_id = (await roles_service.get_one(slug=role_slug)).id
+        user_obj = await users_service.get_one(email=data.user_name)
+        if all(user_role.role_id != role_id for user_role in user_obj.roles):
+            obj, created = await user_roles_service.get_or_upsert(role_id=role_id, user_id=user_obj.id)
+        if created:
+            return Message(message=f"Successfully assigned the '{obj.role_slug}' role to {obj.user_email}.")
+        return Message(message=f"User {obj.user_email} already has the '{obj.role_slug}' role.")
+
+    @post(
+        operation_id="RevokeUserRole",
+        name="users:revoke-role",
+        summary="Remove Role",
+        description="Removes an assigned role from a user.",
+        path=urls.ACCOUNT_REVOKE_ROLE,
+    )
+    async def revoke_role(
+        self,
+        users_service: UserService,
+        user_roles_service: UserRoleService,
+        data: schemas.UserRoleRevoke,
+        role_slug: str = Parameter(
+            title="Role Slug",
+            description="The role to revoke.",
+        ),
+    ) -> Message:
+        """Delete a role from the system."""
+        user_obj = await users_service.get_one(email=data.user_name)
+        removed_role: bool = False
+        for user_role in user_obj.roles:
+            if user_role.role_slug == role_slug:
+                _ = await user_roles_service.delete(user_role.id)
+                removed_role = True
+        if not removed_role:
+            msg = "User did not have role assigned."
+            raise ConflictError(msg)
+        return Message(message=f"Removed the '{role_slug}' role from User {user_obj.email}.")
diff --git a/src/app/domain/accounts/controllers/users.py b/src/app/domain/accounts/controllers/users.py
new file mode 100644
index 00000000..55442657
--- /dev/null
+++ b/src/app/domain/accounts/controllers/users.py
@@ -0,0 +1,126 @@
+"""User Account Controllers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Annotated
+
+from litestar import Controller, delete, get, patch, post
+from litestar.di import Provide
+from litestar.params import Dependency, Parameter
+
+from app.domain.accounts import urls
+from app.domain.accounts.dependencies import provide_users_service
+from app.domain.accounts.guards import requires_superuser
+from app.domain.accounts.schemas import User, UserCreate, UserUpdate
+from app.domain.accounts.services import UserService
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from advanced_alchemy.filters import FilterTypes
+    from advanced_alchemy.service import OffsetPagination
+
+
+class UserController(Controller):
+    """User Account Controller.
+
+    Superuser-only CRUD endpoints for user accounts.
+    """
+
+    tags = ["User Accounts"]
+    guards = [requires_superuser]  # every route in this controller requires a superuser
+    dependencies = {"users_service": Provide(provide_users_service)}
+    signature_namespace = {"UserService": UserService}
+    dto = None
+    return_dto = None
+
+    @get(
+        operation_id="ListUsers",
+        name="users:list",
+        summary="List Users",
+        description="Retrieve the users.",
+        path=urls.ACCOUNT_LIST,
+        cache=60,  # list responses are cached for 60 seconds
+    )
+    async def list_users(
+        self,
+        users_service: UserService,
+        filters: Annotated[list[FilterTypes], Dependency(skip_validation=True)],
+    ) -> OffsetPagination[User]:
+        """List users matching the request's pagination/search filters."""
+        results, total = await users_service.list_and_count(*filters)
+        return users_service.to_schema(data=results, total=total, schema_type=User, filters=filters)
+
+    @get(
+        operation_id="GetUser",
+        name="users:get",
+        path=urls.ACCOUNT_DETAIL,
+        summary="Retrieve the details of a user.",
+    )
+    async def get_user(
+        self,
+        users_service: UserService,
+        user_id: Annotated[
+            UUID,
+            Parameter(
+                title="User ID",
+                description="The user to retrieve.",
+            ),
+        ],
+    ) -> User:
+        """Get a user by primary key."""
+        db_obj = await users_service.get(user_id)
+        return users_service.to_schema(db_obj, schema_type=User)
+
+    @post(
+        operation_id="CreateUser",
+        name="users:create",
+        summary="Create a new user.",
+        cache_control=None,
+        description="A user who can login and use the system.",
+        path=urls.ACCOUNT_CREATE,
+    )
+    async def create_user(
+        self,
+        users_service: UserService,
+        data: UserCreate,
+    ) -> User:
+        """Create a new user."""
+        db_obj = await users_service.create(data.to_dict())
+        return users_service.to_schema(db_obj, schema_type=User)
+
+    @patch(
+        operation_id="UpdateUser",
+        name="users:update",
+        path=urls.ACCOUNT_UPDATE,
+    )
+    async def update_user(
+        self,
+        data: UserUpdate,
+        users_service: UserService,
+        user_id: UUID = Parameter(
+            title="User ID",
+            description="The user to update.",
+        ),
+    ) -> User:
+        """Update an existing user's attributes."""
+        db_obj = await users_service.update(item_id=user_id, data=data.to_dict())
+        return users_service.to_schema(db_obj, schema_type=User)
+
+    @delete(
+        operation_id="DeleteUser",
+        name="users:delete",
+        path=urls.ACCOUNT_DELETE,
+        summary="Remove User",
+        description="Removes a user and all associated data from the system.",
+    )
+    async def delete_user(
+        self,
+        users_service: UserService,
+        user_id: Annotated[
+            UUID,
+            Parameter(
+                title="User ID",
+                description="The user to delete.",
+            ),
+        ],
+    ) -> None:
+        """Delete a user from the system."""
+        _ = await users_service.delete(user_id)
diff --git a/src/app/domain/accounts/dependencies.py b/src/app/domain/accounts/dependencies.py
new file mode 100644
index 00000000..2c9dbffa
--- /dev/null
+++ b/src/app/domain/accounts/dependencies.py
@@ -0,0 +1,116 @@
+"""User Account Controllers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from sqlalchemy.orm import joinedload, load_only, selectinload
+
+from app.db.models import Role, Team, TeamMember, UserRole
+from app.db.models import User as UserModel
+from app.domain.accounts.services import RoleService, UserRoleService, UserService, TenantService
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from litestar.connection import Request
+    from litestar.security.jwt import Token
+    from sqlalchemy.ext.asyncio import AsyncSession
+
+
+async def provide_user(request: Request[UserModel, Token, Any]) -> UserModel:
+    """Get the user from the connection.
+
+    Args:
+        request: current connection.
+
+    Returns:
+        User
+    """
+    return request.user
+
+
+async def provide_users_service(db_session: AsyncSession) -> AsyncGenerator[UserService, None]:
+    """Construct repository and service objects for the request."""
+    async with UserService.new(
+        session=db_session,
+        load=[
+            selectinload(UserModel.roles).options(joinedload(UserRole.role, innerjoin=True)),
+            selectinload(UserModel.oauth_accounts),
+            selectinload(UserModel.teams).options(
+                joinedload(TeamMember.team, innerjoin=True).options(load_only(Team.name)),
+            ),
+        ],
+    ) as service:
+        yield service
+
+
+async def provide_roles_service(db_session: AsyncSession | None = None) -> AsyncGenerator[RoleService, None]:
+    """Provide roles service.
+
+    Args:
+        db_session (AsyncSession | None, optional): current database session. Defaults to None.
+
+    Returns:
+        RoleService: A role service object
+    """
+    async with RoleService.new(
+        session=db_session,
+        load=selectinload(Role.users).options(joinedload(UserRole.user, innerjoin=True)),
+    ) as service:
+        yield service
+
+
+async def provide_user_roles_service(db_session: AsyncSession | None = None) -> AsyncGenerator[UserRoleService, None]:
+    """Provide user roles service.
+
+    Args:
+        db_session (AsyncSession | None, optional): current database session. Defaults to None.
+
+    Returns:
+        UserRoleService: A user role service object
+    """
+    async with UserRoleService.new(
+        session=db_session,
+    ) as service:
+        yield service
+
+
+async def provide_roles_service(db_session: AsyncSession | None = None) -> AsyncGenerator[RoleService, None]:
+    """Provide roles service.
+
+    Args:
+        db_session (AsyncSession | None, optional): current database session. Defaults to None.
+
+    Returns:
+        RoleService: A role service object
+    """
+    async with RoleService.new(
+        session=db_session,
+        load=selectinload(Role.users).options(joinedload(UserRole.user, innerjoin=True)),
+    ) as service:
+        yield service
+
+
+async def provide_user_roles_service(db_session: AsyncSession | None = None) -> AsyncGenerator[UserRoleService, None]:
+    """Provide user roles service.
+
+    Args:
+        db_session (AsyncSession | None, optional): current database session. Defaults to None.
+
+    Returns:
+        UserRoleService: A user role service object
+    """
+    async with UserRoleService.new(
+        session=db_session,
+    ) as service:
+        yield service
+
+
+async def provide_tenants_service(db_session: AsyncSession) -> AsyncGenerator[TenantService, None]:
+    """Construct repository and service objects for the request."""
+    async with TenantService.new(
+        session=db_session,
+        load=[],
+    ) as service:
+        yield service
diff --git a/src/app/domain/accounts/guards.py b/src/app/domain/accounts/guards.py
new file mode 100644
index 00000000..56b78f24
--- /dev/null
+++ b/src/app/domain/accounts/guards.py
@@ -0,0 +1,110 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from litestar.exceptions import PermissionDeniedException
+from litestar.security.jwt import OAuth2PasswordBearerAuth
+
+from app.config import constants
+from app.config.app import alchemy
+from app.config.base import get_settings
+from app.db.models import User
+from app.domain.accounts import urls
+from app.domain.accounts.dependencies import provide_users_service
+
+if TYPE_CHECKING:
+    from litestar.connection import ASGIConnection
+    from litestar.handlers.base import BaseRouteHandler
+    from litestar.security.jwt import Token
+
+
+__all__ = ("requires_superuser", "requires_active_user", "requires_verified_user", "current_user_from_token", "auth")
+
+
+settings = get_settings()
+
+
+def requires_active_user(connection: ASGIConnection, _: BaseRouteHandler) -> None:
+    """Request requires active user.
+
+    Verifies the request user is active.
+
+    Args:
+        connection (ASGIConnection): HTTP Request
+        _ (BaseRouteHandler): Route handler
+
+    Raises:
+        PermissionDeniedException: Permission denied exception
+    """
+    if connection.user.is_active:
+        return
+    msg = "Inactive account"
+    raise PermissionDeniedException(msg)
+
+
+def requires_superuser(connection: ASGIConnection, _: BaseRouteHandler) -> None:
+    """Request requires active superuser.
+
+    Args:
+        connection (ASGIConnection): HTTP Request
+        _ (BaseRouteHandler): Route handler
+
+    Raises:
+        PermissionDeniedException: Permission denied exception
+
+    Returns:
+        None: Returns None when successful
+    """
+    if connection.user.is_superuser:
+        return
+    raise PermissionDeniedException(detail="Insufficient privileges")
+
+
+def requires_verified_user(connection: ASGIConnection, _: BaseRouteHandler) -> None:
+    """Verify the connection user has a verified account.
+
+    Args:
+        connection (ASGIConnection): Request/Connection object.
+        _ (BaseRouteHandler): Route handler.
+
+    Raises:
+        PermissionDeniedException: If the user account is not verified.
+
+    Returns:
+        None: Returns None when successful
+    """
+    if connection.user.is_verified:
+        return
+    raise PermissionDeniedException(detail="User account is not verified.")
+
+
+async def current_user_from_token(token: Token, connection: ASGIConnection[Any, Any, Any, Any]) -> User | None:
+    """Lookup current user from local JWT token.
+
+    Fetches the user information from the database using the token's ``sub``
+    claim as the account email.
+
+    Args:
+        token (Token): JWT Token Object
+        connection (ASGIConnection[Any, Any, Any, Any]): ASGI connection.
+
+    Returns:
+        User | None: User record mapped to the JWT identifier, or ``None`` when
+        no matching account exists or the account is inactive.
+    """
+    # NOTE(review): the generator from provide_users_service is advanced once and
+    # never closed — cleanup relies on garbage collection; confirm acceptable.
+    service = await anext(provide_users_service(alchemy.provide_session(connection.app.state, connection.scope)))
+    user = await service.get_one_or_none(email=token.sub)
+    return user if user and user.is_active else None
+
+
+auth = OAuth2PasswordBearerAuth[User](
+    retrieve_user_handler=current_user_from_token,
+    token_secret=settings.app.SECRET_KEY,
+    token_url=urls.ACCOUNT_LOGIN,
+    exclude=[
+        constants.OPENAPI_SCHEMA,
+        constants.HEALTH_ENDPOINT,
+        urls.ACCOUNT_LOGIN,
+        urls.ACCOUNT_REGISTER,
+    ],
+)
diff --git a/src/app/domain/accounts/repositories.py b/src/app/domain/accounts/repositories.py
new file mode 100644
index 00000000..18e32778
--- /dev/null
+++ b/src/app/domain/accounts/repositories.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+from advanced_alchemy.repository import SQLAlchemyAsyncRepository, SQLAlchemyAsyncSlugRepository
+
+from app.db.models import Role, User, UserRole, Tenant
+
+
+class UserRepository(SQLAlchemyAsyncRepository[User]):
+    """User SQLAlchemy Repository."""
+
+    model_type = User
+
+
+class RoleRepository(SQLAlchemyAsyncSlugRepository[Role]):
+    """Role SQLAlchemy Repository."""
+
+    model_type = Role
+
+
+class UserRoleRepository(SQLAlchemyAsyncRepository[UserRole]):
+    """User Role SQLAlchemy Repository."""
+
+    model_type = UserRole
+
+
+class TenantRepository(SQLAlchemyAsyncSlugRepository[Tenant]):
+    """Tenant SQLAlchemy Repository."""
+
+    model_type = Tenant
diff --git a/src/app/domain/accounts/schemas.py b/src/app/domain/accounts/schemas.py
new file mode 100644
index 00000000..07769ef7
--- /dev/null
+++ b/src/app/domain/accounts/schemas.py
@@ -0,0 +1,143 @@
+from __future__ import annotations
+
+from datetime import datetime  # noqa: TCH003
+from uuid import UUID  # noqa: TCH003
+
+import msgspec
+
+from app.db.models.team_roles import TeamRoles
+from app.lib.schema import CamelizedBaseStruct
+
+__all__ = (
+    "AccountLogin",
+    "AccountRegister",
+    "UserRoleAdd",
+    "UserRoleRevoke",
+    "UserCreate",
+    "User",
+    "UserRole",
+    "UserTeam",
+    "UserUpdate",
+)
+
+
+class UserTeam(CamelizedBaseStruct):
+    """Holds team details for a user.
+
+    This is nested in the User Model for 'team'
+    """
+
+    team_id: UUID
+    team_name: str
+    is_owner: bool = False
+    role: TeamRoles = TeamRoles.MEMBER
+
+
+class UserRole(CamelizedBaseStruct):
+    """Holds role details for a user.
+
+    This is nested in the User Model for 'roles'
+    """
+
+    role_id: UUID
+    role_slug: str
+    role_name: str
+    assigned_at: datetime
+
+
+class OauthAccount(CamelizedBaseStruct):
+    """Holds linked Oauth details for a user."""
+
+    id: UUID
+    oauth_name: str
+    access_token: str
+    account_id: str
+    account_email: str
+    expires_at: int | None = None
+    refresh_token: str | None = None
+
+
+class User(CamelizedBaseStruct):
+    """User properties to use for a response."""
+
+    id: UUID
+    email: str
+    tenant_id: UUID
+    name: str | None = None
+    is_superuser: bool = False
+    is_active: bool = False
+    is_verified: bool = False
+    teams: list[UserTeam] = []
+    roles: list[UserRole] = []
+    oauth_accounts: list[OauthAccount] = []
+
+
+class UserCreate(CamelizedBaseStruct):
+    email: str
+    password: str
+    name: str | None = None
+    tenant_id: UUID | None = None
+    is_superuser: bool = False
+    is_active: bool = True
+    is_verified: bool = False
+
+
+class UserUpdate(CamelizedBaseStruct, omit_defaults=True):
+    email: str | None | msgspec.UnsetType = msgspec.UNSET
+    password: str | None | msgspec.UnsetType = msgspec.UNSET
+    name: str | None | msgspec.UnsetType = msgspec.UNSET
+    is_superuser: bool | None | msgspec.UnsetType = msgspec.UNSET
+    is_active: bool | None | msgspec.UnsetType = msgspec.UNSET
+    is_verified: bool | None | msgspec.UnsetType = msgspec.UNSET
+
+
+class AccountLogin(CamelizedBaseStruct):
+    username: str
+    password: str
+
+
+class AccountRegister(CamelizedBaseStruct):
+    email: str
+    password: str
+    name: str | None = None
+
+
+class UserRoleAdd(CamelizedBaseStruct):
+    """User role add ."""
+
+    user_name: str
+
+
+class UserRoleRevoke(CamelizedBaseStruct):
+    """User role revoke ."""
+
+    user_name: str
+
+
+class Tenant(CamelizedBaseStruct):
+    """Tenant properties."""
+
+    id: UUID
+    name: str
+    description: str | None = None
+    url: str | None = None
+    is_active: bool = True
+    users: list[User] = []
+
+
+class TenantCreate(CamelizedBaseStruct):
+    """Tenant create properties."""
+
+    name: str
+    description: str | None = None
+    url: str | None = None
+    is_active: bool | None = True
+
+
+class TenantUpdate(CamelizedBaseStruct):
+    """Tenant update properties."""
+
+    name: str | None | msgspec.UnsetType = msgspec.UNSET
+    description: str | None | msgspec.UnsetType = msgspec.UNSET
+    url: str | None | msgspec.UnsetType = msgspec.UNSET
+    is_active: bool | None | msgspec.UnsetType = msgspec.UNSET
diff --git a/src/app/domain/accounts/services.py b/src/app/domain/accounts/services.py
new file mode 100644
index 00000000..92727e2b
--- /dev/null
+++ b/src/app/domain/accounts/services.py
@@ -0,0 +1,203 @@
+from __future__ import annotations
+
+from datetime import datetime, timezone
+from typing import TYPE_CHECKING, Any
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.service import (
+    ModelDictT,
+    SQLAlchemyAsyncRepositoryService,
+    is_dict,
+    is_msgspec_model,
+    is_pydantic_model,
+)
+from litestar.exceptions import PermissionDeniedException
+
+from app.config import constants
+from app.db.models import Role, User, UserRole, Tenant
+from app.lib import crypt
+
+from .repositories import RoleRepository, UserRepository, UserRoleRepository, TenantRepository
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+    from advanced_alchemy.repository._util import LoadSpec
+    from sqlalchemy.orm import InstrumentedAttribute
+
+
+class UserService(SQLAlchemyAsyncRepositoryService[User]):
+    """Handles database operations for users."""
+
+    repository_type = UserRepository
+    default_role = constants.DEFAULT_USER_ROLE  # role slug granted to new accounts by callers
+
+    def __init__(self, **repo_kwargs: Any) -> None:
+        """Create the service and its backing :class:`UserRepository`."""
+        self.repository: UserRepository = self.repository_type(**repo_kwargs)
+        self.model_type = self.repository.model_type
+
+    async def create(
+        self,
+        data: ModelDictT[User],
+        *,
+        load: LoadSpec | None = None,
+        execution_options: dict[str, Any] | None = None,
+        auto_commit: bool | None = None,
+        auto_expunge: bool | None = None,
+        auto_refresh: bool | None = None,
+    ) -> User:
+        """Create a new User and assign default Role.
+
+        A dict payload may carry a ``role_id`` key, which is popped and turned
+        into a ``UserRole`` assignment on the new user.
+        """
+        if isinstance(data, dict):
+            role_id: UUID | None = data.pop("role_id", None)
+            data = await self.to_model(data, "create")
+            if role_id:
+                data.roles.append(UserRole(role_id=role_id, assigned_at=datetime.now(timezone.utc)))  # noqa: UP017
+        return await super().create(
+            data=data,
+            load=load,
+            execution_options=execution_options,
+            auto_commit=auto_commit,
+            auto_expunge=auto_expunge,
+            auto_refresh=auto_refresh,
+        )
+
+    async def update(
+        self,
+        data: ModelDictT[User],
+        item_id: Any | None = None,
+        *,
+        id_attribute: str | InstrumentedAttribute[Any] | None = None,
+        load: LoadSpec | None = None,
+        execution_options: dict[str, Any] | None = None,
+        attribute_names: Iterable[str] | None = None,
+        with_for_update: bool | None = None,
+        auto_commit: bool | None = None,
+        auto_expunge: bool | None = None,
+        auto_refresh: bool | None = None,
+    ) -> User:
+        """Update a User; a dict ``role_id`` key appends a new role assignment."""
+        # NOTE: mirrors the role_id handling in create() — keep the two in sync.
+        if isinstance(data, dict):
+            role_id: UUID | None = data.pop("role_id", None)
+            data = await self.to_model(data, "update")
+            if role_id:
+                data.roles.append(UserRole(role_id=role_id, assigned_at=datetime.now(timezone.utc)))  # noqa: UP017
+        return await super().update(
+            data=data,
+            item_id=item_id,
+            attribute_names=attribute_names,
+            with_for_update=with_for_update,
+            auto_commit=auto_commit,
+            auto_expunge=auto_expunge,
+            auto_refresh=auto_refresh,
+            id_attribute=id_attribute,
+            load=load,
+            execution_options=execution_options,
+        )
+
+    async def authenticate(self, username: str, password: bytes | str) -> User:
+        """Authenticate a user.
+
+        Args:
+            username (str): Email address used as the login identifier.
+            password (str | bytes): Plaintext password to verify against the stored hash.
+
+        Raises:
+            PermissionDeniedException: Raised when the user doesn't exist, has no
+                password set, the password doesn't match, or the account is inactive.
+
+        Returns:
+            User: The user object
+        """
+        db_obj = await self.get_one_or_none(email=username)
+        # Deliberately vague messages: don't reveal whether the account exists.
+        if db_obj is None:
+            msg = "User not found or password invalid"
+            raise PermissionDeniedException(msg)
+        if db_obj.hashed_password is None:
+            msg = "User not found or password invalid."
+            raise PermissionDeniedException(msg)
+        if not await crypt.verify_password(password, db_obj.hashed_password):
+            msg = "User not found or password invalid"
+            raise PermissionDeniedException(msg)
+        if not db_obj.is_active:
+            msg = "User account is inactive"
+            raise PermissionDeniedException(msg)
+        return db_obj
+
+    async def update_password(self, data: dict[str, Any], db_obj: User) -> None:
+        """Update stored user password.
+
+        This is only used when not using IAP authentication.
+
+        Args:
+            data (dict[str, Any]): Mapping with ``current_password`` and ``new_password`` keys.
+            db_obj (User): The user whose password is being changed.
+
+        Raises:
+            PermissionDeniedException: If the current password is missing or invalid,
+                or the account is inactive.
+        """
+        if db_obj.hashed_password is None:
+            msg = "User not found or password invalid."
+            raise PermissionDeniedException(msg)
+        if not await crypt.verify_password(data["current_password"], db_obj.hashed_password):
+            msg = "User not found or password invalid."
+            raise PermissionDeniedException(msg)
+        if not db_obj.is_active:
+            msg = "User account is not active"
+            raise PermissionDeniedException(msg)
+        db_obj.hashed_password = await crypt.get_password_hash(data["new_password"])
+        await self.repository.update(db_obj)
+
+    async def to_model(self, data: ModelDictT[User], operation: str | None = None) -> User:
+        """Hash a plaintext ``password`` dict key into ``hashed_password`` before conversion."""
+        if isinstance(data, dict) and "password" in data:
+            password: bytes | str | None = data.pop("password", None)
+            if password is not None:
+                data.update({"hashed_password": await crypt.get_password_hash(password)})
+        return await super().to_model(data, operation)
+
+
+class RoleService(SQLAlchemyAsyncRepositoryService[Role]):
+    """Handles database operations for roles."""
+
+    repository_type = RoleRepository
+    match_fields = ["name"]
+
+    def __init__(self, **repo_kwargs: Any) -> None:
+        """Create the service and its backing :class:`RoleRepository`."""
+        self.repository: RoleRepository = self.repository_type(**repo_kwargs)
+        self.model_type = self.repository.model_type
+
+    async def to_model(self, data: ModelDictT[Role], operation: str | None = None) -> Role:
+        """Populate ``slug`` from ``name`` when one is not supplied on create/update."""
+        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "create" and data.slug is None:  # type: ignore[union-attr]
+            data.slug = await self.repository.get_available_slug(data.name)  # type: ignore[union-attr]
+        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "update" and data.slug is None:  # type: ignore[union-attr]
+            data.slug = await self.repository.get_available_slug(data.name)  # type: ignore[union-attr]
+        if is_dict(data) and "slug" not in data and operation == "create":
+            data["slug"] = await self.repository.get_available_slug(data["name"])
+        if is_dict(data) and "slug" not in data and "name" in data and operation == "update":
+            data["slug"] = await self.repository.get_available_slug(data["name"])
+        return await super().to_model(data, operation)
+
+
+class UserRoleService(SQLAlchemyAsyncRepositoryService[UserRole]):
+    """Handles database operations for user roles."""
+
+    repository_type = UserRoleRepository
+
+
+class TenantService(SQLAlchemyAsyncRepositoryService[Tenant]):
+    """Handles database operations for tenants."""
+
+    repository_type = TenantRepository
+    match_fields = ["name"]
+
+    def __init__(self, **repo_kwargs: Any) -> None:
+        """Create the service and its backing :class:`TenantRepository`."""
+        self.repository: TenantRepository = self.repository_type(**repo_kwargs)
+        self.model_type = self.repository.model_type
+
+    async def to_model(self, data: ModelDictT[Tenant], operation: str | None = None) -> Tenant:
+        """Populate ``slug`` from ``name`` when one is not supplied on create/update.
+
+        NOTE(review): logic mirrors RoleService.to_model; assumes the Tenant model
+        exposes ``slug``/``name`` attributes (TenantRepository is a slug repository).
+        """
+        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "create" and data.slug is None:  # type: ignore[union-attr]
+            data.slug = await self.repository.get_available_slug(data.name)  # type: ignore[union-attr]
+        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "update" and data.slug is None:  # type: ignore[union-attr]
+            data.slug = await self.repository.get_available_slug(data.name)  # type: ignore[union-attr]
+        if is_dict(data) and "slug" not in data and operation == "create":
+            data["slug"] = await self.repository.get_available_slug(data["name"])
+        if is_dict(data) and "slug" not in data and "name" in data and operation == "update":
+            data["slug"] = await self.repository.get_available_slug(data["name"])
+        return await super().to_model(data, operation)
diff --git a/src/app/domain/accounts/signals.py b/src/app/domain/accounts/signals.py
new file mode 100644
index 00000000..141f65af
--- /dev/null
+++ b/src/app/domain/accounts/signals.py
@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import structlog
+from litestar.events import listener
+
+from app.config.app import alchemy
+
+from .dependencies import provide_users_service
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+logger = structlog.get_logger()
+
+
+@listener("user_created")
+async def user_created_event_handler(
+    user_id: UUID,
+) -> None:
+    """Executes when a new user is created.
+
+    Args:
+        user_id: The primary key of the user that was created.
+    """
+    await logger.ainfo("Running post signup flow.")
+    async with alchemy.get_session() as db_session:
+        service = await anext(provide_users_service(db_session))
+        obj = await service.get_one_or_none(id=user_id)
+        if obj is None:
+            await logger.aerror("Could not locate the specified user", id=user_id)
+        else:
+            await logger.ainfo("Found user", **obj.to_dict(exclude={"hashed_password"}))
diff --git a/src/app/domain/accounts/urls.py b/src/app/domain/accounts/urls.py
new file mode 100644
index 00000000..8fd881b1
--- /dev/null
+++ b/src/app/domain/accounts/urls.py
@@ -0,0 +1,15 @@
+ACCOUNT_LOGIN = "/api/access/login"
+ACCOUNT_LOGOUT = "/api/access/logout"
+ACCOUNT_REGISTER = "/api/access/signup"
+ACCOUNT_PROFILE = "/api/me"
+ACCOUNT_LIST = "/api/users"
+ACCOUNT_DELETE = "/api/users/{user_id:uuid}"
+ACCOUNT_DETAIL = "/api/users/{user_id:uuid}"
+ACCOUNT_UPDATE = "/api/users/{user_id:uuid}"
+ACCOUNT_CREATE = "/api/users"
+ACCOUNT_ASSIGN_ROLE = "/api/roles/{role_slug:str}/assign"
+ACCOUNT_REVOKE_ROLE = "/api/roles/{role_slug:str}/revoke"
+ACCOUNT_TENANT_DELETE = "/api/tenants"
+ACCOUNT_TENANT_DETAIL = "/api/tenants/{tenant_id:uuid}"
+ACCOUNT_TENANT_UPDATE = "/api/tenants/{tenant_id:uuid}"
+ACCOUNT_TENANT_CREATE = "/api/tenants"
diff --git a/src/app/domain/companies/__init__.py b/src/app/domain/companies/__init__.py
new file mode 100644
index 00000000..e6e7b072
--- /dev/null
+++ b/src/app/domain/companies/__init__.py
@@ -0,0 +1,4 @@
+"""Company Application Module."""
+from . import controllers, dependencies, schemas, services
+
+__all__ = ["controllers", "services", "schemas", "dependencies"]
diff --git a/src/app/domain/companies/controllers/__init__.py b/src/app/domain/companies/controllers/__init__.py
new file mode 100644
index 00000000..b4d8d507
--- /dev/null
+++ b/src/app/domain/companies/controllers/__init__.py
@@ -0,0 +1,3 @@
+from .companies import CompanyController
+
+__all__ = ["CompanyController"]
diff --git a/src/app/domain/companies/controllers/companies.py b/src/app/domain/companies/controllers/companies.py
new file mode 100644
index 00000000..edc2406f
--- /dev/null
+++ b/src/app/domain/companies/controllers/companies.py
@@ -0,0 +1,129 @@
+"""Company Controllers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Annotated
+
+from litestar import Controller, delete, get, patch, post
+from litestar.di import Provide
+
+from app.config import constants
+from app.domain.accounts.guards import requires_active_user
+from app.domain.companies import urls
+from app.domain.companies.dependencies import provide_companies_service
+from app.domain.companies.schemas import Company, CompanyCreate, CompanyUpdate
+from app.domain.companies.services import CompanyService
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from advanced_alchemy.service.pagination import OffsetPagination
+    from litestar.params import Dependency, Parameter
+
+    from app.lib.dependencies import FilterTypes
+
+
+class CompanyController(Controller):
+    """Company operations."""
+
+    tags = ["Companies"]
+    dependencies = {"companies_service": Provide(provide_companies_service)}
+    guards = [requires_active_user]
+    signature_namespace = {
+        "CompanyService": CompanyService,
+    }
+    dto = None
+    return_dto = None
+
+    @get(
+        operation_id="ListCompanies",
+        name="companies:list",
+        summary="List Companies",
+        path=urls.COMPANY_LIST,
+    )
+    async def list_companies(
+        self,
+        companies_service: CompanyService,
+        filters: Annotated[list[FilterTypes], Dependency(skip_validation=True)],
+    ) -> OffsetPagination[Company]:
+        """List companies that your account can access.."""
+        results, total = await companies_service.list_and_count(*filters)
+        return companies_service.to_schema(data=results, total=total, schema_type=Company, filters=filters)
+
+    @post(
+        operation_id="CreateCompany",
+        name="companies:create",
+        summary="Create a new company.",
+        path=urls.COMPANY_CREATE,
+    )
+    async def create_company(
+        self,
+        companies_service: CompanyService,
+        data: CompanyCreate,
+    ) -> CompanyCreate:
+        """Create a new company."""
+        obj = data.to_dict()
+        db_obj = await companies_service.create(obj)
+        return companies_service.to_schema(schema_type=Company, data=db_obj)
+
+    @get(
+        operation_id="GetCompany",
+        name="companies:get",
+        summary="Retrieve the details of a company.",
+        path=urls.COMPANY_DETAIL,
+    )
+    async def get_company(
+        self,
+        companies_service: CompanyService,
+        company_id: Annotated[
+            UUID,
+            Parameter(
+                title="Company ID",
+                description="The company to retrieve.",
+            ),
+        ],
+    ) -> Company:
+        """Get details about a comapny."""
+        db_obj = await companies_service.get(company_id)
+        return companies_service.to_schema(schema_type=Company, data=db_obj)
+
+    @patch(
+        operation_id="UpdateCompany",
+        name="companies:update",
+        path=urls.COMPANY_UPDATE,
+    )
+    async def update_company(
+        self,
+        data: CompanyUpdate,
+        companies_service: CompanyService,
+        company_id: Annotated[
+            UUID,
+            Parameter(
+                title="Company ID",
+                description="The company to update.",
+            ),
+        ],
+    ) -> Company:
+        """Update a company."""
+        db_obj = await companies_service.update(
+            item_id=company_id,
+            data=data.to_dict(),
+        )
+        return companies_service.to_schema(schema_type=Company, data=db_obj)
+
+    @delete(
+        operation_id="DeleteCompany",
+        name="companies:delete",
+        summary="Remove Company",
+        path=urls.COMPANY_DELETE,
+    )
+    async def delete_company(
+        self,
+        companies_service: CompanyService,
+        company_id: Annotated[
+            UUID,
+            Parameter(title="Company ID", description="The company to delete."),
+        ],
+    ) -> None:
+        """Delete a company."""
+        _ = await companies_service.delete(company_id)
diff --git a/src/app/domain/companies/dependencies.py b/src/app/domain/companies/dependencies.py
new file mode 100644
index 00000000..612dbc27
--- /dev/null
+++ b/src/app/domain/companies/dependencies.py
@@ -0,0 +1,27 @@
+"""Company dependencies."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from sqlalchemy.orm import joinedload, noload, selectinload
+
+from app.db.models import Company
+from app.domain.companies.services import CompanyService
+
+__all__ = ("provide_companies_service", )
+
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from sqlalchemy.ext.asyncio import AsyncSession
+
+
async def provide_companies_service(db_session: AsyncSession) -> AsyncGenerator[CompanyService, None]:
    """Yield a request-scoped ``CompanyService`` bound to *db_session*."""
    service_ctx = CompanyService.new(session=db_session, load=[])
    async with service_ctx as companies_service:
        yield companies_service
diff --git a/src/app/domain/companies/repositories.py b/src/app/domain/companies/repositories.py
new file mode 100644
index 00000000..776f0653
--- /dev/null
+++ b/src/app/domain/companies/repositories.py
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.repository import SQLAlchemyAsyncRepository, SQLAlchemyAsyncSlugRepository
+from sqlalchemy import ColumnElement, select
+from sqlalchemy.orm import joinedload, selectinload
+
+from app.db.models import Company
+
+if TYPE_CHECKING:
+    from advanced_alchemy.filters import FilterTypes
+
+__all__ = (
+    "CompanyRepository",
+)
+
+
class CompanyRepository(SQLAlchemyAsyncSlugRepository[Company]):
    """Company Repository.

    Slug-aware async repository over the ``Company`` model; the slug base
    class supplies ``get_available_slug``, which the service layer uses to
    generate unique slugs from company names.
    """

    # ORM model this repository manages.
    model_type = Company
diff --git a/src/app/domain/companies/schemas.py b/src/app/domain/companies/schemas.py
new file mode 100644
index 00000000..f4fffda1
--- /dev/null
+++ b/src/app/domain/companies/schemas.py
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+from uuid import UUID  # noqa: TCH003
+
+import msgspec
+
+from app.db.models.company import Company
+from app.lib.schema import CamelizedBaseStruct, Location, Funding
+
+
class Company(CamelizedBaseStruct):
    """A company.

    Read schema returned by the company API endpoints.  NOTE(review): this
    class shadows the ``Company`` ORM model imported at the top of the
    module — consider renaming one of them to avoid confusion.
    """
    id: UUID
    slug: str
    name: str
    # All remaining fields are optional profile details.
    description: str | None = None
    type: str | None = None
    industry: str | None = None
    headcount: int | None = None
    founded_year: int | None = None
    url: str | None = None
    profile_pic_url: str | None = None
    linkedin_profile_url: str | None = None
    hq_location: Location | None = None
    last_funding: Funding | None = None
+
+
class CompanyCreate(CamelizedBaseStruct):
    """A company create schema.

    NOTE(review): there is no ``slug`` field, yet ``CompanyService.to_model``
    reads ``data.slug`` for msgspec payloads on create — confirm slug
    handling for struct (non-dict) inputs.
    """
    name: str
    description: str | None = None
    type: str | None = None
    industry: str | None = None
    headcount: int | None = None
    founded_year: int | None = None
    url: str | None = None
    profile_pic_url: str | None = None
    linkedin_profile_url: str | None = None
    hq_location: Location | None = None
    last_funding: Funding | None = None
+
+
class CompanyUpdate(CamelizedBaseStruct, omit_defaults=True):
    """A company update schema.

    Every field except ``id`` defaults to ``msgspec.UNSET``; combined with
    ``omit_defaults=True`` this presumably keeps unset fields out of the
    serialized payload so partial updates only touch supplied fields —
    verify against ``CamelizedBaseStruct.to_dict``.
    """
    id: UUID
    name: str | None | msgspec.UnsetType = msgspec.UNSET
    description: str | None | msgspec.UnsetType = msgspec.UNSET
    type: str | None | msgspec.UnsetType = msgspec.UNSET
    industry: str | None | msgspec.UnsetType = msgspec.UNSET
    headcount: int | None | msgspec.UnsetType = msgspec.UNSET
    founded_year: int | None | msgspec.UnsetType = msgspec.UNSET
    url: str | None | msgspec.UnsetType = msgspec.UNSET
    profile_pic_url: str | None | msgspec.UnsetType = msgspec.UNSET
    linkedin_profile_url: str | None | msgspec.UnsetType = msgspec.UNSET
    hq_location: Location | None | msgspec.UnsetType = msgspec.UNSET
    last_funding: Funding | None | msgspec.UnsetType = msgspec.UNSET
diff --git a/src/app/domain/companies/services.py b/src/app/domain/companies/services.py
new file mode 100644
index 00000000..d5b9789d
--- /dev/null
+++ b/src/app/domain/companies/services.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from advanced_alchemy.exceptions import RepositoryError
+from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService, is_dict, is_msgspec_model, is_pydantic_model
+from uuid_utils.compat import uuid4
+
+from app.lib.schema import CamelizedBaseStruct, Location, Funding
+from app.db.models import Company
+
+from .repositories import CompanyRepository
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from uuid import UUID
+
+    from advanced_alchemy.filters import FilterTypes
+    from advanced_alchemy.repository._util import LoadSpec
+    from advanced_alchemy.service import ModelDictT
+    from msgspec import Struct
+    from sqlalchemy.orm import InstrumentedAttribute
+
+__all__ = (
+    "CompanyService",
+)
+
+
class CompanyService(SQLAlchemyAsyncRepositoryService[Company]):
    """Company Service.

    Wraps ``CompanyRepository`` and guarantees that a unique slug derived
    from the company name is present on create/update.
    """

    repository_type = CompanyRepository
    match_fields = ["name"]

    def __init__(self, **repo_kwargs: Any) -> None:
        self.repository: CompanyRepository = self.repository_type(**repo_kwargs)
        self.model_type = self.repository.model_type

    async def to_model(self, data: Company | dict[str, Any] | Struct, operation: str | None = None) -> Company:
        """Coerce *data* to a ``Company`` model, filling in a missing slug.

        Fixes over the original:
        - struct payloads without a ``slug`` attribute (e.g. ``CompanyCreate``)
          no longer raise ``AttributeError``;
        - dict payloads without a ``name`` key no longer raise ``KeyError``.
        """
        if operation in {"create", "update"}:
            if is_dict(data):
                if "slug" not in data and "name" in data:
                    data["slug"] = await self.repository.get_available_slug(data["name"])
            elif is_msgspec_model(data) or is_pydantic_model(data):
                name = getattr(data, "name", None)
                if name is not None and getattr(data, "slug", None) is None:
                    slug = await self.repository.get_available_slug(name)
                    if hasattr(data, "slug"):
                        data.slug = slug
                    else:
                        # Schema declares no slug field: set it on the ORM model
                        # after conversion instead of mutating the struct.
                        model = await super().to_model(data, operation)
                        model.slug = slug
                        return model
        return await super().to_model(data, operation)
diff --git a/src/app/domain/companies/urls.py b/src/app/domain/companies/urls.py
new file mode 100644
index 00000000..5d0c35ae
--- /dev/null
+++ b/src/app/domain/companies/urls.py
@@ -0,0 +1,6 @@
# Company API route templates ({company_id:uuid} is a Litestar path parameter).
COMPANY_LIST = "/api/companies"
COMPANY_DELETE = "/api/companies/{company_id:uuid}"
COMPANY_DETAIL = "/api/companies/{company_id:uuid}"
COMPANY_UPDATE = "/api/companies/{company_id:uuid}"
COMPANY_CREATE = "/api/companies"
# NOTE(review): COMPANY_INDEX duplicates COMPANY_DETAIL and is not referenced
# by the visible controller — confirm it is needed.
COMPANY_INDEX = "/api/companies/{company_id:uuid}"
diff --git a/src/app/domain/jobs/__init__.py b/src/app/domain/jobs/__init__.py
new file mode 100644
index 00000000..13bae5f7
--- /dev/null
+++ b/src/app/domain/jobs/__init__.py
@@ -0,0 +1,4 @@
+"""Job Application Module."""
+from . import controllers, dependencies, schemas, services
+
+__all__ = ["controllers", "services", "schemas", "dependencies"]
diff --git a/src/app/domain/jobs/controllers/__init__.py b/src/app/domain/jobs/controllers/__init__.py
new file mode 100644
index 00000000..e0c95a6e
--- /dev/null
+++ b/src/app/domain/jobs/controllers/__init__.py
@@ -0,0 +1,3 @@
+from .job_posts import JobPostController
+
+__all__ = ["JobPostController"]
diff --git a/src/app/domain/jobs/controllers/job_posts.py b/src/app/domain/jobs/controllers/job_posts.py
new file mode 100644
index 00000000..d594e880
--- /dev/null
+++ b/src/app/domain/jobs/controllers/job_posts.py
@@ -0,0 +1,130 @@
+"""Job Post Controllers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Annotated
+
+from litestar import Controller, delete, get, patch, post
+from litestar.di import Provide
+
+from app.config import constants
+from app.db.models import User as UserModel
+from app.domain.accounts.guards import requires_active_user
+from app.domain.jobs import urls
+from app.domain.jobs.dependencies import provide_job_posts_service
+from app.domain.jobs.schemas import JobPost, JobPostCreate, JobPostUpdate
+from app.domain.jobs.services import JobPostService
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from advanced_alchemy.service.pagination import OffsetPagination
+    from litestar.params import Dependency, Parameter
+
+    from app.lib.dependencies import FilterTypes
+
+
class JobPostController(Controller):
    """JobPost operations.

    CRUD endpoints for job posts; every route requires an active user via
    the ``requires_active_user`` guard.

    NOTE(review): ``Parameter``/``Dependency`` are imported under
    ``TYPE_CHECKING`` in this module while Litestar resolves handler
    signatures at runtime — confirm they resolve (e.g. via
    ``signature_namespace``) or move the imports out of the guard.
    """

    tags = ["Job Posts"]
    dependencies = {"job_posts_service": Provide(provide_job_posts_service)}
    guards = [requires_active_user]
    # Names made available when Litestar resolves forward references in
    # the handler signatures below.
    signature_namespace = {
        "JobPostService": JobPostService,
    }
    dto = None
    return_dto = None

    @get(
        operation_id="ListJobPosts",
        name="jobs:list-post",
        summary="List Job Posts",
        path=urls.JOBS_LIST,
    )
    async def list_job_posts(
        self,
        job_posts_service: JobPostService,
        filters: Annotated[list[FilterTypes], Dependency(skip_validation=True)],
    ) -> OffsetPagination[JobPost]:
        """List job posts that your account can access."""
        results, total = await job_posts_service.get_job_posts(*filters)
        return job_posts_service.to_schema(data=results, total=total, schema_type=JobPost, filters=filters)

    @post(
        operation_id="CreateJobPost",
        name="jobs:create-post",
        summary="Create a new job post.",
        path=urls.JOBS_CREATE,
    )
    async def create_job_post(
        self,
        job_posts_service: JobPostService,
        data: JobPostCreate,
    ) -> JobPost:
        """Create a new job post.

        Fix: the return annotation was ``JobPostCreate`` although the handler
        returns the ``JobPost`` read schema produced by ``to_schema``.
        """
        obj = data.to_dict()
        db_obj = await job_posts_service.create(obj)
        return job_posts_service.to_schema(schema_type=JobPost, data=db_obj)

    @get(
        operation_id="GetJobPost",
        name="jobs:get-post",
        summary="Retrieve the details of a job post.",
        path=urls.JOBS_DETAIL,
    )
    async def get_job_post(
        self,
        job_posts_service: JobPostService,
        job_post_id: Annotated[
            UUID,
            Parameter(
                title="JobPost ID",
                description="The job_post to retrieve.",
            ),
        ],
    ) -> JobPost:
        """Get details about a job post."""
        db_obj = await job_posts_service.get(job_post_id)
        return job_posts_service.to_schema(schema_type=JobPost, data=db_obj)

    @patch(
        operation_id="UpdateJobPost",
        name="jobs:update-post",
        path=urls.JOBS_UPDATE,
    )
    async def update_job_post(
        self,
        data: JobPostUpdate,
        job_posts_service: JobPostService,
        job_post_id: Annotated[
            UUID,
            Parameter(
                title="JobPost ID",
                description="The job_post to update.",
            ),
        ],
    ) -> JobPost:
        """Update a job post."""
        db_obj = await job_posts_service.update(
            item_id=job_post_id,
            data=data.to_dict(),
        )
        return job_posts_service.to_schema(schema_type=JobPost, data=db_obj)

    @delete(
        operation_id="DeleteJobPost",
        name="jobs:delete-post",
        summary="Remove JobPost",
        path=urls.JOBS_DELETE,
    )
    async def delete_job_post(
        self,
        job_posts_service: JobPostService,
        job_post_id: Annotated[
            UUID,
            Parameter(title="JobPost ID", description="The job_post to delete."),
        ],
    ) -> None:
        """Delete a job post."""
        await job_posts_service.delete(job_post_id)
diff --git a/src/app/domain/jobs/dependencies.py b/src/app/domain/jobs/dependencies.py
new file mode 100644
index 00000000..02682095
--- /dev/null
+++ b/src/app/domain/jobs/dependencies.py
@@ -0,0 +1,27 @@
+"""Job post dependencies."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from sqlalchemy.orm import joinedload, noload, selectinload
+
+from app.db.models import JobPost
+from app.domain.jobs.services import JobPostService
+
+__all__ = ("provide_job_posts_service", )
+
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from sqlalchemy.ext.asyncio import AsyncSession
+
+
async def provide_job_posts_service(db_session: AsyncSession) -> AsyncGenerator[JobPostService, None]:
    """Yield a request-scoped ``JobPostService`` bound to *db_session*."""
    service_ctx = JobPostService.new(session=db_session, load=[])
    async with service_ctx as job_posts_service:
        yield job_posts_service
diff --git a/src/app/domain/jobs/repositories.py b/src/app/domain/jobs/repositories.py
new file mode 100644
index 00000000..b13578d8
--- /dev/null
+++ b/src/app/domain/jobs/repositories.py
@@ -0,0 +1,44 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.repository import SQLAlchemyAsyncRepository, SQLAlchemyAsyncSlugRepository
+from sqlalchemy import ColumnElement, select
+from sqlalchemy.orm import joinedload, selectinload
+
+from app.db.models import JobPost
+
+if TYPE_CHECKING:
+    from advanced_alchemy.filters import FilterTypes
+
+__all__ = (
+    "JobPostRepository",
+)
+
+
class JobPostRepository(SQLAlchemyAsyncRepository[JobPost]):
    """JobPost Repository."""

    # ORM model this repository manages.
    model_type = JobPost

    async def get_job_posts(
        self,
        *filters: FilterTypes | ColumnElement[bool],
        auto_expunge: bool | None = None,
        force_basic_query_mode: bool | None = None,
        **kwargs: Any,
    ) -> tuple[list[JobPost], int]:
        """Return one page of job posts plus the total count, oldest first."""
        statement = (
            select(JobPost)
            .order_by(JobPost.created_at)
            .options(joinedload(JobPost.company, isouter=True))
        )
        return await self.list_and_count(
            *filters,
            statement=statement,
            auto_expunge=auto_expunge,
            force_basic_query_mode=force_basic_query_mode,
            **kwargs,
        )
diff --git a/src/app/domain/jobs/schemas.py b/src/app/domain/jobs/schemas.py
new file mode 100644
index 00000000..432781d4
--- /dev/null
+++ b/src/app/domain/jobs/schemas.py
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+from uuid import UUID  # noqa: TCH003
+
+import msgspec
+
+from app.db.models.job_post import JobPost
+from app.lib.schema import CamelizedBaseStruct, Location
+from app.domain.companies.schemas import Company
+
+
class JobPost(CamelizedBaseStruct):
    """A job post.

    Read schema returned by the job-post endpoints.  NOTE(review): this
    class shadows the ``JobPost`` ORM model imported at the top of the
    module — consider renaming one of them.
    """

    id: UUID
    title: str
    body: str | None = None
    location: Location | None = None
    seniority_level: str | None = None
    employment_type: str | None = None
    job_functions: list[str] | None = None
    url: str | None = None
    apply_url: str | None = None
    total_applicants: int | None = None
    external_id: str | None = None
    # Embedded company read schema, populated by the repository's joined load.
    company: Company | None = None
+
+
class JobPostCreate(CamelizedBaseStruct):
    """A job post create schema."""

    title: str
    body: str | None = None
    location: Location | None = None
    seniority_level: str | None = None
    employment_type: str | None = None
    job_functions: list[str] | None = None
    url: str | None = None
    apply_url: str | None = None
    total_applicants: int | None = None
    external_id: str | None = None
    # NOTE(review): typed ``str`` here while the opportunity schemas use
    # ``UUID`` for foreign keys — confirm the intended type.
    company_id: str | None = None
+
+
class JobPostUpdate(CamelizedBaseStruct, omit_defaults=True):
    """A job post update schema.

    Every field except ``id`` defaults to ``msgspec.UNSET``; together with
    ``omit_defaults=True`` this presumably keeps unset fields out of the
    serialized payload so partial updates only touch supplied fields.
    """

    id: UUID
    title: str | None | msgspec.UnsetType = msgspec.UNSET
    body: str | None | msgspec.UnsetType = msgspec.UNSET
    location: Location | None | msgspec.UnsetType = msgspec.UNSET
    seniority_level: str | None | msgspec.UnsetType = msgspec.UNSET
    employment_type: str | None | msgspec.UnsetType = msgspec.UNSET
    job_functions: list[str] | None | msgspec.UnsetType = msgspec.UNSET
    url: str | None | msgspec.UnsetType = msgspec.UNSET
    apply_url: str | None | msgspec.UnsetType = msgspec.UNSET
    total_applicants: int | None | msgspec.UnsetType = msgspec.UNSET
    external_id: str | None | msgspec.UnsetType = msgspec.UNSET
    # NOTE(review): ``str`` vs ``UUID`` — see JobPostCreate.company_id.
    company_id: str | None | msgspec.UnsetType = msgspec.UNSET
diff --git a/src/app/domain/jobs/services.py b/src/app/domain/jobs/services.py
new file mode 100644
index 00000000..7942cbce
--- /dev/null
+++ b/src/app/domain/jobs/services.py
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from advanced_alchemy.exceptions import RepositoryError
+from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService, is_dict, is_msgspec_model, is_pydantic_model
+from uuid_utils.compat import uuid4
+
+from app.lib.schema import CamelizedBaseStruct, Location, Funding
+from app.db.models import JobPost
+
+from .repositories import JobPostRepository
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from uuid import UUID
+
+    from advanced_alchemy.filters import FilterTypes
+    from advanced_alchemy.repository._util import LoadSpec
+    from advanced_alchemy.service import ModelDictT
+    from msgspec import Struct
+    from sqlalchemy.orm import InstrumentedAttribute
+
+__all__ = (
+    "JobPostService",
+)
+
+
class JobPostService(SQLAlchemyAsyncRepositoryService[JobPost]):
    """JobPost Service."""

    repository_type = JobPostRepository
    match_fields = ["title"]

    def __init__(self, **repo_kwargs: Any) -> None:
        repository = self.repository_type(**repo_kwargs)
        self.repository: JobPostRepository = repository
        self.model_type = repository.model_type

    async def get_job_posts(
        self,
        *filters: FilterTypes,
        **kwargs: Any,
    ) -> tuple[list[JobPost], int]:
        """Delegate to the repository's paginated job-post listing."""
        return await self.repository.get_job_posts(*filters, **kwargs)
diff --git a/src/app/domain/jobs/urls.py b/src/app/domain/jobs/urls.py
new file mode 100644
index 00000000..b34c00b1
--- /dev/null
+++ b/src/app/domain/jobs/urls.py
@@ -0,0 +1,6 @@
# Job-post API route templates ({job_post_id:uuid} is a Litestar path parameter).
JOBS_LIST = "/api/jobs"
JOBS_DELETE = "/api/jobs/{job_post_id:uuid}"
JOBS_DETAIL = "/api/jobs/{job_post_id:uuid}"
JOBS_UPDATE = "/api/jobs/{job_post_id:uuid}"
JOBS_CREATE = "/api/jobs"
# NOTE(review): JOBS_INDEX duplicates JOBS_DETAIL and is not referenced by
# the visible controller — confirm it is needed.
JOBS_INDEX = "/api/jobs/{job_post_id:uuid}"
diff --git a/src/app/domain/opportunities/__init__.py b/src/app/domain/opportunities/__init__.py
new file mode 100644
index 00000000..33322e0f
--- /dev/null
+++ b/src/app/domain/opportunities/__init__.py
@@ -0,0 +1,4 @@
+"""Opportunity Application Module."""
+from . import controllers, dependencies, schemas, services
+
+__all__ = ["controllers", "services", "schemas", "dependencies"]
diff --git a/src/app/domain/opportunities/controllers/__init__.py b/src/app/domain/opportunities/controllers/__init__.py
new file mode 100644
index 00000000..2c1780e4
--- /dev/null
+++ b/src/app/domain/opportunities/controllers/__init__.py
@@ -0,0 +1,3 @@
+from .opportunities import OpportunityController
+
+__all__ = ["OpportunityController"]
diff --git a/src/app/domain/opportunities/controllers/opportunities.py b/src/app/domain/opportunities/controllers/opportunities.py
new file mode 100644
index 00000000..136701be
--- /dev/null
+++ b/src/app/domain/opportunities/controllers/opportunities.py
@@ -0,0 +1,192 @@
+"""Opportunity Controllers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Annotated
+
+from litestar import Controller, delete, get, patch, post
+from litestar.di import Provide
+from litestar.exceptions import ValidationException
+
+from app.config import constants
+from app.db.models import User as UserModel
+from app.domain.accounts.guards import requires_active_user
+from app.domain.accounts.dependencies import provide_users_service
+from app.domain.accounts.services import UserService
+from app.domain.opportunities import urls
+from app.domain.opportunities.dependencies import provide_opportunities_service, provide_opportunities_audit_log_service
+from app.domain.opportunities.schemas import Opportunity, OpportunityCreate, OpportunityUpdate
+from app.domain.opportunities.services import OpportunityService, OpportunityAuditLogService
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from advanced_alchemy.service.pagination import OffsetPagination
+    from litestar.params import Dependency, Parameter
+
+    from app.lib.dependencies import FilterTypes
+
+
class OpportunityController(Controller):
    """Opportunity operations.

    Tenant-scoped CRUD endpoints for opportunities; mutations are recorded
    in the opportunity audit log.

    NOTE(review): ``Parameter``/``Dependency`` are imported under
    ``TYPE_CHECKING`` in this module while Litestar resolves handler
    signatures at runtime — confirm they resolve or move the imports out
    of the guard.
    """

    tags = ["Opportunities"]
    dependencies = {
        "opportunities_service": Provide(provide_opportunities_service),
        "opportunities_audit_log_service": Provide(provide_opportunities_audit_log_service),
        "users_service": Provide(provide_users_service),
    }
    guards = [requires_active_user]
    signature_namespace = {
        "OpportunityService": OpportunityService,
        "UserModel": UserModel,
    }
    dto = None
    return_dto = None

    @get(
        operation_id="ListOpportunities",
        name="opportunities:list",
        summary="List Opportunities",
        path=urls.OPPORTUNITY_LIST,
    )
    async def list_opportunities(
        self,
        opportunities_service: OpportunityService,
        current_user: UserModel,
        filters: Annotated[list[FilterTypes], Dependency(skip_validation=True)],
    ) -> OffsetPagination[Opportunity]:
        """List opportunities that your account can access."""
        results, total = await opportunities_service.get_opportunities(*filters, tenant_id=current_user.tenant_id)
        return opportunities_service.to_schema(data=results, total=total, schema_type=Opportunity, filters=filters)

    @post(
        operation_id="CreateOpportunity",
        name="opportunities:create",
        summary="Create a new opportunity.",
        path=urls.OPPORTUNITY_CREATE,
    )
    async def create_opportunity(
        self,
        opportunities_service: OpportunityService,
        opportunities_audit_log_service: OpportunityAuditLogService,
        users_service: UserService,
        current_user: UserModel,
        data: OpportunityCreate,
    ) -> Opportunity:
        """Create a new opportunity in the current user's tenant.

        Fixes: the return annotation was ``OpportunityCreate`` although the
        handler returns the ``Opportunity`` read schema, and the owner check
        used ``get_one``, which raises instead of returning ``None`` — the
        ``if not`` branch could never fire.

        Raises:
            ValidationException: If ``owner_id`` is not a user of the tenant.
        """
        obj = data.to_dict()

        # Verify the requested owner exists in the current user's tenant.
        owner_id = obj.get("owner_id")
        if owner_id:
            owner = await users_service.get_one_or_none(
                (UserModel.tenant_id == current_user.tenant_id) & (UserModel.id == owner_id),
            )
            if not owner:
                raise ValidationException("Owner does not exist")

        obj["tenant_id"] = current_user.tenant_id
        db_obj = await opportunities_service.create(obj)

        await opportunities_audit_log_service.create(
            {
                "operation": "create",
                "diff": {"new": obj},
                "user_id": current_user.id,
                "tenant_id": current_user.tenant_id,
                "opportunity_id": db_obj.id,
            },
        )

        return opportunities_service.to_schema(schema_type=Opportunity, data=db_obj)

    @get(
        operation_id="GetOpportunity",
        name="opportunities:get",
        summary="Retrieve the details of a opportunity.",
        path=urls.OPPORTUNITY_DETAIL,
    )
    async def get_opportunity(
        self,
        opportunities_service: OpportunityService,
        current_user: UserModel,
        opportunity_id: Annotated[
            UUID,
            Parameter(
                title="Opportunity ID",
                description="The opportunity to retrieve.",
            ),
        ],
    ) -> Opportunity:
        """Get details about an opportunity.

        NOTE(review): unlike list/update, this handler does not restrict the
        lookup to ``current_user.tenant_id`` — confirm whether cross-tenant
        reads should be rejected here.
        """
        db_obj = await opportunities_service.get(opportunity_id)
        return opportunities_service.to_schema(schema_type=Opportunity, data=db_obj)

    @patch(
        operation_id="UpdateOpportunity",
        name="opportunities:update",
        path=urls.OPPORTUNITY_UPDATE,
    )
    async def update_opportunity(
        self,
        data: OpportunityUpdate,
        opportunities_service: OpportunityService,
        opportunities_audit_log_service: OpportunityAuditLogService,
        users_service: UserService,
        current_user: UserModel,
        opportunity_id: Annotated[
            UUID,
            Parameter(
                title="Opportunity ID",
                description="The opportunity to update.",
            ),
        ],
    ) -> Opportunity:
        """Update an opportunity.

        Fixes over the original:
        - ``data.get("owner_id")`` was called on a msgspec struct (no ``.get``)
          — read from the ``to_dict`` payload instead;
        - ``OpportunityService.get_one(data.id)`` was invoked on the class,
          unbound and not awaited, with ``data.id`` instead of the path
          parameter — use the injected service and ``opportunity_id``;
        - the owner lookup now mirrors the create handler's filter expression.

        Raises:
            ValidationException: If the owner or the opportunity is not in
                the current user's tenant.
        """
        obj = data.to_dict()

        # Verify the requested owner exists in the current user's tenant.
        owner_id = obj.get("owner_id")
        if owner_id:
            owner = await users_service.get_one_or_none(
                (UserModel.tenant_id == current_user.tenant_id) & (UserModel.id == owner_id),
            )
            if not owner:
                raise ValidationException("Owner does not exist")

        # Verify the opportunity exists and belongs to the user's tenant.
        opportunity = await opportunities_service.get_one_or_none(id=opportunity_id)
        if not opportunity or opportunity.tenant_id != current_user.tenant_id:
            raise ValidationException("Opportunity does not exist")

        obj["tenant_id"] = current_user.tenant_id
        db_obj = await opportunities_service.update(
            item_id=opportunity_id,
            data=obj,
        )

        await opportunities_audit_log_service.create(
            {
                "operation": "update",
                "diff": {"new": obj},
                "user_id": current_user.id,
                "tenant_id": current_user.tenant_id,
                "opportunity_id": db_obj.id,
            },
        )

        return opportunities_service.to_schema(schema_type=Opportunity, data=db_obj)

    @delete(
        operation_id="DeleteOpportunity",
        name="opportunities:delete",
        summary="Remove Opportunity",
        path=urls.OPPORTUNITY_DELETE,
    )
    async def delete_opportunity(
        self,
        opportunities_service: OpportunityService,
        current_user: UserModel,
        opportunity_id: Annotated[
            UUID,
            Parameter(title="Opportunity ID", description="The opportunity to delete."),
        ],
    ) -> None:
        """Delete an opportunity.

        NOTE(review): the delete is not scoped to ``current_user.tenant_id``
        — confirm whether cross-tenant deletes should be rejected here.
        """
        await opportunities_service.delete(opportunity_id)
diff --git a/src/app/domain/opportunities/dependencies.py b/src/app/domain/opportunities/dependencies.py
new file mode 100644
index 00000000..ee07d649
--- /dev/null
+++ b/src/app/domain/opportunities/dependencies.py
@@ -0,0 +1,36 @@
+"""Opportunity dependencies."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from sqlalchemy.orm import joinedload, noload, selectinload
+
+from app.db.models import Opportunity, OpportunityAuditLog
+from app.domain.opportunities.services import OpportunityService, OpportunityAuditLogService
+
+__all__ = ("provide_opportunities_service", "provide_opportunities_audit_log_service")
+
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from sqlalchemy.ext.asyncio import AsyncSession
+
+
async def provide_opportunities_service(db_session: AsyncSession) -> AsyncGenerator[OpportunityService, None]:
    """Yield a request-scoped ``OpportunityService`` bound to *db_session*."""
    service_ctx = OpportunityService.new(session=db_session, load=[])
    async with service_ctx as opportunities_service:
        yield opportunities_service
+
+
async def provide_opportunities_audit_log_service(db_session: AsyncSession) -> AsyncGenerator[OpportunityAuditLogService, None]:
    """Yield a request-scoped ``OpportunityAuditLogService`` bound to *db_session*."""
    service_ctx = OpportunityAuditLogService.new(session=db_session, load=[])
    async with service_ctx as audit_log_service:
        yield audit_log_service
diff --git a/src/app/domain/opportunities/repositories.py b/src/app/domain/opportunities/repositories.py
new file mode 100644
index 00000000..76f297e2
--- /dev/null
+++ b/src/app/domain/opportunities/repositories.py
@@ -0,0 +1,77 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.repository import SQLAlchemyAsyncSlugRepository
+from sqlalchemy import ColumnElement, select
+from sqlalchemy.orm import joinedload, InstrumentedAttribute
+
+from app.db.models import Opportunity, OpportunityAuditLog
+
+if TYPE_CHECKING:
+    from advanced_alchemy.filters import FilterTypes
+    from advanced_alchemy.repository._util import LoadSpec
+
+__all__ = (
+    "OpportunityRepository",
+    "OpportunityAuditLogRepository"
+)
+
+
class OpportunityRepository(SQLAlchemyAsyncSlugRepository[Opportunity]):
    """Opportunity Repository."""

    model_type = Opportunity

    async def get_opportunities(
        self,
        *filters: FilterTypes | ColumnElement[bool],
        tenant_id: UUID,
        auto_expunge: bool | None = None,
        force_basic_query_mode: bool | None = None,
        **kwargs: Any,
    ) -> tuple[list[Opportunity], int]:
        """Get paginated list and total count of opportunities a tenant can access."""
        return await self.list_and_count(
            *filters,
            statement=select(Opportunity).where(Opportunity.tenant_id == tenant_id),
            auto_expunge=auto_expunge,
            force_basic_query_mode=force_basic_query_mode,
            **kwargs,
        )

    async def get_opportunity(
        self,
        opportunity_id: UUID,
        tenant_id: UUID,
        *,
        id_attribute: str | InstrumentedAttribute[Any] | None = None,
        load: LoadSpec | None = None,
        execution_options: dict[str, Any] | None = None,
        auto_expunge: bool | None = None,
    ) -> Opportunity:
        """Get an opportunity along with its associated details.

        Fixes over the original: ``self.repository.get_one`` was called, but
        ``self`` *is* the repository (no ``repository`` attribute exists);
        additionally ``get_one`` accepts neither ``item_id`` nor
        ``id_attribute`` — ``get`` does, and it applies the id filter itself,
        so the explicit ``Opportunity.id == opportunity_id`` clause is gone.

        NOTE(review): ``joinedload`` on collection relationships may produce
        duplicated parent rows unless results are uniquified — confirm the
        base repository handles this, or switch to ``selectinload``.
        """
        return await self.get(
            item_id=opportunity_id,
            auto_expunge=auto_expunge,
            statement=select(Opportunity)
            .where(Opportunity.tenant_id == tenant_id)
            .options(
                joinedload(Opportunity.contacts, innerjoin=False),
                joinedload(Opportunity.job_posts, innerjoin=False),
                joinedload(Opportunity.logs, innerjoin=False),
            ),
            id_attribute=id_attribute,
            load=load,
            execution_options=execution_options,
        )
+
class OpportunityAuditLogRepository(SQLAlchemyAsyncSlugRepository[OpportunityAuditLog]):
    """OpportunityAuditLog Repository.

    NOTE(review): audit-log rows have no obvious slug column; the slug-aware
    base class may be unnecessary here — confirm whether the plain
    ``SQLAlchemyAsyncRepository`` was intended.
    """

    # ORM model this repository manages.
    model_type = OpportunityAuditLog
diff --git a/src/app/domain/opportunities/schemas.py b/src/app/domain/opportunities/schemas.py
new file mode 100644
index 00000000..bac502b0
--- /dev/null
+++ b/src/app/domain/opportunities/schemas.py
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+from uuid import UUID  # noqa: TCH003
+from typing import Any
+
+import msgspec
+
+from app.domain.accounts.schemas import User
+from app.domain.companies.schemas import Company
+from app.domain.people.schemas import Person
+from app.domain.jobs.schemas import JobPost
+from app.lib.schema import CamelizedBaseStruct, OpportunityStage
+
+
class OpportunityAuditLog(CamelizedBaseStruct):
    """An opportunity audit log.

    One recorded mutation of an opportunity; ``diff`` carries the changed
    payload (the controller stores ``{"new": obj}``).
    """
    id: UUID
    operation: str
    user: User
    diff: dict[str, Any] | None = None
+
+
class Opportunity(CamelizedBaseStruct):
    """An opportunity.

    Read schema returned by the opportunity endpoints, including related
    records (owner, company, contacts, job posts, audit logs) when loaded.
    """
    id: UUID
    slug: str
    name: str
    stage: OpportunityStage
    notes: str | None = None
    owner: User | None = None
    company: Company | None = None
    contacts: list[Person] | None = None
    job_posts: list[JobPost] | None = None
    logs: list[OpportunityAuditLog] | None = None
+
+
class OpportunityCreate(CamelizedBaseStruct):
    """An opportunity create schema."""
    name: str
    stage: OpportunityStage | None = None
    notes: str | None = None
    owner_id: UUID | None = None
    company_id: UUID | None = None
    # Presumably used to populate the person/job-post link tables —
    # verify in the service layer.
    contact_ids: list[UUID] | None = None
    job_post_ids: list[UUID] | None = None
+
+
class OpportunityUpdate(CamelizedBaseStruct, omit_defaults=True):
    """An opportunity update schema.

    Fix: now consistent with ``CompanyUpdate``/``JobPostUpdate`` —
    ``omit_defaults=True`` plus ``msgspec.UNSET`` sentinels so partial
    updates only carry the fields a client actually supplied.  Previously
    ``owner_id``/``company_id`` defaulted to ``None`` and would have been
    sent (and cleared) on every update.
    """
    id: UUID
    name: str | None | msgspec.UnsetType = msgspec.UNSET
    stage: OpportunityStage | None | msgspec.UnsetType = msgspec.UNSET
    notes: str | None | msgspec.UnsetType = msgspec.UNSET
    owner_id: UUID | None | msgspec.UnsetType = msgspec.UNSET
    company_id: UUID | None | msgspec.UnsetType = msgspec.UNSET
diff --git a/src/app/domain/opportunities/services.py b/src/app/domain/opportunities/services.py
new file mode 100644
index 00000000..213427b1
--- /dev/null
+++ b/src/app/domain/opportunities/services.py
@@ -0,0 +1,156 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from sqlalchemy import ColumnElement, insert
+from advanced_alchemy.exceptions import RepositoryError
+from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService, is_dict, is_msgspec_model, is_pydantic_model
+from uuid_utils.compat import uuid4
+
+from app.lib.schema import CamelizedBaseStruct
+# NOTE(review): the models import was previously written twice (once with and once
+# without the association tables); the two statements are merged into one here.
+from app.db.models import Opportunity, OpportunityAuditLog, opportunity_person_relation, opportunity_job_post_relation
+from app.domain.accounts.services import UserService
+from .repositories import OpportunityRepository, OpportunityAuditLogRepository
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from uuid import UUID
+
+    # LoadSpec and ModelDictT were each imported twice in this guard;
+    # the duplicates are removed.
+    from advanced_alchemy.filters import FilterTypes
+    from advanced_alchemy.repository._util import LoadSpec
+    from advanced_alchemy.service import ModelDictT
+    from msgspec import Struct
+    from sqlalchemy.orm import InstrumentedAttribute
+
+__all__ = (
+    "OpportunityService",
+    "OpportunityAuditLogService",
+)
+
+
+class OpportunityAuditLogService(SQLAlchemyAsyncRepositoryService[OpportunityAuditLog]):
+    """OpportunityAuditLog Service.
+
+    Bug fix: the generic parameter and the repository annotation previously named
+    ``Opportunity``/``OpportunityRepository`` (copy-paste from OpportunityService),
+    although this service is constructed from OpportunityAuditLogRepository.
+    """
+
+    repository_type = OpportunityAuditLogRepository
+    match_fields = ["id"]
+
+    def __init__(self, **repo_kwargs: Any) -> None:
+        self.repository: OpportunityAuditLogRepository = self.repository_type(**repo_kwargs)
+        self.model_type = self.repository.model_type
+
+
+class OpportunityService(SQLAlchemyAsyncRepositoryService[Opportunity]):
+    """Opportunity Service: CRUD plus tenant-scoped queries and association management."""
+
+    repository_type = OpportunityRepository
+    match_fields = ["name"]
+
+    def __init__(self, **repo_kwargs: Any) -> None:
+        self.repository: OpportunityRepository = self.repository_type(**repo_kwargs)
+        self.model_type = self.repository.model_type
+
+    async def get_opportunities(
+        self,
+        *filters: FilterTypes,
+        tenant_id: UUID,
+        **kwargs: Any,
+    ) -> tuple[list[Opportunity], int]:
+        """Return all opportunities for a tenant together with the total count."""
+        return await self.repository.get_opportunities(*filters, tenant_id=tenant_id, **kwargs)
+
+    async def get_opportunity(
+        self,
+        opportunity_id: UUID,
+        tenant_id: UUID,
+        **kwargs: Any,
+    ) -> tuple[list[Opportunity], int]:
+        """Return a single opportunity, scoped to the given tenant.
+
+        NOTE(review): the return annotation looks copy-pasted from
+        ``get_opportunities``; confirm the repository's actual return type
+        before tightening it.
+        """
+        return await self.repository.get_opportunity(
+            opportunity_id=opportunity_id, tenant_id=tenant_id, **kwargs
+        )
+
+    async def update(
+        self,
+        data: ModelDictT[Opportunity],
+        item_id: Any | None = None,
+        *,
+        id_attribute: str | InstrumentedAttribute[Any] | None = None,
+        load: LoadSpec | None = None,
+        execution_options: dict[str, Any] | None = None,
+        attribute_names: Iterable[str] | None = None,
+        with_for_update: bool | None = None,
+        auto_commit: bool | None = None,
+        auto_expunge: bool | None = None,
+        auto_refresh: bool | None = None,
+    ) -> Opportunity:
+        """Wrap repository update operation.
+
+        Returns:
+            Updated representation.
+        """
+        obj = await super().update(
+            data=data,
+            item_id=item_id,
+            attribute_names=attribute_names,
+            id_attribute=id_attribute,
+            load=load,
+            execution_options=execution_options,
+            with_for_update=with_for_update,
+            auto_commit=auto_commit,
+            auto_expunge=auto_expunge,
+            auto_refresh=auto_refresh,
+        )
+        return obj
+
+    async def create(
+        self,
+        data: ModelDictT[Opportunity],
+        *,
+        load: LoadSpec | None = None,
+        execution_options: dict[str, Any] | None = None,
+        auto_commit: bool | None = None,
+        auto_expunge: bool | None = None,
+        auto_refresh: bool | None = None,
+    ) -> Opportunity:
+        """Create a new opportunity and link any supplied contacts and job posts.
+
+        Returns:
+            The persisted opportunity instance.
+        """
+        # Association ids are only extracted from dict payloads; struct inputs are
+        # expected to be converted to dicts by the controller first -- TODO confirm.
+        contact_ids: list[UUID] = []
+        job_post_ids: list[UUID] = []
+        if isinstance(data, dict):
+            contact_ids = data.pop("contact_ids", [])
+            job_post_ids = data.pop("job_post_ids", [])
+        data = await self.to_model(data, "create")
+        obj = await super().create(
+            data=data,
+            load=load,
+            execution_options=execution_options,
+            auto_expunge=True,  # NOTE(review): caller-supplied auto_commit/auto_expunge/auto_refresh are partly ignored -- confirm intended
+            auto_refresh=False,
+            auto_commit=auto_commit,
+        )
+
+        # Add associated contacts
+        for contact_id in contact_ids:
+            stmt = insert(opportunity_person_relation).values(opportunity_id=obj.id, person_id=contact_id)
+            await self.repository.session.execute(stmt)
+
+        # Add associated job posts
+        for job_post_id in job_post_ids:
+            stmt = insert(opportunity_job_post_relation).values(opportunity_id=obj.id, job_post_id=job_post_id)
+            await self.repository.session.execute(stmt)
+
+        # Bug fix: previously returned ``data`` (the pre-insert payload/model)
+        # instead of the persisted instance returned by the repository.
+        return obj
+
+    async def to_model(self, data: Opportunity | dict[str, Any] | Struct, operation: str | None = None) -> Opportunity:
+        """Ensure a slug derived from ``name`` exists before the base conversion runs.
+
+        NOTE(review): the struct branches assume the payload exposes ``slug`` and
+        ``name`` attributes; OpportunityCreate defines neither ``slug`` nor is this
+        path exercised by the visible controllers (they convert to dicts) -- confirm.
+        """
+        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "create" and data.slug is None:  # type: ignore[union-attr]
+            data.slug = await self.repository.get_available_slug(data.name)  # type: ignore[union-attr]
+        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "update" and data.slug is None:  # type: ignore[union-attr]
+            data.slug = await self.repository.get_available_slug(data.name)  # type: ignore[union-attr]
+        if is_dict(data) and "slug" not in data and operation == "create":
+            data["slug"] = await self.repository.get_available_slug(data["name"])
+        if is_dict(data) and "slug" not in data and "name" in data and operation == "update":
+            data["slug"] = await self.repository.get_available_slug(data["name"])
+        return await super().to_model(data, operation)
diff --git a/src/app/domain/opportunities/urls.py b/src/app/domain/opportunities/urls.py
new file mode 100644
index 00000000..9d35a835
--- /dev/null
+++ b/src/app/domain/opportunities/urls.py
@@ -0,0 +1,6 @@
+# Route path constants for the opportunities API.
+OPPORTUNITY_LIST = "/api/opportunities"
+OPPORTUNITY_DELETE = "/api/opportunities/{opportunity_id:uuid}"
+OPPORTUNITY_DETAIL = "/api/opportunities/{opportunity_id:uuid}"
+OPPORTUNITY_UPDATE = "/api/opportunities/{opportunity_id:uuid}"
+OPPORTUNITY_CREATE = "/api/opportunities"
+OPPORTUNITY_INDEX = "/api/opportunities/{opportunity_id:uuid}"  # NOTE(review): identical to OPPORTUNITY_DETAIL -- possibly redundant
diff --git a/src/app/domain/people/__init__.py b/src/app/domain/people/__init__.py
new file mode 100644
index 00000000..18c61ee6
--- /dev/null
+++ b/src/app/domain/people/__init__.py
@@ -0,0 +1,4 @@
+"""People Application Module."""
+from . import controllers, dependencies, schemas, services
+
+__all__ = ["controllers", "services", "schemas", "dependencies"]
diff --git a/src/app/domain/people/controllers/__init__.py b/src/app/domain/people/controllers/__init__.py
new file mode 100644
index 00000000..132a7186
--- /dev/null
+++ b/src/app/domain/people/controllers/__init__.py
@@ -0,0 +1,3 @@
+from .persons import PersonController
+
+__all__ = ["PersonController"]
diff --git a/src/app/domain/people/controllers/persons.py b/src/app/domain/people/controllers/persons.py
new file mode 100644
index 00000000..b8ba432c
--- /dev/null
+++ b/src/app/domain/people/controllers/persons.py
@@ -0,0 +1,129 @@
+"""Person Controllers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Annotated
+
+from litestar import Controller, delete, get, patch, post
+from litestar.di import Provide
+
+from app.config import constants
+from app.domain.accounts.guards import requires_active_user
+from app.domain.people import urls
+from app.domain.people.dependencies import provide_persons_service
+from app.domain.people.schemas import Person, PersonCreate, PersonUpdate
+from app.domain.people.services import PersonService
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from advanced_alchemy.service.pagination import OffsetPagination
+    from litestar.params import Dependency, Parameter
+
+    from app.lib.dependencies import FilterTypes
+
+
+class PersonController(Controller):
+    """Person CRUD operations."""
+
+    tags = ["Persons"]
+    dependencies = {"persons_service": Provide(provide_persons_service)}
+    guards = [requires_active_user]
+    signature_namespace = {
+        "PersonService": PersonService,
+    }
+    dto = None
+    return_dto = None
+
+    @get(
+        operation_id="ListPersons",
+        name="persons:list",
+        summary="List Persons",
+        path=urls.PERSON_LIST,
+    )
+    async def list_persons(
+        self,
+        persons_service: PersonService,
+        filters: Annotated[list[FilterTypes], Dependency(skip_validation=True)],
+    ) -> OffsetPagination[Person]:
+        """List persons that your account can access."""
+        results, total = await persons_service.list_and_count(*filters)
+        return persons_service.to_schema(data=results, total=total, schema_type=Person, filters=filters)
+
+    @post(
+        operation_id="CreatePerson",
+        name="persons:create",
+        summary="Create a new person.",
+        path=urls.PERSON_CREATE,
+    )
+    async def create_person(
+        self,
+        persons_service: PersonService,
+        data: PersonCreate,
+    ) -> Person:  # bug fix: was annotated ``-> PersonCreate`` although a Person schema is returned
+        """Create a new person."""
+        obj = data.to_dict()
+        db_obj = await persons_service.create(obj)
+        return persons_service.to_schema(schema_type=Person, data=db_obj)
+
+    @get(
+        operation_id="GetPerson",
+        name="persons:get",
+        summary="Retrieve the details of a person.",
+        path=urls.PERSON_DETAIL,
+    )
+    async def get_person(
+        self,
+        persons_service: PersonService,
+        person_id: Annotated[
+            UUID,
+            Parameter(
+                title="Person ID",
+                description="The person to retrieve.",
+            ),
+        ],
+    ) -> Person:
+        """Get details about a person."""
+        db_obj = await persons_service.get(person_id)
+        return persons_service.to_schema(schema_type=Person, data=db_obj)
+
+    @patch(
+        operation_id="UpdatePerson",
+        name="persons:update",
+        path=urls.PERSON_UPDATE,
+    )
+    async def update_person(
+        self,
+        data: PersonUpdate,
+        persons_service: PersonService,
+        person_id: Annotated[
+            UUID,
+            Parameter(
+                title="Person ID",
+                description="The person to update.",
+            ),
+        ],
+    ) -> Person:
+        """Update a person."""
+        db_obj = await persons_service.update(
+            item_id=person_id,
+            data=data.to_dict(),
+        )
+        return persons_service.to_schema(schema_type=Person, data=db_obj)
+
+    @delete(
+        operation_id="DeletePerson",
+        name="persons:delete",
+        summary="Remove Person",
+        path=urls.PERSON_DELETE,
+    )
+    async def delete_person(
+        self,
+        persons_service: PersonService,
+        person_id: Annotated[
+            UUID,
+            Parameter(title="Person ID", description="The person to delete."),
+        ],
+    ) -> None:
+        """Delete a person."""
+        _ = await persons_service.delete(person_id)
diff --git a/src/app/domain/people/dependencies.py b/src/app/domain/people/dependencies.py
new file mode 100644
index 00000000..453f572b
--- /dev/null
+++ b/src/app/domain/people/dependencies.py
@@ -0,0 +1,27 @@
+"""People Controllers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from sqlalchemy.orm import joinedload, noload, selectinload
+
+from app.db.models import Person
+from app.domain.people.services import PersonService
+
+__all__ = ("provide_persons_service", )
+
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from sqlalchemy.ext.asyncio import AsyncSession
+
+
+async def provide_persons_service(db_session: AsyncSession) -> AsyncGenerator[PersonService, None]:
+    """Construct repository and service objects for the request.
+
+    Yields:
+        PersonService: service bound to the request's database session.
+    """
+    async with PersonService.new(
+        session=db_session,
+        load=[],  # no relationships eagerly loaded by default
+    ) as service:
+        yield service
diff --git a/src/app/domain/people/repositories.py b/src/app/domain/people/repositories.py
new file mode 100644
index 00000000..3dbb0719
--- /dev/null
+++ b/src/app/domain/people/repositories.py
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.repository import SQLAlchemyAsyncRepository, SQLAlchemyAsyncSlugRepository
+from sqlalchemy import ColumnElement, select
+from sqlalchemy.orm import joinedload, selectinload
+
+from app.db.models import Person
+
+if TYPE_CHECKING:
+    from advanced_alchemy.filters import FilterTypes
+
+__all__ = (
+    "PersonRepository",
+)
+
+
+class PersonRepository(SQLAlchemyAsyncSlugRepository[Person]):
+    """Person Repository (slug-aware: provides ``get_available_slug``)."""
+
+    model_type = Person  # SQLAlchemy model managed by this repository
diff --git a/src/app/domain/people/schemas.py b/src/app/domain/people/schemas.py
new file mode 100644
index 00000000..379cccdf
--- /dev/null
+++ b/src/app/domain/people/schemas.py
@@ -0,0 +1,87 @@
+from __future__ import annotations
+
+from uuid import UUID  # noqa: TCH003
+from datetime import date
+
+import msgspec
+
+# NOTE(review): removed `from app.db.models.person import Person` -- the schema class
+# defined below immediately shadowed it, so the import only caused confusion.
+from app.lib.schema import CamelizedBaseStruct, Location, WorkExperience, SocialActivity
+
+
+class Person(CamelizedBaseStruct):
+    """A person profile with contact details and work/social history."""
+    id: UUID
+    slug: str
+    first_name: str | None = None
+    last_name: str | None = None
+    full_name: str | None = None
+    headline: str | None = None
+    summary: str | None = None
+    occupation: str | None = None
+    industry: str | None = None
+    profile_pic_url: str | None = None
+    url: str | None = None
+    linkedin_profile_url: str | None = None
+    twitter_profile_url: str | None = None
+    github_profile_url: str | None = None
+    location: Location | None = None
+    personal_emails: list[str] | None = None
+    work_emails: list[str] | None = None
+    personal_numbers: list[str] | None = None
+    birth_date: date | None = None
+    gender: str | None = None
+    languages: list[str] | None = None
+    work_experiences: list[WorkExperience] | None = None
+    social_activities: list[SocialActivity] | None = None
+
+
+class PersonCreate(CamelizedBaseStruct):
+    """Payload for creating a person; every field is optional."""
+    first_name: str | None = None
+    last_name: str | None = None
+    full_name: str | None = None
+    headline: str | None = None
+    summary: str | None = None
+    occupation: str | None = None
+    industry: str | None = None
+    profile_pic_url: str | None = None
+    url: str | None = None
+    linkedin_profile_url: str | None = None
+    twitter_profile_url: str | None = None
+    github_profile_url: str | None = None
+    location: Location | None = None
+    personal_emails: list[str] | None = None
+    work_emails: list[str] | None = None
+    personal_numbers: list[str] | None = None
+    birth_date: date | None = None
+    gender: str | None = None
+    languages: list[str] | None = None
+    work_experiences: list[WorkExperience] | None = None
+    social_activities: list[SocialActivity] | None = None
+
+
+class PersonUpdate(CamelizedBaseStruct, omit_defaults=True):
+    """Partial-update payload for a person; UNSET fields are left unchanged."""
+    id: UUID
+    first_name: str | None | msgspec.UnsetType = msgspec.UNSET
+    last_name: str | None | msgspec.UnsetType = msgspec.UNSET
+    full_name: str | None | msgspec.UnsetType = msgspec.UNSET
+    headline: str | None | msgspec.UnsetType = msgspec.UNSET
+    summary: str | None | msgspec.UnsetType = msgspec.UNSET
+    occupation: str | None | msgspec.UnsetType = msgspec.UNSET
+    industry: str | None | msgspec.UnsetType = msgspec.UNSET
+    profile_pic_url: str | None | msgspec.UnsetType = msgspec.UNSET
+    url: str | None | msgspec.UnsetType = msgspec.UNSET
+    linkedin_profile_url: str | None | msgspec.UnsetType = msgspec.UNSET
+    twitter_profile_url: str | None | msgspec.UnsetType = msgspec.UNSET
+    github_profile_url: str | None | msgspec.UnsetType = msgspec.UNSET
+    location: Location | None | msgspec.UnsetType = msgspec.UNSET
+    personal_emails: list[str] | None | msgspec.UnsetType = msgspec.UNSET
+    work_emails: list[str] | None | msgspec.UnsetType = msgspec.UNSET
+    personal_numbers: list[str] | None | msgspec.UnsetType = msgspec.UNSET
+    birth_date: date | None | msgspec.UnsetType = msgspec.UNSET
+    gender: str | None | msgspec.UnsetType = msgspec.UNSET
+    languages: list[str] | None | msgspec.UnsetType = msgspec.UNSET
+    work_experiences: list[WorkExperience] | None | msgspec.UnsetType = msgspec.UNSET
+    social_activities: list[SocialActivity] | None | msgspec.UnsetType = msgspec.UNSET
diff --git a/src/app/domain/people/services.py b/src/app/domain/people/services.py
new file mode 100644
index 00000000..d6bd9cf3
--- /dev/null
+++ b/src/app/domain/people/services.py
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from advanced_alchemy.exceptions import RepositoryError
+from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService, is_dict, is_msgspec_model, is_pydantic_model
+from uuid_utils.compat import uuid4
+
+from app.lib.schema import CamelizedBaseStruct, Location, WorkExperience, SocialActivity
+from app.db.models import Person
+
+from .repositories import PersonRepository
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from uuid import UUID
+
+    from advanced_alchemy.filters import FilterTypes
+    from advanced_alchemy.repository._util import LoadSpec
+    from advanced_alchemy.service import ModelDictT
+    from msgspec import Struct
+    from sqlalchemy.orm import InstrumentedAttribute
+
+__all__ = (
+    "PersonService",
+)
+
+
+class PersonService(SQLAlchemyAsyncRepositoryService[Person]):
+    """Person Service."""
+
+    repository_type = PersonRepository
+    match_fields = ["full_name"]
+
+    def __init__(self, **repo_kwargs: Any) -> None:
+        self.repository: PersonRepository = self.repository_type(**repo_kwargs)
+        self.model_type = self.repository.model_type
+
+    async def to_model(self, data: Person | dict[str, Any] | Struct, operation: str | None = None) -> Person:
+        """Populate a slug (derived from the person's full name) before base conversion.
+
+        Bug fix: this previously read ``data.name`` / checked ``"name" in data``,
+        but the Person schemas only define ``full_name`` (see ``match_fields``).
+        """
+        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "create" and data.slug is None:  # type: ignore[union-attr]
+            data.slug = await self.repository.get_available_slug(data.full_name)  # type: ignore[union-attr]
+        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "update" and data.slug is None:  # type: ignore[union-attr]
+            data.slug = await self.repository.get_available_slug(data.full_name)  # type: ignore[union-attr]
+        if is_dict(data) and "slug" not in data and operation == "create":
+            data["slug"] = await self.repository.get_available_slug(data["full_name"])
+        if is_dict(data) and "slug" not in data and "full_name" in data and operation == "update":
+            data["slug"] = await self.repository.get_available_slug(data["full_name"])
+        return await super().to_model(data, operation)
diff --git a/src/app/domain/people/urls.py b/src/app/domain/people/urls.py
new file mode 100644
index 00000000..2f2ac8a5
--- /dev/null
+++ b/src/app/domain/people/urls.py
@@ -0,0 +1,6 @@
+# Route path constants for the persons API.
+# Bug fix: path params were named {company_id:uuid} (copy-paste from the companies
+# module) but the handlers declare `person_id`, so Litestar could not bind them.
+PERSON_LIST = "/api/persons"
+PERSON_DELETE = "/api/persons/{person_id:uuid}"
+PERSON_DETAIL = "/api/persons/{person_id:uuid}"
+PERSON_UPDATE = "/api/persons/{person_id:uuid}"
+PERSON_CREATE = "/api/persons"
+PERSON_INDEX = "/api/persons/{person_id:uuid}"
diff --git a/src/app/domain/system/__init__.py b/src/app/domain/system/__init__.py
new file mode 100644
index 00000000..fb0420fb
--- /dev/null
+++ b/src/app/domain/system/__init__.py
@@ -0,0 +1,3 @@
+from . import controllers, schemas, tasks
+
+__all__ = ("controllers", "schemas", "tasks")
diff --git a/src/app/domain/system/controllers.py b/src/app/domain/system/controllers.py
new file mode 100644
index 00000000..9b974c68
--- /dev/null
+++ b/src/app/domain/system/controllers.py
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Literal, TypeVar
+
+import structlog
+from litestar import Controller, MediaType, Request, get
+from litestar.response import Response
+from redis import RedisError
+from sqlalchemy import text
+
+from app.config.base import get_settings
+
+from .schemas import SystemHealth
+from .urls import SYSTEM_HEALTH
+
+if TYPE_CHECKING:
+    from litestar_saq import TaskQueues
+    from sqlalchemy.ext.asyncio import AsyncSession
+
+logger = structlog.get_logger()
+OnlineOffline = TypeVar("OnlineOffline", bound=Literal["online", "offline"])
+
+
+class SystemController(Controller):
+    """Service health-check endpoint."""
+
+    tags = ["System"]
+
+    @get(
+        operation_id="SystemHealth",
+        name="system:health",
+        path=SYSTEM_HEALTH,
+        media_type=MediaType.JSON,
+        cache=False,
+        tags=["System"],
+        summary="Health Check",
+        description="Execute a health check against backend components.  Returns system information including database and cache status.",
+    )
+    async def check_system_health(
+        self,
+        request: Request,
+        db_session: AsyncSession,
+        task_queues: TaskQueues,
+    ) -> Response[SystemHealth]:
+        """Probe database, cache, and workers; return 200 only when all are online."""
+        settings = get_settings()
+        try:
+            await db_session.execute(text("select 1"))
+            db_ping = True
+        except ConnectionRefusedError:
+            # NOTE(review): only connection refusal marks the DB "offline";
+            # other database errors will propagate as a 500 -- confirm intended.
+            db_ping = False
+
+        db_status = "online" if db_ping else "offline"
+        try:
+            cache_ping = await settings.redis.get_client().ping()
+        except RedisError:
+            cache_ping = False
+        cache_status = "online" if cache_ping else "offline"
+        # Workers are "online" when queue info can be gathered for the configured queues.
+        worker_ping = bool([await queue.info() for queue in task_queues.queues.values()])
+        worker_status = "online" if worker_ping else "offline"
+        healthy = worker_ping and cache_ping and db_ping
+        if healthy:
+            await logger.adebug(
+                "System Health",
+                database_status=db_status,
+                cache_status=cache_status,
+                worker_status=worker_status,
+            )
+        else:
+            await logger.awarn(
+                "System Health Check",
+                database_status=db_status,
+                cache_status=cache_status,
+                worker_status=worker_status,
+            )
+
+        return Response(
+            content=SystemHealth(database_status=db_status, cache_status=cache_status, worker_status=worker_status),  # type: ignore
+            status_code=200 if healthy else 500,  # reuse the aggregate computed above instead of re-AND-ing the pings
+            media_type=MediaType.JSON,
+        )
diff --git a/src/app/domain/system/schemas.py b/src/app/domain/system/schemas.py
new file mode 100644
index 00000000..a3524e05
--- /dev/null
+++ b/src/app/domain/system/schemas.py
@@ -0,0 +1,18 @@
+from dataclasses import dataclass
+from typing import Literal
+
+from app.__about__ import __version__ as current_version
+from app.config.base import get_settings
+
+__all__ = ("SystemHealth",)
+
+settings = get_settings()
+
+
+@dataclass
+class SystemHealth:
+    """Health-check response payload returned by the /health endpoint."""
+    database_status: Literal["online", "offline"]
+    cache_status: Literal["online", "offline"]
+    worker_status: Literal["online", "offline"]
+    # App identity is captured once at import time from settings and the package version.
+    app: str = settings.app.NAME
+    version: str = current_version
diff --git a/src/app/domain/system/tasks.py b/src/app/domain/system/tasks.py
new file mode 100644
index 00000000..81fcbad1
--- /dev/null
+++ b/src/app/domain/system/tasks.py
@@ -0,0 +1,31 @@
+import asyncio
+
+from saq.types import Context
+from structlog import get_logger
+
+__all__ = ["background_worker_task", "system_task", "system_upkeep"]
+
+
+logger = get_logger()
+
+
+async def system_upkeep(_: Context) -> None:
+    """Simulated long-running maintenance task (demo placeholder)."""
+    await logger.ainfo("Performing system upkeep operations.")
+    await logger.ainfo("Simulating a long running operation.  Sleeping for 60 seconds.")
+    await asyncio.sleep(60)
+    await logger.ainfo("Simulating an even longer running operation.  Sleeping for 120 seconds.")
+    await asyncio.sleep(120)
+    await logger.ainfo("Long running process complete.")
+    # Bug fix: the final message previously repeated "Performing system upkeep
+    # operations." instead of reporting completion.
+    await logger.ainfo("System upkeep operations complete.")
+
+
+async def background_worker_task(_: Context) -> None:
+    """Simulated background worker task (demo placeholder)."""
+    await logger.ainfo("Performing background worker task.")
+    await asyncio.sleep(20)
+    # Bug fix: the final message was copy-pasted from system_upkeep.
+    await logger.ainfo("Background worker task complete.")
+
+
+async def system_task(_: Context) -> None:
+    """Short-lived demo task used to exercise the task queue."""
+    await logger.ainfo("Performing simple system task")
+    await asyncio.sleep(2)
+    await logger.ainfo("System task complete.")
diff --git a/src/app/domain/system/urls.py b/src/app/domain/system/urls.py
new file mode 100644
index 00000000..2de48ef3
--- /dev/null
+++ b/src/app/domain/system/urls.py
@@ -0,0 +1,2 @@
+SYSTEM_HEALTH: str = "/health"
+"""Default path for the service health check endpoint."""
diff --git a/src/app/domain/tags/__init__.py b/src/app/domain/tags/__init__.py
new file mode 100644
index 00000000..1e035ea0
--- /dev/null
+++ b/src/app/domain/tags/__init__.py
@@ -0,0 +1,3 @@
+from . import controllers, dependencies, dtos, services
+
+__all__ = ["controllers", "services", "dtos", "dependencies"]
diff --git a/src/app/domain/tags/controllers.py b/src/app/domain/tags/controllers.py
new file mode 100644
index 00000000..92ed60b6
--- /dev/null
+++ b/src/app/domain/tags/controllers.py
@@ -0,0 +1,133 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Annotated
+
+from litestar import Controller, delete, get, patch, post
+from litestar.di import Provide
+
+from app.db.models import Tag
+from app.domain.accounts.guards import requires_active_user, requires_superuser
+from app.domain.tags import urls
+from app.domain.tags.dependencies import provide_tags_service
+from app.domain.tags.dtos import TagCreateDTO, TagDTO, TagUpdateDTO
+from app.domain.tags.services import TagService
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from advanced_alchemy.filters import FilterTypes
+    from advanced_alchemy.service import OffsetPagination
+    from litestar.dto import DTOData
+    from litestar.params import Dependency, Parameter
+
+
+class TagController(Controller):
+    """Handles the interactions within the Tag objects."""
+
+    guards = [requires_active_user]
+    dependencies = {"tags_service": Provide(provide_tags_service)}
+    signature_namespace = {"TagService": TagService, "Tag": Tag}
+    tags = ["Tags"]
+    return_dto = TagDTO
+
+    @get(
+        operation_id="ListTags",
+        name="tags:list",
+        summary="List Tags",
+        description="Retrieve the tags.",
+        path=urls.TAG_LIST,
+    )
+    async def list_tags(
+        self,
+        tags_service: TagService,
+        filters: Annotated[list[FilterTypes], Dependency(skip_validation=True)],
+    ) -> OffsetPagination[Tag]:
+        """List tags."""
+        results, total = await tags_service.list_and_count(*filters)
+        return tags_service.to_schema(data=results, total=total, filters=filters)
+
+    @get(
+        operation_id="GetTag",
+        name="tags:get",
+        path=urls.TAG_DETAILS,
+        summary="Retrieve the details of a tag.",
+    )
+    async def get_tag(
+        self,
+        tags_service: TagService,
+        tag_id: Annotated[
+            UUID,
+            Parameter(
+                title="Tag ID",
+                description="The tag to retrieve.",
+            ),
+        ],
+    ) -> Tag:
+        """Get a tag."""
+        db_obj = await tags_service.get(tag_id)
+        return tags_service.to_schema(db_obj)
+
+    @post(
+        operation_id="CreateTag",
+        name="tags:create",
+        summary="Create a new tag.",
+        cache_control=None,
+        # Bug fix: the description previously read "A tag is a place where you can
+        # upload and group collections of databases." -- copy-pasted from another resource.
+        description="Create a tag that can be used to group and classify records.",
+        guards=[requires_superuser],
+        path=urls.TAG_CREATE,
+        dto=TagCreateDTO,
+    )
+    async def create_tag(
+        self,
+        tags_service: TagService,
+        data: DTOData[Tag],
+    ) -> Tag:
+        """Create a new tag."""
+        db_obj = await tags_service.create(data.create_instance())
+        return tags_service.to_schema(db_obj)
+
+    @patch(
+        operation_id="UpdateTag",
+        name="tags:update",
+        path=urls.TAG_UPDATE,
+        guards=[requires_superuser],
+        dto=TagUpdateDTO,
+    )
+    async def update_tag(
+        self,
+        tags_service: TagService,
+        data: DTOData[Tag],
+        tag_id: Annotated[
+            UUID,
+            Parameter(
+                title="Tag ID",
+                description="The tag to update.",
+            ),
+        ],
+    ) -> Tag:
+        """Update a tag."""
+        db_obj = await tags_service.update(item_id=tag_id, data=data.create_instance())
+        return tags_service.to_schema(db_obj)
+
+    @delete(
+        operation_id="DeleteTag",
+        name="tags:delete",
+        path=urls.TAG_DELETE,
+        summary="Remove Tag",
+        description="Removes a tag and its associations",
+        guards=[requires_superuser],
+        return_dto=None,
+    )
+    async def delete_tag(
+        self,
+        tags_service: TagService,
+        tag_id: Annotated[
+            UUID,
+            Parameter(
+                title="Tag ID",
+                description="The tag to delete.",
+            ),
+        ],
+    ) -> None:
+        """Delete a tag."""
+        _ = await tags_service.delete(tag_id)
diff --git a/src/app/domain/tags/dependencies.py b/src/app/domain/tags/dependencies.py
new file mode 100644
index 00000000..ae6810b5
--- /dev/null
+++ b/src/app/domain/tags/dependencies.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from sqlalchemy.orm import selectinload
+
+from app.db.models import Tag
+from app.domain.tags.services import TagService
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from sqlalchemy.ext.asyncio import AsyncSession
+
+__all__ = ["provide_tags_service"]
+
+
+async def provide_tags_service(
+    db_session: AsyncSession | None = None,
+) -> AsyncGenerator[TagService, None]:
+    """Provide Tags service.
+
+    Args:
+        db_session (AsyncSession | None, optional): current database session. Defaults to None.
+
+    Yields:
+        TagService: A Tag service object.
+    """
+    async with TagService.new(
+        session=db_session,
+        # Eagerly load related teams; recursion_depth bounds self-referential loading.
+        load=selectinload(Tag.teams, recursion_depth=2),
+    ) as service:
+        yield service
diff --git a/src/app/domain/tags/dtos.py b/src/app/domain/tags/dtos.py
new file mode 100644
index 00000000..4b65d4d3
--- /dev/null
+++ b/src/app/domain/tags/dtos.py
@@ -0,0 +1,21 @@
+from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO
+
+from app.db.models import Tag
+from app.lib import dto
+
+__all__ = ["TagCreateDTO", "TagDTO", "TagUpdateDTO"]
+
+
+# database model
+
+
+class TagDTO(SQLAlchemyDTO[Tag]):
+    """Read DTO for Tag; timestamps and team links are excluded from responses."""
+    config = dto.config(max_nested_depth=0, exclude={"created_at", "updated_at", "teams"})
+
+
+class TagCreateDTO(SQLAlchemyDTO[Tag]):
+    """Create DTO for Tag; server-managed fields (id, timestamps, teams) are excluded."""
+    config = dto.config(max_nested_depth=0, exclude={"id", "created_at", "updated_at", "teams"})
+
+
+class TagUpdateDTO(SQLAlchemyDTO[Tag]):
+    """Partial-update DTO for Tag; all remaining fields are optional (partial=True)."""
+    config = dto.config(max_nested_depth=0, exclude={"id", "created_at", "updated_at", "teams"}, partial=True)
diff --git a/src/app/domain/tags/repositories.py b/src/app/domain/tags/repositories.py
new file mode 100644
index 00000000..2c8aad8e
--- /dev/null
+++ b/src/app/domain/tags/repositories.py
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from advanced_alchemy.repository import SQLAlchemyAsyncRepository
+
+from app.db.models import Tag
+
+__all__ = ("TagRepository",)
+
+
class TagRepository(SQLAlchemyAsyncRepository[Tag]):
    """Tag Repository.

    Standard async repository over the ``Tag`` model; no custom queries.
    """

    model_type = Tag
diff --git a/src/app/domain/tags/services.py b/src/app/domain/tags/services.py
new file mode 100644
index 00000000..4c2025c0
--- /dev/null
+++ b/src/app/domain/tags/services.py
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService
+
+from app.db.models import Tag
+
+from .repositories import TagRepository
+
+__all__ = ("TagService",)
+
+
class TagService(SQLAlchemyAsyncRepositoryService[Tag]):
    """Handles basic lookup operations for a Tag."""

    repository_type = TagRepository
    # upsert/match operations treat two tags with the same name as the same row
    match_fields = ["name"]
diff --git a/src/app/domain/tags/urls.py b/src/app/domain/tags/urls.py
new file mode 100644
index 00000000..42e36b2c
--- /dev/null
+++ b/src/app/domain/tags/urls.py
@@ -0,0 +1,5 @@
# Route paths for the Tag API.  List/create share the collection path;
# update/delete/details share the item path keyed by a UUID tag id.
TAG_LIST = "/api/tags"
TAG_CREATE = "/api/tags"
TAG_UPDATE = "/api/tags/{tag_id:uuid}"
TAG_DELETE = "/api/tags/{tag_id:uuid}"
TAG_DETAILS = "/api/tags/{tag_id:uuid}"
diff --git a/src/app/domain/teams/__init__.py b/src/app/domain/teams/__init__.py
new file mode 100644
index 00000000..27604162
--- /dev/null
+++ b/src/app/domain/teams/__init__.py
@@ -0,0 +1,4 @@
+"""Team Application Module."""
+from . import controllers, dependencies, guards, schemas, services, signals
+
+__all__ = ["controllers", "guards", "services", "schemas", "dependencies", "signals"]
diff --git a/src/app/domain/teams/controllers/__init__.py b/src/app/domain/teams/controllers/__init__.py
new file mode 100644
index 00000000..d2b08497
--- /dev/null
+++ b/src/app/domain/teams/controllers/__init__.py
@@ -0,0 +1,5 @@
+from .team_invitation import TeamInvitationController
+from .team_member import TeamMemberController
+from .teams import TeamController
+
+__all__ = ["TeamInvitationController", "TeamMemberController", "TeamController"]
diff --git a/src/app/domain/teams/controllers/team_invitation.py b/src/app/domain/teams/controllers/team_invitation.py
new file mode 100644
index 00000000..a6c46e32
--- /dev/null
+++ b/src/app/domain/teams/controllers/team_invitation.py
@@ -0,0 +1,14 @@
+"""Team Invitation Controllers."""
+from __future__ import annotations
+
+from litestar import Controller
+from litestar.di import Provide
+
+from app.domain.teams.dependencies import provide_team_invitations_service
+
+
class TeamInvitationController(Controller):
    """Team Invitations.

    No routes yet; registers the invitation service dependency for
    handlers added later.
    """

    tags = ["Teams"]
    dependencies = {"team_invitations_service": Provide(provide_team_invitations_service)}
diff --git a/src/app/domain/teams/controllers/team_member.py b/src/app/domain/teams/controllers/team_member.py
new file mode 100644
index 00000000..f9888dbf
--- /dev/null
+++ b/src/app/domain/teams/controllers/team_member.py
@@ -0,0 +1,94 @@
+"""Team Member Controllers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from advanced_alchemy.exceptions import IntegrityError
+from litestar import Controller, post
+from litestar.di import Provide
+from litestar.params import Parameter
+
+from app.db.models import TeamMember, TeamRoles
+from app.domain.accounts.dependencies import provide_users_service
+from app.domain.accounts.services import UserService
+from app.domain.teams import urls
+from app.domain.teams.dependencies import provide_team_members_service, provide_teams_service
+from app.domain.teams.schemas import Team, TeamMemberModify
+from app.domain.teams.services import TeamMemberService, TeamService
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+
class TeamMemberController(Controller):
    """Team Members."""

    tags = ["Team Members"]
    dependencies = {
        "teams_service": Provide(provide_teams_service),
        "team_members_service": Provide(provide_team_members_service),
        "users_service": Provide(provide_users_service),
    }
    signature_namespace = {
        "TeamService": TeamService,
        "UserService": UserService,
        "TeamMemberService": TeamMemberService,
    }

    @post(
        operation_id="AddMemberToTeam",
        name="teams:add-member",
        path=urls.TEAM_ADD_MEMBER,
    )
    async def add_member_to_team(
        self,
        teams_service: TeamService,
        users_service: UserService,
        data: TeamMemberModify,
        team_id: UUID = Parameter(
            title="Team ID",
            description="The team to update.",
        ),
    ) -> Team:
        """Add a member to a team.

        Raises:
            IntegrityError: If the user is already a member of the team.
        """
        team_obj = await teams_service.get(team_id)
        # NOTE(review): user_name carries the member's email address.
        user_obj = await users_service.get_one(email=data.user_name)
        is_member = any(membership.team.id == team_id for membership in user_obj.teams)
        if is_member:
            msg = "User is already a member of the team."
            raise IntegrityError(msg)
        team_obj.members.append(TeamMember(user_id=user_obj.id, role=TeamRoles.MEMBER))
        team_obj = await teams_service.update(item_id=team_id, data=team_obj)
        return teams_service.to_schema(schema_type=Team, data=team_obj)

    @post(
        operation_id="RemoveMemberFromTeam",
        name="teams:remove-member",
        summary="Remove Team Member",
        description="Removes a member from a team",
        path=urls.TEAM_REMOVE_MEMBER,
    )
    async def remove_member_from_team(
        self,
        teams_service: TeamService,
        team_members_service: TeamMemberService,
        users_service: UserService,
        data: TeamMemberModify,
        team_id: UUID = Parameter(
            title="Team ID",
            description="The team to delete.",
        ),
    ) -> Team:
        """Remove a member from a team.

        Raises:
            IntegrityError: If the user is not a member of this team.
        """
        user_obj = await users_service.get_one(email=data.user_name)
        removed_member = False
        for membership in user_obj.teams:
            # Match on the target team, mirroring add_member_to_team.  The
            # previous check (membership.user_id == user_obj.id) was true for
            # every membership and removed the user from ALL their teams.
            if membership.team.id == team_id:
                removed_member = True
                _ = await team_members_service.delete(membership.id)
        if not removed_member:
            msg = "User is not a member of this team."
            raise IntegrityError(msg)
        team_obj = await teams_service.get(team_id)
        return teams_service.to_schema(schema_type=Team, data=team_obj)
diff --git a/src/app/domain/teams/controllers/teams.py b/src/app/domain/teams/controllers/teams.py
new file mode 100644
index 00000000..f8c491b8
--- /dev/null
+++ b/src/app/domain/teams/controllers/teams.py
@@ -0,0 +1,151 @@
+"""Team Controllers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Annotated
+
+from litestar import Controller, delete, get, patch, post
+from litestar.di import Provide
+
+from app.config import constants
+from app.db.models import User as UserModel
+from app.domain.accounts.guards import requires_active_user
+from app.domain.teams import urls
+from app.domain.teams.dependencies import provide_teams_service
+from app.domain.teams.guards import requires_team_admin, requires_team_membership
+from app.domain.teams.schemas import Team, TeamCreate, TeamUpdate
+from app.domain.teams.services import TeamService
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from advanced_alchemy.service.pagination import OffsetPagination
+    from litestar.params import Dependency, Parameter
+
+    from app.lib.dependencies import FilterTypes
+
+
class TeamController(Controller):
    """Teams."""

    tags = ["Teams"]
    dependencies = {"teams_service": Provide(provide_teams_service)}
    # All routes require an authenticated, active user; some add team guards.
    guards = [requires_active_user]
    signature_namespace = {
        "TeamService": TeamService,
        "TeamUpdate": TeamUpdate,
        "TeamCreate": TeamCreate,
        "UserModel": UserModel,
    }
    # Plain msgspec schemas are returned; disable DTO layers entirely.
    dto = None
    return_dto = None

    @get(
        operation_id="ListTeams",
        name="teams:list",
        summary="List Teams",
        path=urls.TEAM_LIST,
    )
    async def list_teams(
        self,
        teams_service: TeamService,
        current_user: UserModel,
        filters: Annotated[list[FilterTypes], Dependency(skip_validation=True)],
    ) -> OffsetPagination[Team]:
        """List teams that your account can access."""
        # Superusers (flag or superuser-access role) see every team; everyone
        # else only sees teams they belong to.
        show_all = bool(
            current_user.is_superuser
            or any(
                assigned_role.role.name
                for assigned_role in current_user.roles
                if assigned_role.role.name in {constants.SUPERUSER_ACCESS_ROLE}
            ),
        )
        if show_all:
            results, total = await teams_service.list_and_count(*filters)
        else:
            results, total = await teams_service.get_user_teams(*filters, user_id=current_user.id)
        return teams_service.to_schema(data=results, total=total, schema_type=Team, filters=filters)

    @post(
        operation_id="CreateTeam",
        name="teams:create",
        summary="Create a new team.",
        path=urls.TEAM_CREATE,
    )
    async def create_team(
        self,
        teams_service: TeamService,
        current_user: UserModel,
        data: TeamCreate,
    ) -> Team:
        """Create a new team with the requesting user as its owner."""
        obj = data.to_dict()
        # The service consumes owner/owner_id to create the owning membership.
        obj.update({"owner_id": current_user.id, "owner": current_user})
        db_obj = await teams_service.create(obj)
        return teams_service.to_schema(schema_type=Team, data=db_obj)

    @get(
        operation_id="GetTeam",
        name="teams:get",
        guards=[requires_team_membership],
        summary="Retrieve the details of a team.",
        path=urls.TEAM_DETAIL,
    )
    async def get_team(
        self,
        teams_service: TeamService,
        team_id: Annotated[
            UUID,
            Parameter(
                title="Team ID",
                description="The team to retrieve.",
            ),
        ],
    ) -> Team:
        """Get details about a team."""
        db_obj = await teams_service.get(team_id)
        return teams_service.to_schema(schema_type=Team, data=db_obj)

    @patch(
        operation_id="UpdateTeam",
        name="teams:update",
        guards=[requires_team_admin],
        path=urls.TEAM_UPDATE,
    )
    async def update_team(
        self,
        data: TeamUpdate,
        teams_service: TeamService,
        team_id: Annotated[
            UUID,
            Parameter(
                title="Team ID",
                description="The team to update.",
            ),
        ],
    ) -> Team:
        """Update a team."""
        db_obj = await teams_service.update(
            item_id=team_id,
            data=data.to_dict(),
        )
        return teams_service.to_schema(schema_type=Team, data=db_obj)

    @delete(
        operation_id="DeleteTeam",
        name="teams:delete",
        guards=[requires_team_admin],
        summary="Remove Team",
        path=urls.TEAM_DELETE,
    )
    async def delete_team(
        self,
        teams_service: TeamService,
        team_id: Annotated[
            UUID,
            Parameter(title="Team ID", description="The team to delete."),
        ],
    ) -> None:
        """Delete a team."""
        _ = await teams_service.delete(team_id)
diff --git a/src/app/domain/teams/dependencies.py b/src/app/domain/teams/dependencies.py
new file mode 100644
index 00000000..1450d19b
--- /dev/null
+++ b/src/app/domain/teams/dependencies.py
@@ -0,0 +1,58 @@
+"""Team dependency providers."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from sqlalchemy.orm import joinedload, noload, selectinload
+
+from app.db.models import Team, TeamInvitation, TeamMember
+from app.domain.teams.services import TeamInvitationService, TeamMemberService, TeamService
+
+__all__ = ("provide_team_members_service", "provide_teams_service", "provide_team_invitations_service")
+
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from sqlalchemy.ext.asyncio import AsyncSession
+
+
async def provide_teams_service(db_session: AsyncSession) -> AsyncGenerator[TeamService, None]:
    """Yield a request-scoped ``TeamService``.

    Eagerly loads each team's tags and its members (with each member's user
    joined in) so handlers can serialize without extra round trips.
    """
    load_options = [
        selectinload(Team.tags),
        selectinload(Team.members).options(joinedload(TeamMember.user, innerjoin=True)),
    ]
    async with TeamService.new(session=db_session, load=load_options) as service:
        yield service
+
+
async def provide_team_members_service(db_session: AsyncSession) -> AsyncGenerator[TeamMemberService, None]:
    """Yield a request-scoped ``TeamMemberService``.

    Loads only the membership row plus the bare team and user rows; every
    other relationship is explicitly no-loaded.
    """
    load_options = [
        noload("*"),
        joinedload(TeamMember.team, innerjoin=True).options(noload("*")),
        joinedload(TeamMember.user, innerjoin=True).options(noload("*")),
    ]
    async with TeamMemberService.new(session=db_session, load=load_options) as service:
        yield service
+
+
async def provide_team_invitations_service(db_session: AsyncSession) -> AsyncGenerator[TeamInvitationService, None]:
    """Yield a request-scoped ``TeamInvitationService``.

    Loads only the invitation row plus the bare team and inviting user;
    every other relationship is explicitly no-loaded.
    """
    load_options = [
        noload("*"),
        joinedload(TeamInvitation.team, innerjoin=True).options(noload("*")),
        joinedload(TeamInvitation.invited_by, innerjoin=True).options(noload("*")),
    ]
    async with TeamInvitationService.new(session=db_session, load=load_options) as service:
        yield service
diff --git a/src/app/domain/teams/guards.py b/src/app/domain/teams/guards.py
new file mode 100644
index 00000000..8af6f6eb
--- /dev/null
+++ b/src/app/domain/teams/guards.py
@@ -0,0 +1,80 @@
+from uuid import UUID
+
+from litestar.connection import ASGIConnection
+from litestar.exceptions import PermissionDeniedException
+from litestar.handlers.base import BaseRouteHandler
+
+from app.config import constants
+from app.db.models import TeamRoles
+
+__all__ = ["requires_team_admin", "requires_team_membership", "requires_team_ownership"]
+
+
def requires_team_membership(connection: ASGIConnection, _: BaseRouteHandler) -> None:
    """Verify the connection user is a member of the team.

    Args:
        connection (ASGIConnection): The inbound connection; ``team_id`` is read from its path params.
        _ (BaseRouteHandler): The route handler (unused).

    Raises:
        PermissionDeniedException: If the user is neither a superuser nor a member of the team.
    """
    team_id = connection.path_params["team_id"]
    # Use assigned_role.role.name for both the yielded value and the filter,
    # consistent with requires_team_ownership (was: assigned_role.role_name).
    has_system_role = any(
        assigned_role.role.name
        for assigned_role in connection.user.roles
        if assigned_role.role.name in {constants.SUPERUSER_ACCESS_ROLE}
    )
    has_team_role = any(membership.team.id == team_id for membership in connection.user.teams)
    if connection.user.is_superuser or has_system_role or has_team_role:
        return
    raise PermissionDeniedException(detail="Insufficient permissions to access team.")
+
+
def requires_team_admin(connection: ASGIConnection, _: BaseRouteHandler) -> None:
    """Verify the connection user is a team admin.

    Args:
        connection (ASGIConnection): The inbound connection; ``team_id`` is read from its path params.
        _ (BaseRouteHandler): The route handler (unused).

    Raises:
        PermissionDeniedException: If the user is neither a superuser nor an ADMIN member of the team.
    """
    team_id = connection.path_params["team_id"]
    # Use assigned_role.role.name for both the yielded value and the filter,
    # consistent with requires_team_ownership (was: assigned_role.role_name).
    has_system_role = any(
        assigned_role.role.name
        for assigned_role in connection.user.roles
        if assigned_role.role.name in {constants.SUPERUSER_ACCESS_ROLE}
    )
    has_team_role = any(
        membership.team.id == team_id and membership.role == TeamRoles.ADMIN for membership in connection.user.teams
    )
    if connection.user.is_superuser or has_system_role or has_team_role:
        return
    raise PermissionDeniedException(detail="Insufficient permissions to access team.")
+
+
def requires_team_ownership(connection: ASGIConnection, _: BaseRouteHandler) -> None:
    """Verify that the connection user is the team owner.

    Args:
        connection (ASGIConnection): The inbound connection; ``team_id`` is read from its path params.
        _ (BaseRouteHandler): The route handler (unused).

    Raises:
        PermissionDeniedException: If the user is neither a superuser nor an owner-member of the team.
    """
    # The path declares `{team_id:uuid}`, so the framework already delivers a
    # UUID; the sibling guards use the raw value.  Re-wrapping it in UUID(...)
    # fails on a UUID instance, so read it directly for consistency.
    team_id = connection.path_params["team_id"]
    has_system_role = any(
        assigned_role.role.name
        for assigned_role in connection.user.roles
        if assigned_role.role.name in {constants.SUPERUSER_ACCESS_ROLE}
    )
    has_team_role = any(membership.team.id == team_id and membership.is_owner for membership in connection.user.teams)
    if connection.user.is_superuser or has_system_role or has_team_role:
        return

    msg = "Insufficient permissions to access team."
    raise PermissionDeniedException(detail=msg)
diff --git a/src/app/domain/teams/repositories.py b/src/app/domain/teams/repositories.py
new file mode 100644
index 00000000..359cb6b6
--- /dev/null
+++ b/src/app/domain/teams/repositories.py
@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+from uuid import UUID  # noqa: TCH003
+
+from advanced_alchemy.repository import SQLAlchemyAsyncRepository, SQLAlchemyAsyncSlugRepository
+from sqlalchemy import ColumnElement, select
+from sqlalchemy.orm import joinedload, selectinload
+
+from app.db.models import Team, TeamInvitation, TeamMember
+
+if TYPE_CHECKING:
+    from advanced_alchemy.filters import FilterTypes
+
+__all__ = (
+    "TeamInvitationRepository",
+    "TeamMemberRepository",
+    "TeamRepository",
+)
+
+
class TeamRepository(SQLAlchemyAsyncSlugRepository[Team]):
    """Team Repository."""

    model_type = Team

    async def get_user_teams(
        self,
        *filters: FilterTypes | ColumnElement[bool],
        user_id: UUID,
        auto_expunge: bool | None = None,
        force_basic_query_mode: bool | None = None,
        **kwargs: Any,
    ) -> tuple[list[Team], int]:
        """Get paginated list and total count of teams that a user can access."""
        # Restrict to teams where the user holds a membership row, ordered by
        # name, with tags and members (plus each member's user) eager-loaded.
        statement = (
            select(Team)
            .join(TeamMember, onclause=Team.id == TeamMember.team_id, isouter=False)
            .where(TeamMember.user_id == user_id)
            .order_by(Team.name)
            .options(
                selectinload(Team.tags),
                selectinload(Team.members).options(joinedload(TeamMember.user, innerjoin=True)),
            )
        )
        return await self.list_and_count(
            *filters,
            statement=statement,
            auto_expunge=auto_expunge,
            force_basic_query_mode=force_basic_query_mode,
            **kwargs,
        )
+
+
class TeamMemberRepository(SQLAlchemyAsyncRepository[TeamMember]):
    """Team Member Repository.

    Standard async repository over the ``TeamMember`` model; no custom queries.
    """

    model_type = TeamMember
+
+
class TeamInvitationRepository(SQLAlchemyAsyncRepository[TeamInvitation]):
    """Team Invitation Repository.

    Standard async repository over the ``TeamInvitation`` model; no custom queries.
    """

    model_type = TeamInvitation
diff --git a/src/app/domain/teams/schemas.py b/src/app/domain/teams/schemas.py
new file mode 100644
index 00000000..a9162fd9
--- /dev/null
+++ b/src/app/domain/teams/schemas.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+from uuid import UUID  # noqa: TCH003
+
+import msgspec
+
+from app.db.models.team_roles import TeamRoles
+from app.lib.schema import CamelizedBaseStruct
+
+
class TeamTag(CamelizedBaseStruct):
    """A tag attached to a team, as exposed over the API."""

    id: UUID
    slug: str
    name: str
+
+
class TeamMember(CamelizedBaseStruct):
    """A team membership (user + role) as exposed over the API."""

    id: UUID
    user_id: UUID
    email: str
    name: str | None = None
    role: TeamRoles | None = TeamRoles.MEMBER
    is_owner: bool | None = False
+
+
class Team(CamelizedBaseStruct):
    """Team representation returned by the team endpoints."""

    id: UUID
    name: str
    description: str | None = None
    # NOTE(review): msgspec copies mutable defaults per instance, so the
    # shared-default pitfall of plain classes should not apply -- confirm.
    members: list[TeamMember] = []
    tags: list[TeamTag] = []
+
+
class TeamCreate(CamelizedBaseStruct):
    """Payload for creating a team; ``tags`` is a list of tag names."""

    name: str
    description: str | None = None
    tags: list[str] = []
+
+
class TeamUpdate(CamelizedBaseStruct, omit_defaults=True):
    """Partial team update; UNSET fields are omitted when serialized."""

    name: str | None | msgspec.UnsetType = msgspec.UNSET
    description: str | None | msgspec.UnsetType = msgspec.UNSET
    tags: list[str] | None | msgspec.UnsetType = msgspec.UNSET
+
+
class TeamMemberModify(CamelizedBaseStruct):
    """Team Member Modify."""

    # NOTE(review): despite the name, the member controllers look this value
    # up as an email address (users_service.get_one(email=data.user_name)).
    user_name: str
diff --git a/src/app/domain/teams/services.py b/src/app/domain/teams/services.py
new file mode 100644
index 00000000..4f18d255
--- /dev/null
+++ b/src/app/domain/teams/services.py
@@ -0,0 +1,165 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from advanced_alchemy.exceptions import RepositoryError
+from advanced_alchemy.service import SQLAlchemyAsyncRepositoryService, is_dict, is_msgspec_model, is_pydantic_model
+from advanced_alchemy.utils.text import slugify
+from uuid_utils.compat import uuid4
+
+from app.db.models import Team, TeamInvitation, TeamMember, TeamRoles
+from app.db.models.tag import Tag
+from app.db.models.user import User  # noqa: TCH001
+
+from .repositories import TeamInvitationRepository, TeamMemberRepository, TeamRepository
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from uuid import UUID
+
+    from advanced_alchemy.filters import FilterTypes
+    from advanced_alchemy.repository._util import LoadSpec
+    from advanced_alchemy.service import ModelDictT
+    from msgspec import Struct
+    from sqlalchemy.orm import InstrumentedAttribute
+
+__all__ = (
+    "TeamInvitationService",
+    "TeamMemberService",
+    "TeamService",
+)
+
+
class TeamService(SQLAlchemyAsyncRepositoryService[Team]):
    """Team Service.

    Adds owner-membership creation and tag reconciliation on top of the
    generic repository service.
    """

    repository_type = TeamRepository
    match_fields = ["name"]

    def __init__(self, **repo_kwargs: Any) -> None:
        # Narrow the repository attribute to TeamRepository so custom
        # methods (get_user_teams, get_available_slug) type-check.
        self.repository: TeamRepository = self.repository_type(**repo_kwargs)
        self.model_type = self.repository.model_type

    async def get_user_teams(
        self,
        *filters: FilterTypes,
        user_id: UUID,
        **kwargs: Any,
    ) -> tuple[list[Team], int]:
        """Get all teams for a user."""
        return await self.repository.get_user_teams(*filters, user_id=user_id, **kwargs)

    async def create(
        self,
        data: ModelDictT[Team],
        *,
        load: LoadSpec | None = None,
        execution_options: dict[str, Any] | None = None,
        auto_commit: bool | None = None,
        auto_expunge: bool | None = None,
        auto_refresh: bool | None = None,
    ) -> Team:
        """Create a new team with an owner.

        When ``data`` is a dict, ``owner``/``owner_id`` and ``tags`` are
        popped out before model conversion: the owner becomes an ADMIN member
        flagged ``is_owner`` and each tag name is resolved to a unique Tag row.

        Raises:
            RepositoryError: If a dict payload has no ``owner_id``.
        """
        owner_id: UUID | None = None
        owner: User | None = None
        tags_added: list[str] = []
        if isinstance(data, dict):
            data["id"] = data.get("id", uuid4())
            owner = data.pop("owner", None)
            owner_id = data.pop("owner_id", None)
            if owner_id is None:
                msg = "'owner_id' is required to create a team."
                raise RepositoryError(msg)
            tags_added = data.pop("tags", [])
        data = await self.to_model(data, "create")
        # Prefer the full owner object when present; fall back to the id.
        if owner:
            data.members.append(TeamMember(user=owner, role=TeamRoles.ADMIN, is_owner=True))
        elif owner_id:
            data.members.append(TeamMember(user_id=owner_id, role=TeamRoles.ADMIN, is_owner=True))
        if tags_added:
            data.tags.extend(
                [
                    await Tag.as_unique_async(self.repository.session, name=tag_text, slug=slugify(tag_text))
                    for tag_text in tags_added
                ],
            )
        # Force expunge / skip refresh so the in-memory instance (with the
        # members and tags attached above) is what gets returned.
        await super().create(
            data=data,
            load=load,
            execution_options=execution_options,
            auto_commit=auto_commit,
            auto_expunge=True,
            auto_refresh=False,
        )
        return data

    async def update(
        self,
        data: ModelDictT[Team],
        item_id: Any | None = None,
        *,
        id_attribute: str | InstrumentedAttribute[Any] | None = None,
        load: LoadSpec | None = None,
        execution_options: dict[str, Any] | None = None,
        attribute_names: Iterable[str] | None = None,
        with_for_update: bool | None = None,
        auto_commit: bool | None = None,
        auto_expunge: bool | None = None,
        auto_refresh: bool | None = None,
    ) -> Team:
        """Wrap repository update operation.

        For dict payloads, reconciles the team's tag collection against the
        requested tag names before delegating to the base update.

        Returns:
            Updated representation.
        """
        tags_updated: list[str] = []
        if isinstance(data, dict):
            tags_updated.extend(data.pop("tags", None) or [])
            data["id"] = item_id
            data = await self.to_model(data, "update")
            # Drop tags not in the requested list and add any new names as
            # unique Tag rows.  NOTE(review): if the payload omitted "tags"
            # entirely, tags_updated is empty -- confirm that to_model leaves
            # data.tags empty in that case so nothing is actually removed.
            existing_tags = [tag.name for tag in data.tags]
            tags_to_remove = [tag for tag in data.tags if tag.name not in tags_updated]
            tags_to_add = [tag for tag in tags_updated if tag not in existing_tags]
            for tag_rm in tags_to_remove:
                data.tags.remove(tag_rm)
            data.tags.extend(
                [
                    await Tag.as_unique_async(self.repository.session, name=tag_text, slug=slugify(tag_text))
                    for tag_text in tags_to_add
                ],
            )
        return await super().update(
            data=data,
            item_id=item_id,
            attribute_names=attribute_names,
            id_attribute=id_attribute,
            load=load,
            execution_options=execution_options,
            with_for_update=with_for_update,
            auto_commit=auto_commit,
            auto_expunge=auto_expunge,
            auto_refresh=auto_refresh,
        )

    async def to_model(self, data: Team | dict[str, Any] | Struct, operation: str | None = None) -> Team:
        """Convert input into a ``Team`` model, generating a slug when one is absent."""
        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "create" and data.slug is None:  # type: ignore[union-attr]
            data.slug = await self.repository.get_available_slug(data.name)  # type: ignore[union-attr]
        if (is_msgspec_model(data) or is_pydantic_model(data)) and operation == "update" and data.slug is None:  # type: ignore[union-attr]
            data.slug = await self.repository.get_available_slug(data.name)  # type: ignore[union-attr]
        if is_dict(data) and "slug" not in data and operation == "create":
            data["slug"] = await self.repository.get_available_slug(data["name"])
        if is_dict(data) and "slug" not in data and "name" in data and operation == "update":
            data["slug"] = await self.repository.get_available_slug(data["name"])
        return await super().to_model(data, operation)
+
+
class TeamMemberService(SQLAlchemyAsyncRepositoryService[TeamMember]):
    """Team Member Service.

    Plain repository service over ``TeamMember``; no custom behavior.
    """

    repository_type = TeamMemberRepository
+
+
class TeamInvitationService(SQLAlchemyAsyncRepositoryService[TeamInvitation]):
    """Team Invitation Service.

    Plain repository service over ``TeamInvitation``; no custom behavior.
    """

    repository_type = TeamInvitationRepository
diff --git a/src/app/domain/teams/signals.py b/src/app/domain/teams/signals.py
new file mode 100644
index 00000000..26f219b0
--- /dev/null
+++ b/src/app/domain/teams/signals.py
@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import structlog
+from litestar.events import listener
+
+from app.config.app import alchemy
+
+from .dependencies import provide_teams_service
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+logger = structlog.get_logger()
+
+
@listener("team_created")
async def team_created_event_handler(
    team_id: UUID,
) -> None:
    """Executes when a new team is created.

    Args:
        team_id: The primary key of the team that was created.
    """
    # Message previously said "post signup flow" (copy-paste from the user
    # signup handler); this event fires on team creation.
    await logger.ainfo("Running post team creation flow.")
    async with alchemy.get_session() as db_session:
        service_generator = provide_teams_service(db_session)
        try:
            service = await anext(service_generator)
            obj = await service.get_one_or_none(id=team_id)
            if obj is None:
                await logger.aerror("Could not locate the specified team", id=team_id)
            else:
                await logger.ainfo("Found team", **obj.to_dict())
        finally:
            # Close the generator so the service context manager's cleanup
            # runs (bare anext() left it suspended inside `async with`).
            await service_generator.aclose()
diff --git a/src/app/domain/teams/urls.py b/src/app/domain/teams/urls.py
new file mode 100644
index 00000000..7354cc9d
--- /dev/null
+++ b/src/app/domain/teams/urls.py
@@ -0,0 +1,9 @@
# Route paths for the Team API.  Item routes are keyed by a UUID team id;
# member add/remove are POST sub-routes under the team.
TEAM_LIST = "/api/teams"
TEAM_DELETE = "/api/teams/{team_id:uuid}"
TEAM_DETAIL = "/api/teams/{team_id:uuid}"
TEAM_UPDATE = "/api/teams/{team_id:uuid}"
TEAM_CREATE = "/api/teams"
TEAM_INDEX = "/api/teams/{team_id:uuid}"
TEAM_INVITATION_LIST = "/api/teams/{team_id:uuid}/invitations"
TEAM_ADD_MEMBER = "/api/teams/{team_id:uuid}/members/add"
TEAM_REMOVE_MEMBER = "/api/teams/{team_id:uuid}/members/remove"
diff --git a/src/pyspa/py.typed b/src/app/lib/__init__.py
similarity index 100%
rename from src/pyspa/py.typed
rename to src/app/lib/__init__.py
diff --git a/src/app/lib/crypt.py b/src/app/lib/crypt.py
new file mode 100644
index 00000000..dd86e64a
--- /dev/null
+++ b/src/app/lib/crypt.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+import asyncio
+import base64
+
+from passlib.context import CryptContext
+
+password_crypt_context = CryptContext(schemes=["argon2"], deprecated="auto")
+
+
def get_encryption_key(secret: str) -> bytes:
    """Get Encryption Key.

    Args:
        secret (str): Secret key used for encryption

    Returns:
        bytes: a URL safe encoded version of secret
    """
    # Space-pad short secrets out to exactly 32 characters.  Secrets longer
    # than 32 characters pass through unchanged -- NOTE(review): confirm
    # downstream consumers accept keys longer than 32 bytes.
    if len(secret) <= 32:
        secret = secret.ljust(32)
    return base64.urlsafe_b64encode(secret.encode())
+
+
async def get_password_hash(password: str | bytes) -> str:
    """Hash a plain password with the module's Argon2 crypt context.

    Hashing is CPU-bound, so the work is offloaded to the default executor
    to avoid blocking the event loop.

    Args:
        password: Plain password
    Returns:
        str: Hashed password
    """
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, password_crypt_context.hash, password)
+
+
async def verify_password(plain_password: str | bytes, hashed_password: str) -> bool:
    """Verify Password.

    Args:
        plain_password (str | bytes): The string or byte password
        hashed_password (str): the hash of the password

    Returns:
        bool: True if password matches hash.
    """
    # verify_and_update returns (valid, new_hash); the rehash suggestion is
    # intentionally discarded here.
    loop = asyncio.get_running_loop()
    valid, _ = await loop.run_in_executor(
        None,
        password_crypt_context.verify_and_update,
        plain_password,
        hashed_password,
    )
    return bool(valid)
diff --git a/src/app/lib/dependencies.py b/src/app/lib/dependencies.py
new file mode 100644
index 00000000..5b1990ba
--- /dev/null
+++ b/src/app/lib/dependencies.py
@@ -0,0 +1,228 @@
+"""Application dependency providers."""
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Literal
+from uuid import UUID
+
+from advanced_alchemy.filters import (
+    BeforeAfter,
+    CollectionFilter,
+    FilterTypes,
+    LimitOffset,
+    OrderBy,
+    SearchFilter,
+)
+from litestar.di import Provide
+from litestar.params import Dependency, Parameter
+
+from app.config import constants
+
+__all__ = [
+    "create_collection_dependencies",
+    "provide_created_filter",
+    "provide_filter_dependencies",
+    "provide_id_filter",
+    "provide_limit_offset_pagination",
+    "provide_updated_filter",
+    "provide_search_filter",
+    "provide_order_by",
+    "BeforeAfter",
+    "CollectionFilter",
+    "LimitOffset",
+    "OrderBy",
+    "SearchFilter",
+    "FilterTypes",
+]
+
+DTorNone = datetime | None
+StringOrNone = str | None
+UuidOrNone = UUID | None
+BooleanOrNone = bool | None
+SortOrderOrNone = Literal["asc", "desc"] | None
+"""Aggregate type alias of the types supported for collection filtering."""
+FILTERS_DEPENDENCY_KEY = "filters"
+CREATED_FILTER_DEPENDENCY_KEY = "created_filter"
+ID_FILTER_DEPENDENCY_KEY = "id_filter"
+LIMIT_OFFSET_DEPENDENCY_KEY = "limit_offset"
+UPDATED_FILTER_DEPENDENCY_KEY = "updated_filter"
+ORDER_BY_DEPENDENCY_KEY = "order_by"
+SEARCH_FILTER_DEPENDENCY_KEY = "search_filter"
+
+
def provide_id_filter(
    ids: list[UUID] | None = Parameter(query="ids", default=None, required=False),
) -> CollectionFilter[UUID]:
    """Build an ``id`` collection filter from the ``ids`` query parameter.

    Return type consumed by ``Repository.filter_in_collection()``.

    Args:
        ids (list[UUID] | None): Parsed out of a comma-separated list of values in query params.

    Returns:
        CollectionFilter[UUID]: Filter for a scoping query to a limited set of identities.
    """
    selected = [] if ids is None else ids
    return CollectionFilter(field_name="id", values=selected)
+
+
def provide_created_filter(
    before: DTorNone = Parameter(query="createdBefore", default=None, required=False),
    after: DTorNone = Parameter(query="createdAfter", default=None, required=False),
) -> BeforeAfter:
    """Build a ``created_at`` date/time window filter.

    Return type consumed by `Repository.filter_on_datetime_field()`.

    Args:
        before (DTorNone): Filter for records created before this date/time.
        after (DTorNone): Filter for records created after this date/time.

    Returns:
        BeforeAfter: Filter for scoping query to instance creation date/time.
    """
    return BeforeAfter(field_name="created_at", before=before, after=after)
+
+
def provide_search_filter(
    field: StringOrNone = Parameter(title="Field to search", query="searchField", default=None, required=False),
    search: StringOrNone = Parameter(title="Field to search", query="searchString", default=None, required=False),
    ignore_case: BooleanOrNone = Parameter(
        title="Search should be case sensitive",
        query="searchIgnoreCase",
        default=None,
        required=False,
    ),
) -> SearchFilter:
    """Build a field search filter from query parameters.

    Return type consumed by `Repository.apply_search_filter()`.

    Args:
        field (StringOrNone): Field name to search.
        search (StringOrNone): Value to search for.
        ignore_case (BooleanOrNone): Whether to ignore case when searching.

    Returns:
        SearchFilter: Filter for searching fields.
    """
    # `None` means "not supplied" and is treated as case-sensitive.
    case_insensitive = bool(ignore_case)
    return SearchFilter(field_name=field, value=search, ignore_case=case_insensitive)  # type: ignore[arg-type]
+
+
def provide_order_by(
    field_name: StringOrNone = Parameter(title="Order by field", query="orderBy", default=None, required=False),
    sort_order: SortOrderOrNone = Parameter(title="Field to search", query="sortOrder", default="desc", required=False),
) -> OrderBy:
    """Build an ordering clause from query parameters.

    Return type consumed by ``Repository.apply_order_by()``.

    Args:
        field_name (StringOrNone): Field name to order by.
        sort_order (SortOrderOrNone): Order field ascending ('asc') or descending ('desc').

    Returns:
        OrderBy: Order by for query.
    """
    ordering = OrderBy(field_name=field_name, sort_order=sort_order)  # type: ignore[arg-type]
    return ordering
+
+
def provide_updated_filter(
    before: DTorNone = Parameter(query="updatedBefore", default=None, required=False),
    after: DTorNone = Parameter(query="updatedAfter", default=None, required=False),
) -> BeforeAfter:
    """Build an ``updated_at`` date/time window filter.

    Return type consumed by ``Repository.filter_on_datetime_field()``.

    Args:
        before (DTorNone): Filter for records updated before this date/time.
        after (DTorNone): Filter for records updated after this date/time.

    Returns:
        BeforeAfter: Filter for scoping query to instance update date/time.
    """
    return BeforeAfter(field_name="updated_at", before=before, after=after)
+
+
def provide_limit_offset_pagination(
    current_page: int = Parameter(ge=1, query="currentPage", default=1, required=False),
    page_size: int = Parameter(
        query="pageSize",
        ge=1,
        default=constants.DEFAULT_PAGINATION_SIZE,
        required=False,
    ),
) -> LimitOffset:
    """Build offset/limit pagination from query parameters.

    Return type consumed by ``Repository.apply_limit_offset_pagination()``.

    Args:
        current_page (int): Page number to return.
        page_size (int): Number of records per page.

    Returns:
        LimitOffset: Filter for query pagination.
    """
    # Pages are 1-indexed in the API; convert to a 0-based row offset.
    offset = page_size * (current_page - 1)
    return LimitOffset(limit=page_size, offset=offset)
+
+
def provide_filter_dependencies(
    created_filter: BeforeAfter = Dependency(skip_validation=True),
    updated_filter: BeforeAfter = Dependency(skip_validation=True),
    id_filter: CollectionFilter = Dependency(skip_validation=True),
    limit_offset: LimitOffset = Dependency(skip_validation=True),
    search_filter: SearchFilter = Dependency(skip_validation=True),
    order_by: OrderBy = Dependency(skip_validation=True),
) -> list[FilterTypes]:
    """Provide common collection route filtering dependencies.

    Add all filters to any route by including this function as a dependency, e.g.:

    .. code-block:: python

        @get
        def get_collection_handler(filters: Filters) -> ...:
            ...

    The dependency is provided in the application layer, so only need to inject the dependency where
    necessary.

    Args:
        created_filter (BeforeAfter): Filter for a scoping query to instance creation date/time.
        updated_filter (BeforeAfter): Filter for a scoping query to instance update date/time.
        id_filter (CollectionFilter): Filter for a scoping query to a limited set of identities.
        limit_offset (LimitOffset): Filter for query pagination.
        search_filter (SearchFilter): Filter for searching fields.
        order_by (OrderBy): Order by for query.

    Returns:
        list[FilterTypes]: List of filters parsed from connection.
    """
    filters: list[FilterTypes] = []
    # Id filter is only meaningful when ids were actually supplied.
    if id_filter.values:  # noqa: PD011
        filters.append(id_filter)
    # Date-window filters and pagination are always applied (no-ops when unset).
    filters += [created_filter, limit_offset, updated_filter]
    has_search = search_filter.field_name is not None and search_filter.value is not None
    if has_search:
        filters.append(search_filter)
    if order_by.field_name is not None:
        filters.append(order_by)
    return filters
+
+
def create_collection_dependencies() -> dict[str, Provide]:
    """Create ORM dependencies.

    Builds the dependency map used by collection (list) endpoints; all
    providers are synchronous, so thread dispatch is disabled.

    Returns:
        dict[str, Provide]: Dictionary of provides for pagination endpoints.
    """
    providers = {
        LIMIT_OFFSET_DEPENDENCY_KEY: provide_limit_offset_pagination,
        UPDATED_FILTER_DEPENDENCY_KEY: provide_updated_filter,
        CREATED_FILTER_DEPENDENCY_KEY: provide_created_filter,
        ID_FILTER_DEPENDENCY_KEY: provide_id_filter,
        SEARCH_FILTER_DEPENDENCY_KEY: provide_search_filter,
        ORDER_BY_DEPENDENCY_KEY: provide_order_by,
        FILTERS_DEPENDENCY_KEY: provide_filter_dependencies,
    }
    return {key: Provide(dep, sync_to_thread=False) for key, dep in providers.items()}
diff --git a/src/app/lib/dto.py b/src/app/lib/dto.py
new file mode 100644
index 00000000..d39d07f0
--- /dev/null
+++ b/src/app/lib/dto.py
@@ -0,0 +1,73 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Literal, TypeVar, overload
+
+from advanced_alchemy.extensions.litestar.dto import SQLAlchemyDTO, SQLAlchemyDTOConfig
+from litestar.dto import DataclassDTO, dto_field
+from litestar.dto.config import DTOConfig
+from litestar.types.protocols import DataclassProtocol
+from sqlalchemy.orm import DeclarativeBase
+
+if TYPE_CHECKING:
+    from collections.abc import Set as AbstractSet
+
+    from litestar.dto import RenameStrategy
+
+__all__ = ("config", "dto_field", "DTOConfig", "SQLAlchemyDTO", "DataclassDTO")
+
+DTOT = TypeVar("DTOT", bound=DataclassProtocol | DeclarativeBase)
+DTOFactoryT = TypeVar("DTOFactoryT", bound=DataclassDTO | SQLAlchemyDTO)
+SQLAlchemyModelT = TypeVar("SQLAlchemyModelT", bound=DeclarativeBase)
+DataclassModelT = TypeVar("DataclassModelT", bound=DataclassProtocol)
+ModelT = SQLAlchemyModelT | DataclassModelT
+
+
+@overload
+def config(
+    backend: Literal["sqlalchemy"] = "sqlalchemy",
+    exclude: AbstractSet[str] | None = None,
+    rename_fields: dict[str, str] | None = None,
+    rename_strategy: RenameStrategy | None = None,
+    max_nested_depth: int | None = None,
+    partial: bool | None = None,
+) -> SQLAlchemyDTOConfig:
+    ...
+
+
+@overload
+def config(
+    backend: Literal["dataclass"] = "dataclass",
+    exclude: AbstractSet[str] | None = None,
+    rename_fields: dict[str, str] | None = None,
+    rename_strategy: RenameStrategy | None = None,
+    max_nested_depth: int | None = None,
+    partial: bool | None = None,
+) -> DTOConfig:
+    ...
+
+
def config(
    backend: Literal["dataclass", "sqlalchemy"] = "dataclass",
    exclude: AbstractSet[str] | None = None,
    rename_fields: dict[str, str] | None = None,
    rename_strategy: RenameStrategy | None = None,
    max_nested_depth: int | None = None,
    partial: bool | None = None,
) -> DTOConfig | SQLAlchemyDTOConfig:
    """Build a DTO config with project-wide defaults.

    Defaults to camel-cased field names and a nesting depth of 2; any value
    passed explicitly (including falsy values such as ``0`` or ``False``)
    overrides the default.

    Args:
        backend: DTO backend to configure: ``"sqlalchemy"`` yields a
            :class:`SQLAlchemyDTOConfig`, ``"dataclass"`` a :class:`DTOConfig`.
        exclude: Field names to exclude from the generated DTO.
        rename_fields: Mapping of model field name to serialized name.
        rename_strategy: Field renaming strategy (defaults to ``"camel"``).
        max_nested_depth: Maximum depth of nested models to include.
        partial: Whether all DTO fields should be treated as optional.

    Returns:
        DTOConfig | SQLAlchemyDTOConfig: Configured DTO config instance.
    """
    default_kwargs = {"rename_strategy": "camel", "max_nested_depth": 2}
    # `is not None` (not truthiness) so explicit falsy overrides such as
    # max_nested_depth=0 or partial=False are honored.
    if exclude is not None:
        default_kwargs["exclude"] = exclude
    if rename_fields is not None:
        default_kwargs["rename_fields"] = rename_fields
    if rename_strategy is not None:
        default_kwargs["rename_strategy"] = rename_strategy
    if max_nested_depth is not None:
        default_kwargs["max_nested_depth"] = max_nested_depth
    if partial is not None:
        default_kwargs["partial"] = partial
    # BUG FIX: `backend` was previously ignored, so the "sqlalchemy" overload
    # returned a plain DTOConfig instead of the promised SQLAlchemyDTOConfig.
    if backend == "sqlalchemy":
        return SQLAlchemyDTOConfig(**default_kwargs)
    return DTOConfig(**default_kwargs)
diff --git a/src/app/lib/exceptions.py b/src/app/lib/exceptions.py
new file mode 100644
index 00000000..0ca303cc
--- /dev/null
+++ b/src/app/lib/exceptions.py
@@ -0,0 +1,136 @@
+"""Litestar-saqlalchemy exception types.
+
+Also, defines functions that translate service and repository exceptions
+into HTTP exceptions.
+"""
+
+from __future__ import annotations
+
+import sys
+from typing import TYPE_CHECKING
+
+from advanced_alchemy.exceptions import IntegrityError
+from litestar.exceptions import (
+    HTTPException,
+    InternalServerException,
+    NotFoundException,
+    PermissionDeniedException,
+)
+from litestar.exceptions.responses import create_debug_response, create_exception_response
+from litestar.repository.exceptions import ConflictError, NotFoundError, RepositoryError
+from litestar.status_codes import HTTP_409_CONFLICT, HTTP_500_INTERNAL_SERVER_ERROR
+from structlog.contextvars import bind_contextvars
+
+if TYPE_CHECKING:
+    from typing import Any
+
+    from litestar.connection import Request
+    from litestar.middleware.exceptions.middleware import ExceptionResponseContent
+    from litestar.response import Response
+    from litestar.types import Scope
+
+__all__ = (
+    "AuthorizationError",
+    "HealthCheckConfigurationError",
+    "ApplicationError",
+    "after_exception_hook_handler",
+)
+
+
class ApplicationError(Exception):
    """Base exception type for the lib's custom exception types."""

    # Human-readable detail message; consumed by the HTTP exception handlers.
    detail: str

    def __init__(self, *args: Any, detail: str = "") -> None:
        """Initialize ``ApplicationError``.

        Args:
            *args: args are converted to :class:`str` before passing to :class:`Exception`
            detail: detail of the exception.
        """
        # Falsy positional args (empty strings, None, 0) are dropped.
        str_args = [str(arg) for arg in args if arg]
        if not detail:
            # Fallback order: first positional arg (which is then removed from
            # the remaining args), then a class-level ``detail`` attribute
            # declared by a subclass.
            if str_args:
                detail, *str_args = str_args
            elif hasattr(self, "detail"):
                detail = self.detail
        self.detail = detail
        super().__init__(*str_args)

    def __repr__(self) -> str:
        if self.detail:
            return f"{self.__class__.__name__} - {self.detail}"
        return self.__class__.__name__

    def __str__(self) -> str:
        # self.args were stringified in __init__, so joining is safe.
        return " ".join((*self.args, self.detail)).strip()
+
+
class MissingDependencyError(ApplicationError, ImportError):
    """Missing optional dependency.

    This exception is raised only when a module depends on a dependency that has not been installed.
    Also subclasses :class:`ImportError` so callers may catch it as either type.
    """
+
+
class ApplicationClientError(ApplicationError):
    """Base exception type for errors attributable to the client."""
+
+
class AuthorizationError(ApplicationClientError):
    """A user tried to do something they shouldn't have.

    Translated to a permission-denied HTTP response by ``exception_to_http_response``.
    """
+
+
class HealthCheckConfigurationError(ApplicationError):
    """An error occurred while registering a health check."""
+
+
class _HTTPConflictException(HTTPException):
    """Request conflict with the current state of the target resource."""

    # Always rendered as HTTP 409.
    status_code = HTTP_409_CONFLICT
+
+
async def after_exception_hook_handler(exc: Exception, _scope: Scope) -> None:
    """Bind the active exception info into the structlog context vars.

    This must be a coroutine so that it is not wrapped in a thread where we'll lose context.

    Args:
        exc: the exception that was raised.
        _scope: scope of the request
    """
    # Expected application errors and client-side (4xx) HTTP errors are not
    # worth capturing a traceback for.
    is_app_error = isinstance(exc, ApplicationError)
    is_client_http_error = isinstance(exc, HTTPException) and exc.status_code < HTTP_500_INTERNAL_SERVER_ERROR
    if not (is_app_error or is_client_http_error):
        bind_contextvars(exc_info=sys.exc_info())
+
+
def exception_to_http_response(
    request: Request[Any, Any, Any],
    exc: ApplicationError | RepositoryError,
) -> Response[ExceptionResponseContent]:
    """Transform repository exceptions to HTTP exceptions.

    Args:
        request: The request that experienced the exception.
        exc: Exception raised during handling of the request.

    Returns:
        Exception response appropriate to the type of original exception.
    """
    http_exc: type[HTTPException]
    if isinstance(exc, NotFoundError):
        http_exc = NotFoundException
    elif isinstance(exc, ConflictError | RepositoryError | IntegrityError):
        http_exc = _HTTPConflictException
    elif isinstance(exc, AuthorizationError):
        http_exc = PermissionDeniedException
    else:
        http_exc = InternalServerException
    # BUG FIX: the guard previously compared against ``NotFoundError`` and
    # ``AuthorizationError``, which are never assigned to ``http_exc``, so
    # 404s leaked debug responses in debug mode.  Only genuine server errors
    # should surface a debug response.
    if request.app.debug and http_exc not in (PermissionDeniedException, NotFoundException):
        return create_debug_response(request, exc)
    # ``exc.__cause__`` may be ``None``; fall back to the exception itself so
    # the detail never renders as the literal string "None".
    return create_exception_response(request, http_exc(detail=str(exc.__cause__ or exc)))
diff --git a/src/app/lib/schema.py b/src/app/lib/schema.py
new file mode 100644
index 00000000..5172d7c8
--- /dev/null
+++ b/src/app/lib/schema.py
@@ -0,0 +1,79 @@
+from typing import Any
+from datetime import date
+import enum
+
+import msgspec
+
+
class BaseStruct(msgspec.Struct):
    """Base msgspec struct with dict conversion helpers."""

    def to_dict(self) -> dict[str, Any]:
        """Convert object to dict, omitting fields left as ``msgspec.UNSET``."""
        result: dict[str, Any] = {}
        for field in self.__struct_fields__:
            value = getattr(self, field)
            # UNSET is a sentinel singleton: compare by identity (`is not`),
            # not equality, and read the attribute only once.
            if value is not msgspec.UNSET:
                result[field] = value
        return result

    @classmethod
    def from_dict(cls, data: dict[str, Any]) -> "BaseStruct":
        """Create an instance from a dictionary."""
        return cls(**data)
+
+
class CamelizedBaseStruct(BaseStruct, rename="camel"):
    """Base struct that serializes field names in camelCase."""
+
+
class Message(CamelizedBaseStruct):
    """Response payload carrying a single message string."""

    message: str
+
+
class Location(CamelizedBaseStruct):
    """A Location."""

    # All parts are optional; unknown components may be absent.
    city: str | None = None
    region: str | None = None
    country: str | None = None
+
+
class Investor(CamelizedBaseStruct):
    """An investor."""

    name: str
    # NOTE: `type` shadows the builtin within the class namespace only.
    type: str | None = None
    url: str | None = None
    linkedin_profile_url: str | None = None
+
+
class Funding(CamelizedBaseStruct):
    """Funding data."""

    round_name: str = "Series Unknown"
    money_raised: int | None = None
    announced_date: date | None = None
    # msgspec copies builtin mutable defaults per instance, so the shared
    # mutable-default pitfall does not apply here.
    investors: list[Investor] = []
+
+
class WorkExperience(CamelizedBaseStruct):
    """Work experience data."""

    starts_at: date
    title: str
    company_name: str
    # ends_at is None presumably for current roles — TODO confirm with producer.
    ends_at: date | None = None
    linkedin_profile_url: str | None = None
    description: str | None = None
    location: Location | None = None
    logo_url: str | None = None
+
+
class SocialActivity(CamelizedBaseStruct):
    """Social activity data."""

    title: str
    link: str | None = None
    status: str | None = None
+
+
class OpportunityStage(enum.Enum):
    """Opportunity stages.

    Values are lowercase strings suitable for storage and serialization.
    """

    IDENTIFIED = "identified"
    QUALIFIED = "qualified"
    CONTACTED = "contacted"
    ENGAGED = "engaged"
    PROPOSED = "proposed"
    NEGOTIATED = "negotiated"
    DEFERRED = "deferred"
    SUSPENDED = "suspended"
    CUSTOMER = "customer"
diff --git a/vite.config.ts b/src/app/py.typed
similarity index 100%
rename from vite.config.ts
rename to src/app/py.typed
diff --git a/src/app/server/__init__.py b/src/app/server/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/app/server/builder.py b/src/app/server/builder.py
new file mode 100644
index 00000000..23b18055
--- /dev/null
+++ b/src/app/server/builder.py
@@ -0,0 +1,94 @@
+# pylint: disable=[invalid-name,import-outside-toplevel]
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, TypeVar
+
+from litestar.config.response_cache import ResponseCacheConfig, default_cache_key_builder
+from litestar.plugins import CLIPluginProtocol, InitPluginProtocol
+from litestar.security.jwt import OAuth2Login
+from litestar.stores.redis import RedisStore
+from litestar.stores.registry import StoreRegistry
+
+if TYPE_CHECKING:
+    from click import Group
+    from litestar import Request
+    from litestar.config.app import AppConfig
+    from redis.asyncio import Redis
+
+
+T = TypeVar("T")
+
+
class ApplicationConfigurator(InitPluginProtocol, CLIPluginProtocol):
    """Application configuration plugin.

    Wires Redis-backed stores, response caching, signature-namespace entries,
    and exception handlers into the app, and registers the user-management CLI.
    """

    __slots__ = ("redis", "app_slug")
    # Shared Redis client; created from settings in the init hooks below.
    redis: Redis
    # Application slug used to namespace stores and cache keys.
    app_slug: str

    def __init__(self) -> None:
        """Initialize ``ApplicationConfigurator``."""

    def on_cli_init(self, cli: Group) -> None:
        """Attach app state and commands to the CLI.

        Args:
            cli: The root CLI command group.
        """
        # Deferred imports keep module import cheap for non-CLI entry points.
        from app.cli.commands import user_management_app
        from app.config import get_settings

        settings = get_settings()
        self.redis = settings.redis.get_client()
        self.app_slug = settings.app.slug
        cli.add_command(user_management_app)

    def on_app_init(self, app_config: AppConfig) -> AppConfig:
        """Configure application for use with SQLAlchemy.

        Args:
            app_config: The :class:`AppConfig <.config.app.AppConfig>` instance.

        Returns:
            AppConfig: The mutated application config.
        """

        from advanced_alchemy.exceptions import RepositoryError
        from litestar.security.jwt import Token

        from app.config import constants, get_settings
        from app.db.models import User as UserModel
        from app.lib.exceptions import ApplicationError, exception_to_http_response

        settings = get_settings()
        self.redis = settings.redis.get_client()
        self.app_slug = settings.app.slug
        app_config.response_cache_config = ResponseCacheConfig(
            default_expiration=constants.CACHE_EXPIRATION,
            key_builder=self._cache_key_builder,
        )
        app_config.stores = StoreRegistry(default_factory=self.redis_store_factory)
        # Close the shared Redis client when the app shuts down.
        app_config.on_shutdown.append(self.redis.aclose)  # type: ignore[attr-defined]
        # Names referenced in handler signatures must be resolvable at runtime.
        app_config.signature_namespace.update(
            {
                "Token": Token,
                "OAuth2Login": OAuth2Login,
                "UserModel": UserModel,
            },
        )
        app_config.exception_handlers = {
            ApplicationError: exception_to_http_response,
            RepositoryError: exception_to_http_response,
        }
        return app_config

    def redis_store_factory(self, name: str) -> RedisStore:
        """Create a Redis store namespaced as ``<app_slug>:<name>``."""
        return RedisStore(self.redis, namespace=f"{self.app_slug}:{name}")

    def _cache_key_builder(self, request: Request) -> str:
        """App name prefixed cache key builder.

        Args:
            request (Request): Current request instance.

        Returns:
            str: App slug prefixed cache key.
        """

        return f"{self.app_slug}:{default_cache_key_builder(request)}"
diff --git a/src/app/server/openapi.py b/src/app/server/openapi.py
new file mode 100644
index 00000000..1a1b77b1
--- /dev/null
+++ b/src/app/server/openapi.py
@@ -0,0 +1,17 @@
+from litestar.openapi.config import OpenAPIConfig
+from litestar.openapi.plugins import ScalarRenderPlugin
+
+from app.__about__ import __version__ as current_version
+from app.config import get_settings
+from app.domain.accounts.guards import auth
+
settings = get_settings()
# OpenAPI schema is built from handler docstrings and rendered with Scalar;
# auth components/security requirements come from the accounts ``auth`` guard.
config = OpenAPIConfig(
    title=settings.app.NAME,
    version=current_version,
    components=[auth.openapi_components],
    security=[auth.security_requirement],
    use_handler_docstrings=True,
    render_plugins=[ScalarRenderPlugin()],
)
"""OpenAPI config for app.  See OpenAPISettings for configuration."""
diff --git a/src/app/server/plugins.py b/src/app/server/plugins.py
new file mode 100644
index 00000000..2828e910
--- /dev/null
+++ b/src/app/server/plugins.py
@@ -0,0 +1,15 @@
+from advanced_alchemy.extensions.litestar import SQLAlchemyPlugin
+from litestar.plugins.structlog import StructlogPlugin
+from litestar_granian import GranianPlugin
+from litestar_saq import SAQPlugin
+from litestar_vite import VitePlugin
+
+from app.config import app as config
+from app.server.builder import ApplicationConfigurator
+
# Singleton plugin instances wired into the Litestar app factory.
structlog = StructlogPlugin(config=config.log)  # structured logging
vite = VitePlugin(config=config.vite)  # frontend asset integration
saq = SAQPlugin(config=config.saq)  # background job queue
alchemy = SQLAlchemyPlugin(config=config.alchemy)  # database / ORM
granian = GranianPlugin()  # Granian server integration
app_config = ApplicationConfigurator()  # app-level wiring (stores, cache, handlers)
diff --git a/src/app/server/routers.py b/src/app/server/routers.py
new file mode 100644
index 00000000..6786a54c
--- /dev/null
+++ b/src/app/server/routers.py
@@ -0,0 +1,33 @@
+"""Application Modules."""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from app.domain.accounts.controllers import AccessController, UserController, UserRoleController, TenantController
+from app.domain.system.controllers import SystemController
+from app.domain.tags.controllers import TagController
+from app.domain.teams.controllers import TeamController, TeamMemberController
+from app.domain.companies.controllers import CompanyController
+from app.domain.jobs.controllers import JobPostController
+from app.domain.people.controllers import PersonController
+from app.domain.opportunities.controllers import OpportunityController
+
+if TYPE_CHECKING:
+    from litestar.types import ControllerRouterHandler
+
+
# Controllers registered on the application.
route_handlers: list[ControllerRouterHandler] = [
    AccessController,
    UserController,
    TeamController,
    UserRoleController,
    # NOTE(review): TeamInvitationController is deliberately disabled —
    # confirm whether invitations are routed elsewhere before re-enabling.
    #  TeamInvitationController,
    TeamMemberController,
    TagController,
    TenantController,
    CompanyController,
    JobPostController,
    PersonController,
    OpportunityController,
    SystemController,
]
diff --git a/src/pyspa/__init__.py b/src/pyspa/__init__.py
deleted file mode 100644
index bd4de6dd..00000000
--- a/src/pyspa/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import logging
-
-from pyspa import cli, config
-from pyspa.__main__ import main
-from pyspa.__version__ import __version__
-
-logger = logging.getLogger(__name__)
-logger.addHandler(logging.NullHandler())
-
-__all__ = ["__version__", "cli", "config", "main"]
diff --git a/src/pyspa/__main__.py b/src/pyspa/__main__.py
deleted file mode 100644
index 8a50862f..00000000
--- a/src/pyspa/__main__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import sys
-
-
-def main() -> None:
-    try:
-        from pyspa.cli import cli
-    except ImportError:
-        print(  # noqa: T201
-            "💣 [bold red] Could not load required libraries.  ",
-            "Please check your installation",
-        )
-        sys.exit(1)
-    cli()
-
-
-if __name__ == "__main__":
-    main()
diff --git a/src/pyspa/__version__.py b/src/pyspa/__version__.py
deleted file mode 100644
index 485f44ac..00000000
--- a/src/pyspa/__version__.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "0.1.1"
diff --git a/src/pyspa/cli/__init__.py b/src/pyspa/cli/__init__.py
deleted file mode 100644
index fa7249be..00000000
--- a/src/pyspa/cli/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from pyspa.cli.console import console
-from pyspa.cli.main import cli
-
-__all__ = ["cli", "console"]
diff --git a/src/pyspa/cli/commands/__init__.py b/src/pyspa/cli/commands/__init__.py
deleted file mode 100644
index 2112e616..00000000
--- a/src/pyspa/cli/commands/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from .manage import cli as manage_cli
-from .run import cli as run_cli
-
-__all__ = ["manage_cli", "run_cli"]
diff --git a/src/pyspa/cli/commands/manage.py b/src/pyspa/cli/commands/manage.py
deleted file mode 100644
index 8f6b3b08..00000000
--- a/src/pyspa/cli/commands/manage.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import typer
-
-from pyspa.cli.console import console
-from pyspa.config.logging import get_logger
-
-logger = get_logger("pyspa")
-
-cli = typer.Typer(
-    no_args_is_help=True,
-    rich_markup_mode="markdown",
-    pretty_exceptions_enable=True,
-    pretty_exceptions_show_locals=False,
-    pretty_exceptions_short=True,
-    add_completion=False,
-)
-
-
-@cli.command()
-def pull_secret(secret_name: str) -> None:
-    """Pull Secrets from Secrets Provider"""
-    console.print("[bold green]...Gathering data")
-    logger.info("[bold red]...Gathering data")
-
-
-@cli.command()
-def push_secret(secret_name: str) -> None:
-    """Pull Secrets from Secrets Provider"""
-    console.print("[bold green]...Gathering data")
-
-
-@cli.command()
-def bundle_scripts() -> None:
-    """Push secrets to Secrets Provider"""
-    console.print("[bold blue]...exporting shell scripts")
diff --git a/src/pyspa/cli/commands/run.py b/src/pyspa/cli/commands/run.py
deleted file mode 100644
index 74a1dcb8..00000000
--- a/src/pyspa/cli/commands/run.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import typer
-
-from pyspa.cli.console import console
-from pyspa.config import settings
-from pyspa.config.logging import get_logger
-from pyspa.core.wsgi import run_wsgi
-
-cli = typer.Typer(
-    no_args_is_help=True,
-    rich_markup_mode="markdown",
-    pretty_exceptions_enable=True,
-    pretty_exceptions_show_locals=False,
-    pretty_exceptions_short=True,
-    add_completion=False,
-)
-
-logger = get_logger("root")
-
-
-@cli.command(name="server")
-def run_server(
-    host: str = typer.Option(
-        settings.gunicorn.HOST,
-        help="Host interface to listen on.  Use 0.0.0.0 for all available interfaces.",
-    ),
-    port: int = typer.Option(settings.gunicorn.PORT, help="Port to listen on."),
-    workers: int = typer.Option(
-        settings.gunicorn.WORKERS,
-        help="Number of HTTP workers to run.  This should equal the number of CPUs available.",
-    ),
-) -> None:
-    """Run the server"""
-    settings.gunicorn.HOST = host
-    settings.gunicorn.PORT = port
-    settings.gunicorn.WORKERS = workers
-    console.print("[bold green]...Gathering data")
-    run_wsgi(host, port, workers, reload=settings.gunicorn.RELOAD)
-
-
-@cli.command(name="worker")
-def run_worker() -> None:
-    """Run the worker"""
-    console.print("[bold green]...Gathering data")
-    logger.info("Running worker")
diff --git a/src/pyspa/cli/console.py b/src/pyspa/cli/console.py
deleted file mode 100644
index a9463afd..00000000
--- a/src/pyspa/cli/console.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from rich.console import Console
-
-console = Console()
diff --git a/src/pyspa/cli/main.py b/src/pyspa/cli/main.py
deleted file mode 100644
index 8eea5f03..00000000
--- a/src/pyspa/cli/main.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import typer
-
-from pyspa.cli.commands import manage_cli, run_cli
-
-cli = typer.Typer(
-    name="Simple Single Page Application",
-    no_args_is_help=True,
-    rich_markup_mode="markdown",
-    pretty_exceptions_enable=True,
-    pretty_exceptions_show_locals=False,
-    pretty_exceptions_short=True,
-    add_completion=False,
-)
-
-cli.add_typer(run_cli, name="run")
-cli.add_typer(manage_cli, name="manage")
diff --git a/src/pyspa/config/__init__.py b/src/pyspa/config/__init__.py
deleted file mode 100644
index eeddae8c..00000000
--- a/src/pyspa/config/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from pyspa.config import logging
-from pyspa.config.application import settings
-
-__all__ = ["settings", "logging"]
diff --git a/src/pyspa/config/application.py b/src/pyspa/config/application.py
deleted file mode 100644
index e273d0d3..00000000
--- a/src/pyspa/config/application.py
+++ /dev/null
@@ -1,116 +0,0 @@
-import datetime
-from enum import Enum, EnumMeta
-from functools import lru_cache
-
-from pydantic import AnyUrl
-from pydantic import BaseSettings as _BaseSettings
-from pydantic import PostgresDsn, SecretBytes, SecretStr
-
-from pyspa.__version__ import __version__
-
-
-def encode_datetime_object(dt: datetime.datetime) -> str:
-    """Handles datetime serialization for nested timestamps in models/dataclasses"""
-    return dt.replace(tzinfo=datetime.timezone.utc).isoformat().replace("+00:00", "Z")
-
-
-class BaseSettings(_BaseSettings):
-    class Config:
-        case_sensitive = True
-        json_encoders = {
-            datetime.datetime: encode_datetime_object,
-            SecretStr: lambda secret: secret.get_secret_value() if secret else None,
-            SecretBytes: lambda secret: secret.get_secret_value() if secret else None,
-            Enum: lambda enum: enum.value if enum else None,
-            EnumMeta: None,
-        }
-        validate_assignment = True
-        case_sensitive = False
-        orm_mode = True
-        use_enum_values = True
-        env_file = ".env"
-        env_file_encoding = "utf-8"
-
-
-class ApplicationSettings(BaseSettings):
-    class Config:
-        env_prefix = "PYSPA_"
-
-    BUILD_NUMBER: str = str(__version__)
-    DEBUG: bool = False
-    DEFAULT_PAGINATION_LIMIT: int = 10
-    ENVIRONMENT: str = "production"
-    LOG_LEVEL: str = "INFO"
-    DEV_MODE: bool = False
-    NAME: str = "pyspa"
-
-
-class CacheSettings(BaseSettings):
-    class Config:
-        env_prefix = "PYSPA_REDIS_"
-
-    EXPIRATION: int = 60  # 60 seconds
-    URL: AnyUrl
-
-
-class DatabaseSettings(BaseSettings):
-    """Database Configuration"""
-
-    class Config:
-        env_prefix = "PYSPA_POSTGRES_"
-
-    ECHO: bool = False
-    URL: PostgresDsn
-
-
-class GunicornSettings(BaseSettings):
-    """Gunicorn settings"""
-
-    class Config:
-        env_prefix = "PYSPA_GUNICORN_"
-
-    ACCESS_LOG: str
-    ERROR_LOG: str
-    HOST: str = "0.0.0.0"
-    KEEPALIVE: int = 120
-    LOG_LEVEL: str = "INFO"
-    PORT: int = 8080
-    RELOAD: bool = False
-    THREADS: int
-    TIMEOUT: int = 120
-    WORKERS: int
-    WORKER_CLASS: str
-    PRELOAD: bool = True
-
-
-# Constants
-class ApiPaths:
-    HEALTH = "/health"
-
-
-class Settings(BaseSettings):
-    """Main Setting Class"""
-
-    app: ApplicationSettings = ApplicationSettings()
-    db: DatabaseSettings = DatabaseSettings()
-    cache: CacheSettings = CacheSettings()
-    gunicorn: GunicornSettings = GunicornSettings()
-    api_paths: ApiPaths = ApiPaths()
-
-
-@lru_cache(maxsize=1)
-def get_app_settings() -> Settings:
-    """
-    Cache app settings
-
-    This function returns a configured instance of settings.
-
-    LRU Cache decorator has been used to limit the number of instances to 1.
-    This effectively turns this into a singleton class.
-
-    Maybe there are better approaches for this?
-    """
-    return Settings()
-
-
-settings = get_app_settings()
diff --git a/src/pyspa/config/gunicorn.py b/src/pyspa/config/gunicorn.py
deleted file mode 100644
index 053235eb..00000000
--- a/src/pyspa/config/gunicorn.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from pyspa.config.application import settings
-from pyspa.config.logging import log_config
-
-# Gunicorn config variables
-accesslog = settings.gunicorn.ACCESS_LOG
-bind = f"{settings.gunicorn.HOST}:{settings.gunicorn.PORT}"
-errorlog = settings.gunicorn.ERROR_LOG
-keepalive = settings.gunicorn.KEEPALIVE
-logconfig_dict = log_config.dict(exclude_none=True)
-loglevel = settings.gunicorn.LOG_LEVEL
-reload = settings.gunicorn.RELOAD
-threads = settings.gunicorn.THREADS
-timeout = settings.gunicorn.TIMEOUT
-worker_class = settings.gunicorn.WORKER_CLASS
-workers = settings.gunicorn.WORKERS
diff --git a/src/pyspa/config/logging.py b/src/pyspa/config/logging.py
deleted file mode 100644
index 69ef381b..00000000
--- a/src/pyspa/config/logging.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# Standard Library
-
-import logging.config
-import re
-from functools import lru_cache
-from logging import Filter as LoggingFilter
-from queue import Queue
-from typing import TYPE_CHECKING, Any, Final, Generic, List, TypeVar
-
-import picologging
-from gunicorn.glogging import Logger as GunicornLogger
-from picologging import LogRecord
-from picologging.handlers import QueueHandler, QueueListener
-from rich.console import Console
-from rich.logging import RichHandler as _RichHandler
-from starlette.status import HTTP_200_OK
-from starlite import LoggingConfig
-
-from pyspa.config.application import ApiPaths, settings
-
-DEFAULT_LOG_NAME: Final = "pyspa"
-
-
-class RichHandler(_RichHandler):
-    """Rich Handler Config"""
-
-    def __init__(self, *args, **kwargs) -> None:  # type: ignore
-        super().__init__(
-            rich_tracebacks=settings.app.LOG_LEVEL.lower() == "debug",
-            console=Console(markup=True),
-            tracebacks_suppress=[
-                "click",
-                "typer",
-                "uvloop",
-                "uvicorn",
-                "gunicorn",
-                "starlette",
-                "starlite",
-                "sqlalchemy",
-                "anyio",
-                "asyncio",
-            ],
-            markup=True,
-            show_path=False,
-            omit_repeated_times=False,
-        )
-
-
-class AccessLogFilter(LoggingFilter):
-    """
-    For filtering log events based on request path.
-
-    Parameters
-    ----------
-    path_re : str Regex string,
-        drops log event if the path of the request matches the regex.
-    args : Any
-    kwargs : Any
-        Args and kwargs passed through to `logging.Filter`.
-    """
-
-    def __init__(self, *args: Any, path_re: str, **kwargs: Any) -> None:
-        super().__init__(*args, **kwargs)
-        self.path_filter = re.compile(path_re)
-
-    def filter(self, record: logging.LogRecord) -> bool:
-        *_, req_path, _, status_code = record.args  # type: ignore
-        if self.path_filter.match(req_path) and status_code == HTTP_200_OK:  # type: ignore
-            return False
-        return True
-
-
-class QueueListenerHandler(QueueHandler):  # type: ignore
-    """
-    Configures queue listener and handler to support non-blocking logging configuration.
-    """
-
-    def __init__(
-        self,
-        handlers: List[Any],
-        respect_handler_level: bool = False,
-        queue: Queue[LogRecord] = Queue(-1),
-    ):
-        super().__init__(queue)
-        self.handlers = _resolve_handlers(handlers)
-        self._listener: QueueListener = QueueListener(
-            self.queue, *self.handlers, respect_handler_level=respect_handler_level
-        )
-        self._listener.start()
-
-
-class StubbedGunicornLogger(GunicornLogger):  # type: ignore
-    """Customized Gunicorn Logger"""
-
-    def setup(self, cfg: Any) -> None:
-        """Configures logger"""
-        self.handler = RichHandler()
-        self.error_logger = picologging.getLogger("gunicorn.error")
-        self.error_logger.addHandler(self.handler)
-        self.access_logger = picologging.getLogger("gunicorn.access")
-        self.access_logger.addHandler(self.handler)
-
-
-log_config = LoggingConfig(
-    root={"level": settings.app.LOG_LEVEL, "handlers": ["queue_listener"]},
-    filters={
-        "health_filter": {
-            "()": AccessLogFilter,
-            "path_re": f"^{ApiPaths.HEALTH}$",
-        }
-    },
-    handlers={
-        "console": {
-            "class": "pyspa.config.logging.RichHandler",
-            "level": "DEBUG",
-            "formatter": "standard",
-        },
-        "queue_listener": {
-            "class": "starlite.logging.picologging.QueueListenerHandler",
-            "handlers": ["cfg://handlers.console"],
-        },
-    },
-    formatters={
-        "standard": {"format": "%(levelname)s - %(asctime)s - %(name)s - %(message)s"}
-    },
-    loggers={
-        "pyspa": {
-            "propagate": True,
-        },
-        "gunicorn.error": {
-            "propagate": True,
-        },
-        "uvicorn.access": {
-            "propagate": True,
-            "filters": ["health_filter"],
-        },
-        "uvicorn.error": {
-            "propagate": True,
-        },
-        "sqlalchemy.engine": {
-            "propagate": True,
-        },
-        "starlite": {
-            "level": "WARNING",
-            "propagate": True,
-        },
-    },
-)
-
-
-@lru_cache(maxsize=1)
-def get_logger(name: str = DEFAULT_LOG_NAME) -> picologging.Logger:
-    """
-    Returns a Configured Logger
-    """
-    log_config.configure()
-    return picologging.getLogger(name)
-
-
-def _resolve_handlers(handlers: List[Any]) -> List[Any]:
-    """
-    Converts list of string of handlers to the object of respective handler.
-    Indexing the list performs the evaluation of the object.
-    """
-    return [handlers[i] for i in range(len(handlers))]
diff --git a/src/pyspa/core/asgi.py b/src/pyspa/core/asgi.py
deleted file mode 100644
index 4b2e65dc..00000000
--- a/src/pyspa/core/asgi.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR
-from starlite import CompressionConfig, Starlite
-
-from pyspa import routes
-from pyspa.config import settings
-from pyspa.config.logging import log_config
-from pyspa.core import exceptions, openapi, response
-
-app = Starlite(
-    debug=settings.app.DEBUG,
-    exception_handlers={
-        HTTP_500_INTERNAL_SERVER_ERROR: exceptions.logging_exception_handler
-    },
-    compression_config=CompressionConfig(backend="brotli"),
-    middleware=[],
-    on_shutdown=[],
-    on_startup=[log_config.configure],
-    openapi_config=openapi.config,
-    response_class=response.Response,
-    route_handlers=[routes.health_check],
-)
diff --git a/src/pyspa/core/exceptions.py b/src/pyspa/core/exceptions.py
deleted file mode 100644
index a0ded6dc..00000000
--- a/src/pyspa/core/exceptions.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import logging
-from typing import TYPE_CHECKING
-
-from starlite.middleware import ExceptionHandlerMiddleware
-
-logger = logging.getLogger(__name__)
-
-if TYPE_CHECKING:
-    from starlette.responses import Response
-    from starlite.types import Request
-
-
-def logging_exception_handler(request: "Request", exc: Exception) -> "Response":
-    """
-    Logs exception and returns appropriate response.
-
-    Parameters
-    ----------
-    request : Request
-        The request that caused the exception.
-    exc :
-        The exception caught by the Starlite exception handling middleware and passed to the
-        callback.
-
-    Returns
-    -------
-    Response
-    """
-    logger.error("Application Exception", exc_info=exc)
-    return ExceptionHandlerMiddleware(
-        app=request.app, debug=request.app.debug, exception_handlers={}
-    ).default_http_exception_handler(request, exc)
diff --git a/src/pyspa/core/openapi.py b/src/pyspa/core/openapi.py
deleted file mode 100644
index 7daea842..00000000
--- a/src/pyspa/core/openapi.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from openapi_schema_pydantic import Contact  # type: ignore
-from starlite import OpenAPIConfig
-
-from pyspa import __version__
-
-config = OpenAPIConfig(
-    title="pyspa",
-    version=str(__version__),
-    contact=Contact(name="Cody Fincher", email="cody@fincher.cloud"),
-    description="Simple Single Page Application",
-)
diff --git a/src/pyspa/core/response.py b/src/pyspa/core/response.py
deleted file mode 100644
index b85b1fbd..00000000
--- a/src/pyspa/core/response.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from typing import Any
-
-from asyncpg.pgproto import pgproto
-from starlite import Response as _Response
-
-
-class Response(_Response):
-    @staticmethod
-    def serializer(value: Any) -> Any:
-        """
-        Custom serializer method that handles the `asyncpg.pgproto.UUID` implementation.
-
-        Parameters
-        ----------
-        value : Any
-
-        Returns
-        -------
-        Any
-        """
-        if isinstance(value, pgproto.UUID):
-            return str(value)
-        return _Response.serializer(value)
diff --git a/src/pyspa/core/wsgi.py b/src/pyspa/core/wsgi.py
deleted file mode 100644
index 7afe4641..00000000
--- a/src/pyspa/core/wsgi.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Application Web Server Gateway Interface - gunicorn."""
-import asyncio
-import os
-import signal
-import sys
-import threading
-import time
-from typing import TYPE_CHECKING, Any, NoReturn, Union
-
-from gunicorn.app.base import Application
-from gunicorn.arbiter import Arbiter
-from uvicorn.main import Server
-from uvicorn.workers import UvicornWorker as _UvicornWorker
-
-from pyspa.config import settings
-from pyspa.core.asgi import app
-
-if TYPE_CHECKING:
-    from starlite import Starlite
-
-
-class ReloaderThread(threading.Thread):
-    def __init__(self, worker: "UvicornWorker", sleep_interval: float = 1.0):
-        super().__init__()
-        self.daemon = True
-        self._worker = worker
-        self._interval = sleep_interval
-
-    def run(self) -> None:
-        """
-        Sends a KILL signal to the current process if the worker's active flag is set to
-        False.
-        """
-        while True:
-            if not self._worker.alive:
-                os.kill(os.getpid(), signal.SIGINT)
-            time.sleep(self._interval)
-
-
-class UvicornWorker(_UvicornWorker):  # type: ignore
-    CONFIG_KWARGS = {"loop": "uvloop", "http": "httptools", "lifespan": "auto"}
-
-    def __init__(self, *args: list[Any], **kwargs: dict[str, Any]):
-        super().__init__(*args, **kwargs)
-        self._reloader_thread = ReloaderThread(self)
-
-    def run(self) -> None:
-        if self.cfg.reload:
-            self._reloader_thread.start()
-        super().run()
-
-    def _install_sigquit_handler(self, server: Server) -> None:
-        """Workaround to install a SIGQUIT handler on workers.
-        Ref.:
-        - https://github.com/encode/uvicorn/issues/1116
-        - https://github.com/benoitc/gunicorn/issues/2604
-        """
-        if threading.current_thread() is not threading.main_thread():
-            # Signals can only be listened to from the main thread.
-            return
-
-        loop = asyncio.get_running_loop()
-        loop.add_signal_handler(signal.SIGQUIT, self.handle_exit, signal.SIGQUIT, None)
-
-    async def _serve(self) -> None:
-        self.config.app = self.wsgi
-        server = Server(config=self.config)
-        self._install_sigquit_handler(server)
-        await server.serve(sockets=self.sockets)
-        if not server.started:
-            sys.exit(Arbiter.WORKER_BOOT_ERROR)
-
-
-class ApplicationLoader(Application):  # type: ignore
-    """Bootstraps the WSGI app"""
-
-    def __init__(self, options: dict[str, str | bool | int] | None = None):
-        self.options = options or {}
-        self.config_path = self.options.pop("config", None)
-        super().__init__()
-
-    def init(self, parser, options, args):  # type: ignore
-        """Class ApplicationLoader object constructor."""
-        self.options = options
-        self.cfg.set("default_proc_name", args[0])
-
-    def load_config(self) -> None:
-        """Load config from passed options"""
-        if self.config_path:
-            self.load_config_from_file(self.config_path)
-        config = {
-            key: value
-            for key, value in self.options.items()
-            if key in self.cfg.settings and value is not None
-        }
-        for key, value in config.items():
-            self.cfg.set(key.lower(), value)
-
-    def load(self) -> "Starlite":
-        """Load application."""
-        return app
-
-
-def run_wsgi(
-    host: str,
-    port: int,
-    http_workers: int,
-    reload: bool,
-) -> None:
-    """Run gunicorn WSGI with ASGI workers."""
-    sys.argv = [
-        "--gunicorn",
-    ]
-    if reload:
-        sys.argv.append("-r")
-    sys.argv.append("pyspa.core.asgi:app")
-    ApplicationLoader(
-        options={
-            "host": host,
-            "workers": str(http_workers),
-            "port": str(port),
-            "reload": reload,
-            "loglevel": settings.app.LOG_LEVEL,
-            "config": "pyspa/config/gunicorn.py",
-        },
-    ).run()
diff --git a/src/pyspa/routes/__init__.py b/src/pyspa/routes/__init__.py
deleted file mode 100644
index 576a4615..00000000
--- a/src/pyspa/routes/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .health_check import health_check
-
-__all__ = ["health_check"]
diff --git a/src/pyspa/routes/health_check.py b/src/pyspa/routes/health_check.py
deleted file mode 100644
index a2087b3a..00000000
--- a/src/pyspa/routes/health_check.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from starlite import MediaType, get
-
-from pyspa.config import settings
-
-
-@get(path="/health", media_type=MediaType.JSON, cache=False, tags=["Misc"])
-async def health_check() -> dict[str, str]:
-    """Health check handler"""
-    return {"app": settings.app.NAME, "build": settings.app.BUILD_NUMBER}
diff --git a/src/pyspa/utils/postgres_ready.py b/src/pyspa/utils/postgres_ready.py
deleted file mode 100644
index f701ca6e..00000000
--- a/src/pyspa/utils/postgres_ready.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import asyncio
-import sys
-
-from sqlalchemy import text
-
-from pyspa.core.db import engine
-
-
-async def c() -> None:
-    """
-    Checks for database connectivity.
-    """
-    try:
-        async with engine.begin() as conn:
-            await conn.execute(text("SELECT 1"))
-    except Exception as e:  # pylint: disable=broad-except
-        print(f"Waiting for Postgres: {e}")  # noqa: T201
-        sys.exit(-1)
-    else:
-        print("Postgres OK!")  # noqa: T201
-
-
-def main() -> None:
-    """Entrypoint"""
-    asyncio.run(c())
diff --git a/src/pyspa/utils/redis_ready.py b/src/pyspa/utils/redis_ready.py
deleted file mode 100644
index 442df426..00000000
--- a/src/pyspa/utils/redis_ready.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import asyncio
-import sys
-
-from redis.asyncio import Redis
-
-from app.config import cache_settings
-
-
-async def c() -> None:
-    """
-    Checks for cache connectivity.
-    """
-    redis = Redis.from_url(cache_settings.URL)
-    try:
-        await redis.ping()
-    except Exception as e:  # pylint: disable=broad-except
-        print(f"Waiting  for Redis: {e}")  # noqa: T201
-        sys.exit(-1)
-    else:
-        print("Redis OK!")  # noqa: T201
-    finally:
-        await redis.close()
-
-
-def main() -> None:
-    """Entrypoint"""
-    asyncio.run(c())
diff --git a/src/pyspa/utils/restartable_worker.py b/src/pyspa/utils/restartable_worker.py
deleted file mode 100644
index e5786824..00000000
--- a/src/pyspa/utils/restartable_worker.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import asyncio
-import os
-import signal
-import sys
-import threading
-import time
-from typing import Any
-
-from gunicorn.app.base import Application
-from gunicorn.arbiter import Arbiter
-from uvicorn.main import Server
-from uvicorn.workers import UvicornWorker
-
-
-class ReloaderThread(threading.Thread):
-    def __init__(self, worker: UvicornWorker, sleep_interval: float = 1.0):
-        super().__init__()
-        self.daemon = True
-        self._worker = worker
-        self._interval = sleep_interval
-
-    def run(self) -> None:
-        """
-        Sends a KILL signal to the current process if the worker's active flag is set to
-        False.
-        """
-        while True:
-            if not self._worker.alive:
-                os.kill(os.getpid(), signal.SIGINT)
-            time.sleep(self._interval)
-
-
-class RestartableUvicornWorker(UvicornWorker):  # type: ignore
-    """
-    UvicornWorker with additional thread that sends a KILL signal to the current process
-    if the worker's active flag is set to False.
-
-    attribution: https://github.com/benoitc/gunicorn/issues/2339#issuecomment-867481389
-    """
-
-    CONFIG_KWARGS = {"loop": "uvloop", "http": "httptools", "lifespan": "auto"}
-
-    def __init__(self, *args: list[Any], **kwargs: dict[str, Any]):
-        super().__init__(*args, **kwargs)
-        self._reloader_thread = ReloaderThread(self)
-
-    def _install_sigquit_handler(self, server: Server) -> None:
-        """Workaround to install a SIGQUIT handler on workers.
-        Ref.:
-        - https://github.com/encode/uvicorn/issues/1116
-        - https://github.com/benoitc/gunicorn/issues/2604
-        """
-        if threading.current_thread() is not threading.main_thread():
-            # Signals can only be listened to from the main thread.
-            return
-
-        loop = asyncio.get_running_loop()
-        loop.add_signal_handler(signal.SIGQUIT, self.handle_exit, signal.SIGQUIT, None)
-
-    async def _serve(self) -> None:
-        self.config.app = self.wsgi
-        server = Server(config=self.config)
-        self._install_sigquit_handler(server)
-        await server.serve(sockets=self.sockets)
-        if not server.started:
-            sys.exit(Arbiter.WORKER_BOOT_ERROR)
-
-    def run(self) -> None:
-        if self.cfg.reload:
-            self._reloader_thread.start()
-        super().run()
-
-
-class ApplicationLoader(Application):  # type: ignore
-    """Bootstraps the WSGI app"""
-
-    def __init__(self, options: dict[str, str] | None = None):
-        self.options = options or {}
-        self.config_path = self.options.pop("config", None)
-        super().__init__()
-
-    def init(self, parser, options, args) -> None:  # type: ignore
-        """Class ApplicationLoader object constructor."""
-        self.options = options
-        self.cfg.set("default_proc_name", args[0])
-
-    def load_config(self) -> None:
-        """Load config from passed options"""
-        if self.config_path:
-            self.load_config_from_file(self.config_path)
-        config = {
-            key: value
-            for key, value in self.options.items()
-            if key in self.cfg.settings and value is not None
-        }
-        for key, value in config.items():
-            self.cfg.set(key.lower(), value)
-
-    def load(self):
-        """Load application."""
-        return get_app()
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 00000000..bc61e4ad
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+from redis.asyncio import Redis
+
+from app.config import base
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from pytest import MonkeyPatch
+
+
+pytestmark = pytest.mark.anyio
+pytest_plugins = [
+    "tests.data_fixtures",
+    "pytest_databases.docker",
+    "pytest_databases.docker.postgres",
+    "pytest_databases.docker.redis",
+]
+
+
+@pytest.fixture(scope="session")
+def anyio_backend() -> str:
+    return "asyncio"
+
+
+@pytest.fixture(autouse=True)
+def _patch_settings(monkeypatch: MonkeyPatch) -> None:
+    """Patch the settings."""
+
+    settings = base.Settings.from_env(".env.testing")
+
+    def get_settings(dotenv_filename: str = ".env.testing") -> base.Settings:
+        return settings
+
+    monkeypatch.setattr(base, "get_settings", get_settings)
+
+
+@pytest.fixture(name="redis", autouse=True)
+async def fx_redis(redis_docker_ip: str, redis_service: None, redis_port: int) -> AsyncGenerator[Redis, None]:
+    """Redis instance for testing.
+
+    Returns:
+        Redis client instance, function scoped.
+    """
+    yield Redis(host=redis_docker_ip, port=redis_port)
diff --git a/tests/data_fixtures.py b/tests/data_fixtures.py
new file mode 100644
index 00000000..0a73598f
--- /dev/null
+++ b/tests/data_fixtures.py
@@ -0,0 +1,104 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+import pytest
+
+if TYPE_CHECKING:
+    from litestar import Litestar
+    from pytest import MonkeyPatch
+
+    from app.db.models import Team, User
+
+pytestmark = pytest.mark.anyio
+
+
+@pytest.fixture(name="app")
+def fx_app(pytestconfig: pytest.Config, monkeypatch: MonkeyPatch) -> Litestar:
+    """App fixture.
+
+    Returns:
+        An application instance, configured via plugin.
+    """
+    from app.asgi import create_app
+
+    return create_app()
+
+
+@pytest.fixture(name="raw_users")
+def fx_raw_users() -> list[User | dict[str, Any]]:
+    """Unstructured user representations."""
+
+    return [
+        {
+            "id": "97108ac1-ffcb-411d-8b1e-d9183399f63b",
+            "email": "superuser@example.com",
+            "name": "Super User",
+            "password": "Test_Password1!",
+            "is_superuser": True,
+            "is_active": True,
+        },
+        {
+            "id": "5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2",
+            "email": "user@example.com",
+            "name": "Example User",
+            "password": "Test_Password2!",
+            "is_superuser": False,
+            "is_active": True,
+        },
+        {
+            "id": "5ef29f3c-3560-4d15-ba6b-a2e5c721e999",
+            "email": "test@test.com",
+            "name": "Test User",
+            "password": "Test_Password3!",
+            "is_superuser": False,
+            "is_active": True,
+        },
+        {
+            "id": "6ef29f3c-3560-4d15-ba6b-a2e5c721e4d3",
+            "email": "another@example.com",
+            "name": "The User",
+            "password": "Test_Password3!",
+            "is_superuser": False,
+            "is_active": True,
+        },
+        {
+            "id": "7ef29f3c-3560-4d15-ba6b-a2e5c721e4e1",
+            "email": "inactive@example.com",
+            "name": "Inactive User",
+            "password": "Old_Password2!",
+            "is_superuser": False,
+            "is_active": False,
+        },
+    ]
+
+
+@pytest.fixture(name="raw_teams")
+def fx_raw_teams() -> list[Team | dict[str, Any]]:
+    """Unstructured team representations."""
+
+    return [
+        {
+            "id": "97108ac1-ffcb-411d-8b1e-d9183399f63b",
+            "slug": "test-team",
+            "name": "Test Team",
+            "description": "This is a description for a  team.",
+            "owner_id": "5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2",
+        },
+        {
+            "id": "81108ac1-ffcb-411d-8b1e-d91833999999",
+            "slug": "simple-team",
+            "name": "Simple Team",
+            "description": "This is a description",
+            "owner_id": "5ef29f3c-3560-4d15-ba6b-a2e5c721e999",
+            "tags": ["new", "another", "extra"],
+        },
+        {
+            "id": "81108ac1-ffcb-411d-8b1e-d91833999998",
+            "slug": "extra-team",
+            "name": "Extra Team",
+            "description": "This is a description",
+            "owner_id": "5ef29f3c-3560-4d15-ba6b-a2e5c721e999",
+            "tags": ["extra"],
+        },
+    ]
diff --git a/tests/helpers.py b/tests/helpers.py
new file mode 100644
index 00000000..e11fe0ab
--- /dev/null
+++ b/tests/helpers.py
@@ -0,0 +1,60 @@
+from __future__ import annotations
+
+import inspect
+from contextlib import AbstractAsyncContextManager, AbstractContextManager
+from functools import partial
+from typing import TYPE_CHECKING, TypeVar, cast, overload
+
+import anyio
+from typing_extensions import ParamSpec
+
+if TYPE_CHECKING:
+    from collections.abc import Awaitable, Callable
+    from types import TracebackType
+
+T = TypeVar("T")
+P = ParamSpec("P")
+
+
+class _ContextManagerWrapper:
+    def __init__(self, cm: AbstractContextManager[T]) -> None:
+        self._cm = cm
+
+    async def __aenter__(self) -> T:
+        return self._cm.__enter__()
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> bool | None:
+        return self._cm.__exit__(exc_type, exc_val, exc_tb)
+
+
+@overload
+async def maybe_async(obj: Awaitable[T]) -> T: ...
+
+
+@overload
+async def maybe_async(obj: T) -> T: ...
+
+
+async def maybe_async(obj: Awaitable[T] | T) -> T:
+    return cast(T, await obj) if inspect.isawaitable(obj) else cast(T, obj)
+
+
+def maybe_async_cm(obj: AbstractContextManager[T] | AbstractAsyncContextManager[T]) -> AbstractAsyncContextManager[T]:
+    if isinstance(obj, AbstractContextManager):
+        return cast(AbstractAsyncContextManager[T], _ContextManagerWrapper(obj))
+    return obj
+
+
+def wrap_sync(fn: Callable[P, T]) -> Callable[P, Awaitable[T]]:
+    if inspect.iscoroutinefunction(fn):
+        return fn
+
+    async def wrapped(*args: P.args, **kwargs: P.kwargs) -> T:
+        return await anyio.to_thread.run_sync(partial(fn, *args, **kwargs))
+
+    return wrapped
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
new file mode 100644
index 00000000..26024637
--- /dev/null
+++ b/tests/integration/conftest.py
@@ -0,0 +1,175 @@
+from collections.abc import AsyncGenerator, AsyncIterator
+from pathlib import Path
+from typing import Any
+
+import pytest
+from advanced_alchemy.base import UUIDAuditBase
+from advanced_alchemy.utils.fixtures import open_fixture_async
+from httpx import AsyncClient
+from litestar import Litestar
+from litestar_saq.cli import get_saq_plugin
+from redis.asyncio import Redis
+from sqlalchemy.engine import URL
+from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
+from sqlalchemy.pool import NullPool
+
+from app.config import get_settings
+from app.db.models import Team, User
+from app.domain.accounts.guards import auth
+from app.domain.accounts.services import RoleService, UserService
+from app.domain.teams.services import TeamService
+from app.server.builder import ApplicationConfigurator
+from app.server.plugins import alchemy
+
+here = Path(__file__).parent
+pytestmark = pytest.mark.anyio
+
+
+@pytest.fixture(name="engine", autouse=True)
+async def fx_engine(
+    postgres_docker_ip: str,
+    postgres_service: None,
+    redis_service: None,
+    postgres_port: int,
+    postgres_user: str,
+    postgres_password: str,
+    postgres_database: str,
+) -> AsyncEngine:
+    """Postgresql instance for end-to-end testing.
+
+    Returns:
+        Async SQLAlchemy engine instance.
+    """
+    return create_async_engine(
+        URL(
+            drivername="postgresql+asyncpg",
+            username=postgres_user,
+            password=postgres_password,
+            host=postgres_docker_ip,
+            port=postgres_port,
+            database=postgres_database,
+            query={},  # type:ignore[arg-type]
+        ),
+        echo=False,
+        poolclass=NullPool,
+    )
+
+
+@pytest.fixture(name="sessionmaker")
+def fx_session_maker_factory(engine: AsyncEngine) -> async_sessionmaker[AsyncSession]:
+    return async_sessionmaker(bind=engine, expire_on_commit=False)
+
+
+@pytest.fixture(name="session")
+async def fx_session(sessionmaker: async_sessionmaker[AsyncSession]) -> AsyncGenerator[AsyncSession, None]:
+    async with sessionmaker() as session:
+        yield session
+
+
+@pytest.fixture(autouse=True)
+async def _seed_db(
+    engine: AsyncEngine,
+    sessionmaker: async_sessionmaker[AsyncSession],
+    raw_users: list[User | dict[str, Any]],
+    raw_teams: list[Team | dict[str, Any]],
+) -> AsyncIterator[None]:
+    """Populate test database with seed data.
+
+    Args:
+        engine: The SQLAlchemy engine instance.
+        sessionmaker: The SQLAlchemy sessionmaker factory.
+        raw_users: Test users to add to the database
+        raw_teams: Test teams to add to the database
+
+    """
+
+    settings = get_settings()
+    fixtures_path = Path(settings.db.FIXTURE_PATH)
+    metadata = UUIDAuditBase.registry.metadata
+    async with engine.begin() as conn:
+        await conn.run_sync(metadata.drop_all)
+        await conn.run_sync(metadata.create_all)
+    async with RoleService.new(sessionmaker()) as service:
+        fixture = await open_fixture_async(fixtures_path, "role")
+        for obj in fixture:
+            _ = await service.repository.get_or_upsert(match_fields="name", upsert=True, **obj)
+        await service.repository.session.commit()
+    async with UserService.new(sessionmaker()) as users_service:
+        await users_service.create_many(raw_users, auto_commit=True)
+    async with TeamService.new(sessionmaker()) as teams_services:
+        for obj in raw_teams:
+            await teams_services.create(obj)
+        await teams_services.repository.session.commit()
+
+    yield
+
+
+@pytest.fixture(autouse=True)
+def _patch_db(
+    app: "Litestar",
+    engine: AsyncEngine,
+    sessionmaker: async_sessionmaker[AsyncSession],
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    monkeypatch.setattr(alchemy._config, "session_maker", sessionmaker)
+    if isinstance(alchemy._config, list):
+        monkeypatch.setitem(app.state, alchemy._config[0].engine_app_state_key, engine)
+        monkeypatch.setitem(
+            app.state,
+            alchemy._config[0].session_maker_app_state_key,
+            async_sessionmaker(bind=engine, expire_on_commit=False),
+        )
+    else:
+        monkeypatch.setitem(app.state, alchemy._config.engine_app_state_key, engine)
+        monkeypatch.setitem(
+            app.state,
+            alchemy._config.session_maker_app_state_key,
+            async_sessionmaker(bind=engine, expire_on_commit=False),
+        )
+
+
+@pytest.fixture(autouse=True)
+def _patch_redis(app: "Litestar", redis: Redis, monkeypatch: pytest.MonkeyPatch) -> None:
+    cache_config = app.response_cache_config
+    assert cache_config is not None
+    saq_plugin = get_saq_plugin(app)
+    app_plugin = app.plugins.get(ApplicationConfigurator)
+    monkeypatch.setattr(app_plugin, "redis", redis)
+    monkeypatch.setattr(app.stores.get(cache_config.store), "_redis", redis)
+    if saq_plugin._config.queue_instances is not None:
+        for queue in saq_plugin._config.queue_instances.values():
+            monkeypatch.setattr(queue, "redis", redis)
+
+
+@pytest.fixture(name="client")
+async def fx_client(app: Litestar) -> AsyncIterator[AsyncClient]:
+    """Async client that calls requests on the app.
+
+    ```text
+    ValueError: The future belongs to a different loop than the one specified as the loop argument
+    ```
+    """
+    async with AsyncClient(app=app, base_url="http://testserver") as client:
+        yield client
+
+
+@pytest.fixture(name="superuser_token_headers")
+def fx_superuser_token_headers() -> dict[str, str]:
+    """Valid superuser token.
+
+    ```text
+    ValueError: The future belongs to a different loop than the one specified as the loop argument
+    ```
+    """
+    return {"Authorization": f"Bearer {auth.create_token(identifier='superuser@example.com')}"}
+
+
+@pytest.fixture(name="user_token_headers")
+def fx_user_token_headers() -> dict[str, str]:
+    """Valid user token.
+
+    ```text
+    ValueError: The future belongs to a different loop than the one specified as the loop argument
+    ```
+    """
+    return {"Authorization": f"Bearer {auth.create_token(identifier='user@example.com')}"}
diff --git a/tests/integration/test_access.py b/tests/integration/test_access.py
new file mode 100644
index 00000000..60de3c74
--- /dev/null
+++ b/tests/integration/test_access.py
@@ -0,0 +1,45 @@
+import pytest
+from httpx import AsyncClient
+
+pytestmark = pytest.mark.anyio
+
+
@pytest.mark.parametrize(
    ("username", "password", "expected_status_code"),
    (
        ("superuser@example1.com", "Test_Password1!", 403),
        ("superuser@example.com", "Test_Password1!", 201),
        ("user@example.com", "Test_Password1!", 403),
        ("user@example.com", "Test_Password2!", 201),
        ("inactive@example.com", "Old_Password2!", 403),
        ("inactive@example.com", "Old_Password3!", 403),
    ),
)
async def test_user_login(client: AsyncClient, username: str, password: str, expected_status_code: int) -> None:
    """Login succeeds (201) only for active users with the correct password."""
    credentials = {"username": username, "password": password}
    response = await client.post("/api/access/login", data=credentials)
    assert response.status_code == expected_status_code
+
+
+
@pytest.mark.parametrize(
    ("username", "password"),
    (("superuser@example.com", "Test_Password1!"),),
)
async def test_user_logout(client: AsyncClient, username: str, password: str) -> None:
    """Logging out invalidates the session: /api/me stops being accessible."""
    login = await client.post("/api/access/login", data={"username": username, "password": password})
    assert login.status_code == 201
    # a session token cookie must have been set by the login
    assert dict(login.cookies).get("token") is not None

    before_logout = await client.get("/api/me")
    assert before_logout.status_code == 200

    logout = await client.post("/api/access/logout")
    assert logout.status_code == 200

    # the user can no longer access the /me route.
    after_logout = await client.get("/api/me")
    assert after_logout.status_code == 401
diff --git a/tests/integration/test_account_role.py b/tests/integration/test_account_role.py
new file mode 100644
index 00000000..1b1fcf54
--- /dev/null
+++ b/tests/integration/test_account_role.py
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+    from httpx import AsyncClient
+
+
+pytestmark = pytest.mark.anyio
+
+
async def test_superuser_role_access(
    client: "AsyncClient",
    user_token_headers: dict[str, str],
    superuser_token_headers: dict[str, str],
) -> None:
    """Assigning/revoking the superuser role changes a user's team visibility.

    Order-dependent end-to-end flow: the regular user starts with one visible
    team, gains full access once granted the superuser role, then loses that
    access again when the role is revoked.
    """
    # user should not see all teams to start
    response = await client.get("/api/teams", headers=user_token_headers)
    assert response.status_code == 200
    assert int(response.json()["total"]) == 1

    # assign the role
    response = await client.post(
        "/api/roles/superuser/assign",
        json={"userName": "user@example.com"},
        headers=superuser_token_headers,
    )
    assert response.status_code == 201
    assert response.json()["message"] == "Successfully assigned the 'superuser' role to user@example.com."
    # with the role, the user can now update a team they don't own
    response = await client.patch(
        "/api/teams/81108ac1-ffcb-411d-8b1e-d91833999999",
        json={"name": "TEST UPDATE"},
        headers=user_token_headers,
    )
    assert response.status_code == 200
    # retrieve a team the user is not a member of
    response = await client.get("/api/teams/81108ac1-ffcb-411d-8b1e-d91833999999", headers=user_token_headers)
    assert response.status_code == 200
    response = await client.get("/api/teams", headers=user_token_headers)
    assert response.status_code == 200
    assert int(response.json()["total"]) == 3

    # superuser should see all
    response = await client.get("/api/teams", headers=superuser_token_headers)
    assert response.status_code == 200
    assert int(response.json()["total"]) == 3
    # revoke role now
    response = await client.post(
        "/api/roles/superuser/revoke",
        json={"userName": "user@example.com"},
        headers=superuser_token_headers,
    )
    assert response.status_code == 201
    # without the role, deleting the foreign team is forbidden again
    response = await client.delete("/api/teams/81108ac1-ffcb-411d-8b1e-d91833999999", headers=user_token_headers)
    assert response.status_code == 403
    # deleting the user's own team is still allowed
    response = await client.delete("/api/teams/97108ac1-ffcb-411d-8b1e-d9183399f63b", headers=user_token_headers)
    assert response.status_code == 204
    # retrieve should now fail
    response = await client.get("/api/teams/81108ac1-ffcb-411d-8b1e-d91833999999", headers=user_token_headers)
    assert response.status_code == 403
    # user's own team was deleted above, so no teams remain visible
    # NOTE(review): the original comment said "only see 1 now" but the
    # assertion expects 0 — assertion left as-is; confirm intended count.
    response = await client.get("/api/teams", headers=user_token_headers)
    assert response.status_code == 200
    assert int(response.json()["total"]) == 0
diff --git a/tests/integration/test_accounts.py b/tests/integration/test_accounts.py
new file mode 100644
index 00000000..6ee329cb
--- /dev/null
+++ b/tests/integration/test_accounts.py
@@ -0,0 +1,92 @@
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+    from httpx import AsyncClient
+
+pytestmark = pytest.mark.anyio
+
+
+async def test_update_user_no_auth(client: "AsyncClient") -> None:
+    response = await client.patch("/api/users/97108ac1-ffcb-411d-8b1e-d9183399f63b", json={"name": "TEST UPDATE"})
+    assert response.status_code == 401
+    response = await client.post(
+        "/api/users/",
+        json={"name": "A User", "email": "new-user@example.com", "password": "S3cret!"},
+    )
+    assert response.status_code == 401
+    response = await client.get("/api/users/97108ac1-ffcb-411d-8b1e-d9183399f63b")
+    assert response.status_code == 401
+    response = await client.get("/api/users")
+    assert response.status_code == 401
+    response = await client.delete("/api/users/97108ac1-ffcb-411d-8b1e-d9183399f63b")
+    assert response.status_code == 401
+
+
+async def test_accounts_list(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.get("/api/users", headers=superuser_token_headers)
+    assert response.status_code == 200
+    assert int(response.json()["total"]) > 0
+
+
+async def test_accounts_get(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.get("/api/users/97108ac1-ffcb-411d-8b1e-d9183399f63b", headers=superuser_token_headers)
+    assert response.status_code == 200
+    assert response.json()["email"] == "superuser@example.com"
+
+
+async def test_accounts_create(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.post(
+        "/api/users",
+        json={"name": "A User", "email": "new-user@example.com", "password": "S3cret!"},
+        headers=superuser_token_headers,
+    )
+    assert response.status_code == 201
+
+
+async def test_accounts_update(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.patch(
+        "/api/users/5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2",
+        json={
+            "name": "Name Changed",
+        },
+        headers=superuser_token_headers,
+    )
+    assert response.status_code == 200
+    assert response.json()["name"] == "Name Changed"
+
+
+async def test_accounts_delete(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.delete(
+        "/api/users/5ef29f3c-3560-4d15-ba6b-a2e5c721e4d2",
+        headers=superuser_token_headers,
+    )
+    assert response.status_code == 204
+    # ensure we didn't cascade delete the teams the user owned
+    response = await client.get(
+        "/api/teams/97108ac1-ffcb-411d-8b1e-d9183399f63b",
+        headers=superuser_token_headers,
+    )
+    assert response.status_code == 200
+
+
+async def test_accounts_with_incorrect_role(client: "AsyncClient", user_token_headers: dict[str, str]) -> None:
+    response = await client.patch(
+        "/api/users/97108ac1-ffcb-411d-8b1e-d9183399f63b",
+        json={"name": "TEST UPDATE"},
+        headers=user_token_headers,
+    )
+    assert response.status_code == 403
+    response = await client.post(
+        "/api/users/",
+        json={"name": "A User", "email": "new-user@example.com", "password": "S3cret!"},
+        headers=user_token_headers,
+    )
+    assert response.status_code == 403
+    response = await client.get("/api/users/97108ac1-ffcb-411d-8b1e-d9183399f63b", headers=user_token_headers)
+    assert response.status_code == 403
+    response = await client.get("/api/users", headers=user_token_headers)
+    assert response.status_code == 403
+    response = await client.delete("/api/users/97108ac1-ffcb-411d-8b1e-d9183399f63b", headers=user_token_headers)
+    assert response.status_code == 403
diff --git a/tests/integration/test_health.py b/tests/integration/test_health.py
new file mode 100644
index 00000000..86ed528a
--- /dev/null
+++ b/tests/integration/test_health.py
@@ -0,0 +1,21 @@
+import pytest
+from httpx import AsyncClient
+
+from app.__about__ import __version__
+
+pytestmark = pytest.mark.anyio
+
+
async def test_health(client: AsyncClient) -> None:
    """Health endpoint reports per-component status.

    NOTE(review): this expects HTTP 500 with cache and worker "offline" —
    presumably because no SAQ worker or live Redis runs under test while the
    database does; confirm against the health handler's degradation rules.
    """
    response = await client.get("/health")
    assert response.status_code == 500

    assert response.json() == {
        "database_status": "online",
        "cache_status": "offline",
        "worker_status": "offline",
        "app": "app",
        "version": __version__,
    }
diff --git a/tests/integration/test_teams.py b/tests/integration/test_teams.py
new file mode 100644
index 00000000..666c5a04
--- /dev/null
+++ b/tests/integration/test_teams.py
@@ -0,0 +1,89 @@
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+    from httpx import AsyncClient
+
+pytestmark = pytest.mark.anyio
+
+
+async def test_teams_with_no_auth(client: "AsyncClient") -> None:
+    response = await client.patch("/api/teams/97108ac1-ffcb-411d-8b1e-d9183399f63b", json={"name": "TEST UPDATE"})
+    assert response.status_code == 401
+    response = await client.post(
+        "/api/teams/",
+        json={"name": "A User", "email": "new-user@example.com", "password": "S3cret!"},
+    )
+    assert response.status_code == 401
+    response = await client.get("/api/teams/97108ac1-ffcb-411d-8b1e-d9183399f63b")
+    assert response.status_code == 401
+    response = await client.get("/api/teams")
+    assert response.status_code == 401
+    response = await client.delete("/api/teams/97108ac1-ffcb-411d-8b1e-d9183399f63b")
+    assert response.status_code == 401
+
+
+async def test_teams_with_incorrect_role(client: "AsyncClient", user_token_headers: dict[str, str]) -> None:
+    response = await client.patch(
+        "/api/teams/81108ac1-ffcb-411d-8b1e-d91833999999",
+        json={"name": "TEST UPDATE"},
+        headers=user_token_headers,
+    )
+    assert response.status_code == 403
+    response = await client.post(
+        "/api/teams/",
+        json={"name": "A new team."},
+        headers=user_token_headers,
+    )
+    assert response.status_code == 201
+    response = await client.get("/api/teams/81108ac1-ffcb-411d-8b1e-d91833999999", headers=user_token_headers)
+    assert response.status_code == 403
+    response = await client.get("/api/teams", headers=user_token_headers)
+    assert response.status_code == 200
+    response = await client.delete("/api/teams/81108ac1-ffcb-411d-8b1e-d91833999999", headers=user_token_headers)
+    assert response.status_code == 403
+
+
+async def test_teams_list(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.get("/api/teams", headers=superuser_token_headers)
+    assert response.status_code == 200
+    assert int(response.json()["total"]) > 0
+
+
+async def test_teams_get(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.get("/api/teams/97108ac1-ffcb-411d-8b1e-d9183399f63b", headers=superuser_token_headers)
+    assert response.status_code == 200
+    assert response.json()["name"] == "Test Team"
+
+
+async def test_teams_create(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.post(
+        "/api/teams/",
+        json={"name": "My First Team", "tags": ["cool tag"]},
+        headers=superuser_token_headers,
+    )
+    assert response.status_code == 201
+
+
+async def test_teams_update(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.patch(
+        "/api/teams/97108ac1-ffcb-411d-8b1e-d9183399f63b",
+        json={"name": "Name Changed"},
+        headers=superuser_token_headers,
+    )
+    assert response.status_code == 200
+
+
+async def test_teams_delete(client: "AsyncClient", superuser_token_headers: dict[str, str]) -> None:
+    response = await client.delete(
+        "/api/teams/81108ac1-ffcb-411d-8b1e-d91833999999",
+        headers=superuser_token_headers,
+    )
+    assert response.status_code == 204
+    # ensure we didn't cascade delete the users that were members of the team
+    response = await client.get(
+        "/api/users/5ef29f3c-3560-4d15-ba6b-a2e5c721e999",
+        headers=superuser_token_headers,
+    )
+    assert response.status_code == 200
diff --git a/tests/integration/test_tests.py b/tests/integration/test_tests.py
new file mode 100644
index 00000000..f3f2a9fa
--- /dev/null
+++ b/tests/integration/test_tests.py
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, cast
+
+import pytest
+from httpx import AsyncClient
+from litestar import get
+
+from app.config import app as config
+
+if TYPE_CHECKING:
+    from litestar import Litestar
+    from litestar.stores.redis import RedisStore
+    from redis.asyncio import Redis as AsyncRedis
+    from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession
+
+pytestmark = pytest.mark.anyio
+
+
+def test_cache_on_app(app: "Litestar", redis: "AsyncRedis") -> None:
+    """Test that the app's cache is patched.
+
+    Args:
+        app: The test Litestar instance
+        redis: The test Redis client instance.
+    """
+    assert cast("RedisStore", app.stores.get("response_cache"))._redis is redis
+
+
+def test_engine_on_app(app: "Litestar", engine: "AsyncEngine") -> None:
+    """Test that the app's engine is patched.
+
+    Args:
+        app: The test Litestar instance
+        engine: The test SQLAlchemy engine instance.
+    """
+    assert app.state[config.alchemy.engine_app_state_key] is engine
+
+
async def test_db_session_dependency(app: "Litestar", engine: "AsyncEngine") -> None:
    """Test that handlers receive a session attached to the patched engine.

    The redundant ``@pytest.mark.anyio`` decorator was removed: the
    module-level ``pytestmark = pytest.mark.anyio`` already marks every test
    in this file, matching the sibling tests here.

    Args:
        app: The test Litestar instance
        engine: The patched SQLAlchemy engine instance.
    """

    @get("/db-session-test", opt={"exclude_from_auth": True})
    async def db_session_dependency_patched(db_session: AsyncSession) -> dict[str, str]:
        return {"result": f"{db_session.bind is engine = }"}

    app.register(db_session_dependency_patched)
    # can't use the sync test client as it always starts its own event loop
    async with AsyncClient(app=app, base_url="http://testserver") as client:
        response = await client.get("/db-session-test")
        assert response.json()["result"] == "db_session.bind is engine = True"
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
new file mode 100644
index 00000000..2a1ea880
--- /dev/null
+++ b/tests/unit/conftest.py
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+from litestar import Litestar, get
+from litestar.datastructures import State
+from litestar.enums import ScopeType
+from litestar.testing import AsyncTestClient
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator
+
+    from litestar.types import HTTPResponseBodyEvent, HTTPResponseStartEvent, HTTPScope
+
+pytestmark = pytest.mark.anyio
+
+
+@pytest.fixture(name="client")
+async def fx_client(app: Litestar) -> AsyncGenerator[AsyncTestClient, None]:
+    """Test client fixture for making calls on the global app instance."""
+    try:
+        async with AsyncTestClient(app=app) as client:
+            yield client
+    except Exception:  # noqa: BLE001
+        ...
+
+
@pytest.fixture()
def http_response_start() -> HTTPResponseStartEvent:
    """ASGI ``http.response.start`` event: 200 status, no headers."""
    return {
        "type": "http.response.start",
        "status": 200,
        "headers": [],
    }
+
+
@pytest.fixture()
def http_response_body() -> HTTPResponseBodyEvent:
    """Final ASGI ``http.response.body`` event.

    Note:
        ``more_body`` is ``False`` here, marking the last body message;
        interim body messages would set it to ``True``.
    """
    return {
        "type": "http.response.body",
        "body": b"body",
        "more_body": False,
    }
+
+
@pytest.fixture()
def state() -> State:
    """Fresh, empty Litestar application ``State`` instance."""
    return State()
+
+
@pytest.fixture()
def http_scope(app: Litestar) -> HTTPScope:
    """Minimal ASGI HTTP connection scope.

    Builds a bare-bones GET scope pointing at a throwaway handler so code
    under test has a realistic scope object to inspect.
    """

    # placeholder handler used only to populate ``route_handler``
    @get()
    async def handler() -> None:
        ...

    return {
        "headers": [],
        "app": app,
        "asgi": {"spec_version": "whatever", "version": "3.0"},
        "auth": None,
        "client": None,
        "extensions": None,
        "http_version": "3",
        "path": "/wherever",
        "path_params": {},
        "query_string": b"",
        "raw_path": b"/wherever",
        "root_path": "/",
        "route_handler": handler,
        "scheme": "http",
        "server": None,
        "session": {},
        "state": {},
        "user": None,
        "method": "GET",
        "type": ScopeType.HTTP,
    }
diff --git a/tests/unit/lib/__init__.py b/tests/unit/lib/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/lib/test_cache.py b/tests/unit/lib/test_cache.py
new file mode 100644
index 00000000..3bc352a9
--- /dev/null
+++ b/tests/unit/lib/test_cache.py
@@ -0,0 +1,14 @@
+import pytest
+from litestar.config.response_cache import default_cache_key_builder
+from litestar.testing import RequestFactory
+
+from app.server.builder import ApplicationConfigurator
+
+pytestmark = pytest.mark.anyio
+
+
+def test_cache_key_builder(monkeypatch: "pytest.MonkeyPatch") -> None:
+    monkeypatch.setattr(ApplicationConfigurator, "app_slug", "the-slug")
+    request = RequestFactory().get("/test")
+    default_cache_key = default_cache_key_builder(request)
+    assert ApplicationConfigurator()._cache_key_builder(request) == f"the-slug:{default_cache_key}"
diff --git a/tests/unit/lib/test_crypt.py b/tests/unit/lib/test_crypt.py
new file mode 100644
index 00000000..6b68ad52
--- /dev/null
+++ b/tests/unit/lib/test_crypt.py
@@ -0,0 +1,54 @@
+# pylint: disable=protected-access
+from __future__ import annotations
+
+import base64
+
+import pytest
+
+from app.lib import crypt
+
+pytestmark = pytest.mark.anyio
+
+
@pytest.mark.parametrize(
    ("secret_key", "expected_value"),
    (
        ("test", "test                            "),
        ("test---------------------------", "test--------------------------- "),
        ("test----------------------------", "test----------------------------"),
        ("test-----------------------------", "test-----------------------------"),
        (
            "this is a really long string that exceeds the 32 character padding added.",
            "this is a really long string that exceeds the 32 character padding added.",
        ),
    ),
)
async def test_get_encryption_key(secret_key: str, expected_value: str) -> None:
    """Keys shorter than 32 chars are space-padded; longer keys pass through."""
    encoded = crypt.get_encryption_key(secret_key)
    decoded = base64.urlsafe_b64decode(encoded).decode()
    assert decoded == expected_value
+
+
async def test_get_password_hash() -> None:
    """Hashing accepts both str and bytes and yields argon2 digests."""
    str_secret = "This is a password!"  # noqa: S105
    bytes_secret = b"This is a password too!"

    str_digest = await crypt.get_password_hash(str_secret)
    bytes_digest = await crypt.get_password_hash(bytes_secret)

    assert str_digest.startswith("$argon2")
    assert bytes_digest.startswith("$argon2")
+
+
@pytest.mark.parametrize(
    ("valid_password", "tested_password", "expected_result"),
    (("SuperS3cret123456789!!", "SuperS3cret123456789!!", True), ("SuperS3cret123456789!!", "Invalid!!", False)),
)
async def test_verify_password(valid_password: str, tested_password: str, expected_result: bool) -> None:
    """verify_password accepts only the password that produced the hash."""
    digest = await crypt.get_password_hash(valid_password)
    result = await crypt.verify_password(tested_password, digest)
    assert result == expected_result
diff --git a/tests/unit/lib/test_dependencies.py b/tests/unit/lib/test_dependencies.py
new file mode 100644
index 00000000..07606f1e
--- /dev/null
+++ b/tests/unit/lib/test_dependencies.py
@@ -0,0 +1,214 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from datetime import datetime
+from typing import TYPE_CHECKING, Annotated, Literal
+from uuid import uuid4
+
+import pytest
+from advanced_alchemy.filters import (
+    BeforeAfter,
+    CollectionFilter,
+    FilterTypes,
+    LimitOffset,
+    OrderBy,
+    SearchFilter,
+)
+from litestar import Litestar, get
+from litestar.params import Dependency
+from litestar.testing import AsyncTestClient, RequestFactory
+
+from app.db.models import User
+from app.domain.accounts.dependencies import provide_user
+from app.lib import dependencies
+
+if TYPE_CHECKING:
+    from collections import abc
+
+pytestmark = pytest.mark.anyio
+
+
@dataclass
class MessageTest:
    """Minimal serializable payload returned by the ad-hoc routes in these tests."""

    # arbitrary string the test routes echo back
    test_attr: str
+
+
async def test_provide_user_dependency() -> None:
    """provide_user returns exactly the user attached to the request."""
    expected_user = User()
    request = RequestFactory(app=Litestar(route_handlers=[])).get("/", user=expected_user)
    resolved = await provide_user(request)
    assert resolved is expected_user
+
+
def test_id_filter() -> None:
    """provide_id_filter wraps the given ids in a CollectionFilter on ``id``."""
    ids = [uuid4(), uuid4(), uuid4()]
    expected = CollectionFilter(field_name="id", values=ids)
    assert dependencies.provide_id_filter(ids) == expected
+
+
@pytest.mark.parametrize(
    ("filter_", "field_name"),
    [(dependencies.provide_created_filter, "created_at"), (dependencies.provide_updated_filter, "updated_at")],
)
def test_before_after_filters(filter_: "abc.Callable[[datetime, datetime], BeforeAfter]", field_name: str) -> None:
    """created/updated providers build a BeforeAfter on the right column."""
    result = filter_(datetime.max, datetime.min)
    assert result == BeforeAfter(field_name=field_name, before=datetime.max, after=datetime.min)
+
+
@pytest.mark.parametrize(
    ("filter_", "field_name", "search_string", "ignore_case"),
    [
        (dependencies.provide_search_filter, "first_name", "co", True),
        (dependencies.provide_search_filter, "last_name", "Fin", False),
    ],
)
def test_search_filters(
    filter_: "abc.Callable[[str, str,bool], SearchFilter]",
    field_name: str,
    search_string: str,
    ignore_case: bool,
) -> None:
    """provide_search_filter carries field, value and case flag through."""
    result = filter_(field_name, search_string, ignore_case)
    expected = SearchFilter(field_name=field_name, value=search_string, ignore_case=ignore_case)
    assert result == expected
+
+
@pytest.mark.parametrize(
    ("filter_", "field_name", "sort_order"),
    [
        (dependencies.provide_order_by, "first_name", "asc"),
        (dependencies.provide_order_by, "last_name", "desc"),
    ],
)
def test_order_by(
    filter_: "abc.Callable[[str, Literal['asc','desc']], OrderBy]",
    field_name: str,
    sort_order: Literal["asc", "desc"],
) -> None:
    """provide_order_by builds an OrderBy with the requested field and order."""
    result = filter_(field_name, sort_order)
    assert result == OrderBy(field_name=field_name, sort_order=sort_order)
+
+
def test_limit_offset_pagination() -> None:
    """Page 10 at size 100 translates to limit=100, offset=900."""
    pagination = dependencies.provide_limit_offset_pagination(10, 100)
    assert pagination == LimitOffset(100, 900)
+
+
async def test_provided_filters(app: Litestar, client: AsyncTestClient) -> None:
    """Individually-injected filter dependencies are built from query params.

    Registers an ad-hoc route that asserts each injected filter matches the
    camelCase query parameters sent; ``called`` proves the handler ran.
    """
    called = False
    path = f"/{uuid4()}"  # unique path so repeated registration never collides
    ids = [uuid4() for _ in range(2)]

    @get(
        path,
        opt={"exclude_from_auth": True},
    )
    async def filtered_collection_route(
        created_filter: BeforeAfter,
        updated_filter: BeforeAfter,
        limit_offset: LimitOffset,
        id_filter: CollectionFilter,
    ) -> MessageTest:
        nonlocal called
        assert created_filter == BeforeAfter("created_at", datetime.max, datetime.min)
        assert updated_filter == BeforeAfter("updated_at", datetime.max, datetime.min)
        # currentPage=10, pageSize=2 -> limit 2, offset 18
        assert limit_offset == LimitOffset(2, 18)
        assert id_filter == CollectionFilter("id", ids)
        called = True
        return MessageTest(test_attr="yay")

    app.register(filtered_collection_route)
    _response = await client.get(
        path,
        params={
            "createdBefore": datetime.max.isoformat(),
            "createdAfter": datetime.min.isoformat(),
            "updatedBefore": datetime.max.isoformat(),
            "updatedAfter": datetime.min.isoformat(),
            "currentPage": 10,
            "pageSize": 2,
            "ids": [str(id_) for id_ in ids],
        },
    )
    # the in-handler asserts only count if the handler actually executed
    assert called
+
+
+async def test_filters_dependency(app: "Litestar", client: "AsyncTestClient") -> None:
+    called = False
+    path = f"/{uuid4()}"
+    ids = [uuid4() for _ in range(2)]
+
+    @get(path=path, opt={"exclude_from_auth": True}, signature_namespace={"Dependency": Dependency})
+    async def filtered_collection_route(
+        filters: Annotated[list[FilterTypes], Dependency(skip_validation=True)],
+    ) -> MessageTest:
+        nonlocal called
+        assert filters == [
+            CollectionFilter(field_name="id", values=ids),
+            BeforeAfter(field_name="created_at", before=datetime.max, after=datetime.min),
+            LimitOffset(limit=2, offset=18),
+            BeforeAfter(field_name="updated_at", before=datetime.max, after=datetime.min),
+            SearchFilter(field_name="my_field", value="SearchString"),
+            OrderBy(field_name="my_col", sort_order="desc"),
+        ]
+        called = True
+        return MessageTest(test_attr="yay")
+
+    app.debug = True
+    app.register(filtered_collection_route)
+    _response = await client.get(
+        path,
+        params={
+            "createdBefore": datetime.max.isoformat(),
+            "createdAfter": datetime.min.isoformat(),
+            "updatedBefore": datetime.max.isoformat(),
+            "updatedAfter": datetime.min.isoformat(),
+            "currentPage": 10,
+            "pageSize": 2,
+            "ids": [str(id_) for id_ in ids],
+            "orderBy": "my_col",
+            "searchField": "my_field",
+            "searchString": "SearchString",
+        },
+    )
+    assert called
+
+
+async def test_filters_dependency_no_ids(app: "Litestar", client: "AsyncTestClient") -> None:
+    called = False
+    path = f"/{uuid4()}"
+    [uuid4() for _ in range(2)]
+
+    @get(path=path, opt={"exclude_from_auth": True})
+    async def filtered_collection_route(
+        filters: Annotated[list[FilterTypes], Dependency(skip_validation=True)],
+    ) -> MessageTest:
+        nonlocal called
+        assert filters == [
+            BeforeAfter(field_name="created_at", before=datetime.max, after=datetime.min),
+            LimitOffset(limit=2, offset=18),
+            BeforeAfter(field_name="updated_at", before=datetime.max, after=datetime.min),
+            SearchFilter(field_name="my_field", value="SearchString"),
+            OrderBy(field_name="my_col", sort_order="desc"),
+        ]
+        called = True
+        return MessageTest(test_attr="yay")
+
+    app.debug = True
+    app.register(filtered_collection_route)
+    _response = await client.get(
+        path,
+        params={
+            "createdBefore": datetime.max.isoformat(),
+            "createdAfter": datetime.min.isoformat(),
+            "updatedBefore": datetime.max.isoformat(),
+            "updatedAfter": datetime.min.isoformat(),
+            "currentPage": 10,
+            "pageSize": 2,
+            "orderBy": "my_col",
+            "searchField": "my_field",
+            "searchString": "SearchString",
+        },
+    )
+    assert called
diff --git a/tests/unit/lib/test_exceptions.py b/tests/unit/lib/test_exceptions.py
new file mode 100644
index 00000000..39a08c6d
--- /dev/null
+++ b/tests/unit/lib/test_exceptions.py
@@ -0,0 +1,93 @@
+from typing import TYPE_CHECKING
+from unittest.mock import ANY, MagicMock
+
+import pytest
+from litestar import Litestar, get
+from litestar.repository.exceptions import ConflictError, NotFoundError
+from litestar.status_codes import (
+    HTTP_403_FORBIDDEN,
+    HTTP_404_NOT_FOUND,
+    HTTP_409_CONFLICT,
+    HTTP_500_INTERNAL_SERVER_ERROR,
+)
+from litestar.testing import RequestFactory, create_test_client
+
+from app.lib import exceptions
+from app.lib.exceptions import ApplicationError
+
+if TYPE_CHECKING:
+    from collections import abc
+
+
+pytestmark = pytest.mark.anyio
+
+
def test_after_exception_hook_handler_called(monkeypatch: pytest.MonkeyPatch) -> None:
    """The after-exception hook binds the exception info into the log context."""
    bind_mock = MagicMock()
    monkeypatch.setattr(exceptions, "bind_contextvars", bind_mock)
    exc = RuntimeError()

    @get("/error")
    async def raises() -> None:
        raise exc

    with create_test_client(
        route_handlers=[raises],
        after_exception=[exceptions.after_exception_hook_handler],
    ) as client:
        assert client.get("/error").status_code == HTTP_500_INTERNAL_SERVER_ERROR

    bind_mock.assert_called_once_with(exc_info=(RuntimeError, exc, ANY))
+
+
@pytest.mark.parametrize(
    ("exc", "status"),
    [
        (ConflictError, HTTP_409_CONFLICT),
        (NotFoundError, HTTP_404_NOT_FOUND),
        (ApplicationError, HTTP_500_INTERNAL_SERVER_ERROR),
    ],
)
def test_repository_exception_to_http_response(exc: type[ApplicationError], status: int) -> None:
    """Repository exceptions map to their corresponding HTTP status codes."""
    request = RequestFactory(app=Litestar(route_handlers=[]), server="testserver").get("/wherever")
    http_response = exceptions.exception_to_http_response(request, exc())
    assert http_response.status_code == status
+
+
@pytest.mark.parametrize(
    ("exc", "status", "debug"),
    [
        (exceptions.AuthorizationError, HTTP_403_FORBIDDEN, True),
        (exceptions.AuthorizationError, HTTP_403_FORBIDDEN, False),
        (exceptions.ApplicationError, HTTP_500_INTERNAL_SERVER_ERROR, False),
    ],
)
def test_exception_to_http_response(exc: type[exceptions.ApplicationError], status: int, debug: bool) -> None:
    """Application exceptions map to the right status in and out of debug mode."""
    request = RequestFactory(app=Litestar(route_handlers=[], debug=debug), server="testserver").get("/wherever")
    http_response = exceptions.exception_to_http_response(request, exc())
    assert http_response.status_code == status
+
+
@pytest.mark.parametrize(
    ("exc", "fn", "expected_message"),
    [
        (
            exceptions.ApplicationError("message"),
            exceptions.exception_to_http_response,
            b"app.lib.exceptions.ApplicationError: message\n",
        ),
    ],
)
def test_exception_serves_debug_middleware_response(
    exc: Exception,
    fn: "abc.Callable",
    expected_message: bytes,
) -> None:
    """In debug mode the response body carries the exception's message."""
    debug_app = Litestar(route_handlers=[], debug=True)
    request = RequestFactory(app=debug_app, server="testserver").get("/wherever")
    http_response = fn(request, exc)
    assert http_response.content == expected_message.decode()
diff --git a/tests/unit/lib/test_schema.py b/tests/unit/lib/test_schema.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/lib/test_settings.py b/tests/unit/lib/test_settings.py
new file mode 100644
index 00000000..17dac298
--- /dev/null
+++ b/tests/unit/lib/test_settings.py
@@ -0,0 +1,12 @@
+import pytest
+
+from app.config import get_settings
+
+pytestmark = pytest.mark.anyio
+
+
def test_app_slug() -> None:
    """The app name is slugified: lowercased, punctuation dropped, hyphenated."""
    settings = get_settings()
    settings.app.NAME = "My Application!"
    assert settings.app.slug == "my-application"
diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py
new file mode 100644
index 00000000..f6bc81a5
--- /dev/null
+++ b/tests/unit/test_cli.py
@@ -0,0 +1,7 @@
+import pytest
+from click.testing import CliRunner
+
+
@pytest.fixture()
def cli_runner() -> CliRunner:
    """Provide a fresh ``click`` ``CliRunner`` for invoking CLI commands in tests."""
    return CliRunner()