mirror of
https://github.com/dat515-2025/Group-8.git
synced 2026-03-22 23:20:56 +01:00
Compare commits
1 Commits
merge/prom
...
d908a0843a
| Author | SHA1 | Date | |
|---|---|---|---|
| d908a0843a |
105
.github/workflows/build-image.yaml
vendored
105
.github/workflows/build-image.yaml
vendored
@@ -1,105 +0,0 @@
|
|||||||
name: Build and Push Image
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
mode:
|
|
||||||
description: "Build mode: 'prod' or 'pr'"
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
image_repo:
|
|
||||||
description: "Docker image repository (e.g., user/app)"
|
|
||||||
required: false
|
|
||||||
default: "lukastrkan/cc-app-demo"
|
|
||||||
type: string
|
|
||||||
context:
|
|
||||||
description: "Docker build context path"
|
|
||||||
required: false
|
|
||||||
default: "7project/backend"
|
|
||||||
type: string
|
|
||||||
pr_number:
|
|
||||||
description: "PR number (required when mode=pr)"
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
secrets:
|
|
||||||
DOCKER_USER:
|
|
||||||
required: true
|
|
||||||
DOCKER_PASSWORD:
|
|
||||||
required: true
|
|
||||||
outputs:
|
|
||||||
digest:
|
|
||||||
description: "Built image digest"
|
|
||||||
value: ${{ jobs.build.outputs.digest }}
|
|
||||||
image_repo:
|
|
||||||
description: "Image repository used"
|
|
||||||
value: ${{ jobs.build.outputs.image_repo }}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
digest: ${{ steps.set.outputs.digest }}
|
|
||||||
image_repo: ${{ steps.set.outputs.image_repo }}
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@v3
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
id: buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
|
|
||||||
- name: Log in to Docker Hub
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKER_USER }}
|
|
||||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
|
||||||
|
|
||||||
- name: Compute image repo and tags
|
|
||||||
id: meta
|
|
||||||
env:
|
|
||||||
MODE: ${{ inputs.mode }}
|
|
||||||
IMAGE_REPO: ${{ inputs.image_repo }}
|
|
||||||
PR: ${{ inputs.pr_number }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
if [ -z "${IMAGE_REPO:-}" ]; then IMAGE_REPO="lukastrkan/cc-app-demo"; fi
|
|
||||||
echo "IMAGE_REPO=$IMAGE_REPO" >> $GITHUB_ENV
|
|
||||||
SHA_SHORT="${GITHUB_SHA::12}"
|
|
||||||
case "$MODE" in
|
|
||||||
prod)
|
|
||||||
TAG1="prod-$SHA_SHORT"
|
|
||||||
TAG2="latest"
|
|
||||||
;;
|
|
||||||
pr)
|
|
||||||
if [ -z "${PR:-}" ]; then echo "pr_number input is required for mode=pr"; exit 1; fi
|
|
||||||
TAG1="pr-$PR"
|
|
||||||
TAG2="pr-$PR-$SHA_SHORT"
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
echo "Unknown mode '$MODE' (expected 'prod' or 'pr')"; exit 1;
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
echo "TAG1=$TAG1" >> $GITHUB_ENV
|
|
||||||
echo "TAG2=$TAG2" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Build and push image
|
|
||||||
id: build
|
|
||||||
uses: docker/build-push-action@v5
|
|
||||||
with:
|
|
||||||
context: ${{ inputs.context }}
|
|
||||||
push: true
|
|
||||||
tags: |
|
|
||||||
${{ env.IMAGE_REPO }}:${{ env.TAG1 }}
|
|
||||||
${{ env.IMAGE_REPO }}:${{ env.TAG2 }}
|
|
||||||
platforms: linux/amd64
|
|
||||||
|
|
||||||
- name: Set outputs
|
|
||||||
id: set
|
|
||||||
env:
|
|
||||||
IMAGE_REPO: ${{ env.IMAGE_REPO }}
|
|
||||||
run: |
|
|
||||||
echo "digest=${{ steps.build.outputs.digest }}" >> $GITHUB_OUTPUT
|
|
||||||
echo "image_repo=$IMAGE_REPO" >> $GITHUB_OUTPUT
|
|
||||||
158
.github/workflows/deploy-pr.yaml
vendored
158
.github/workflows/deploy-pr.yaml
vendored
@@ -1,158 +0,0 @@
|
|||||||
name: Deploy Preview (PR)
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types: [opened, reopened, synchronize, closed]
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
name: Run Python Tests
|
|
||||||
if: github.event.action != 'closed'
|
|
||||||
uses: ./.github/workflows/run-tests.yml
|
|
||||||
|
|
||||||
build:
|
|
||||||
if: github.event.action != 'closed'
|
|
||||||
name: Build and push image (reusable)
|
|
||||||
uses: ./.github/workflows/build-image.yaml
|
|
||||||
with:
|
|
||||||
mode: pr
|
|
||||||
image_repo: lukastrkan/cc-app-demo
|
|
||||||
context: 7project/backend
|
|
||||||
pr_number: ${{ github.event.pull_request.number }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
get_urls:
|
|
||||||
if: github.event.action != 'closed'
|
|
||||||
name: Generate Preview URLs
|
|
||||||
uses: ./.github/workflows/url_generator.yml
|
|
||||||
with:
|
|
||||||
runner: vhs
|
|
||||||
mode: pr
|
|
||||||
pr_number: ${{ github.event.pull_request.number }}
|
|
||||||
base_domain: ${{ vars.DEV_BASE_DOMAIN }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
frontend:
|
|
||||||
if: github.event.action != 'closed'
|
|
||||||
name: Frontend - Build and Deploy to Cloudflare Pages (PR)
|
|
||||||
needs: [get_urls]
|
|
||||||
uses: ./.github/workflows/frontend-pages.yml
|
|
||||||
with:
|
|
||||||
mode: pr
|
|
||||||
pr_number: ${{ github.event.pull_request.number }}
|
|
||||||
backend_url_scheme: ${{ needs.get_urls.outputs.backend_url_scheme }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
deploy:
|
|
||||||
if: github.event.action != 'closed'
|
|
||||||
name: Helm upgrade/install (PR preview)
|
|
||||||
runs-on: vhs
|
|
||||||
concurrency:
|
|
||||||
group: pr-${{ github.event.pull_request.number }}
|
|
||||||
cancel-in-progress: false
|
|
||||||
needs: [build, frontend, get_urls]
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Helm
|
|
||||||
uses: azure/setup-helm@v4
|
|
||||||
|
|
||||||
- name: Setup kubectl
|
|
||||||
uses: azure/setup-kubectl@v4
|
|
||||||
|
|
||||||
- name: Configure kubeconfig
|
|
||||||
env:
|
|
||||||
KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
|
|
||||||
run: |
|
|
||||||
mkdir -p ~/.kube
|
|
||||||
if [ -z "$KUBE_CONFIG" ]; then
|
|
||||||
echo "Secret KUBE_CONFIG is required (kubeconfig content)"; exit 1; fi
|
|
||||||
echo "$KUBE_CONFIG" > ~/.kube/config
|
|
||||||
chmod 600 ~/.kube/config
|
|
||||||
|
|
||||||
- name: Helm upgrade/install PR preview
|
|
||||||
env:
|
|
||||||
DEV_BASE_DOMAIN: ${{ secrets.BASE_DOMAIN }}
|
|
||||||
RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
|
|
||||||
DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
|
|
||||||
DIGEST: ${{ needs.build.outputs.digest }}
|
|
||||||
DOMAIN: "${{ needs.get_urls.outputs.backend_url }}"
|
|
||||||
DOMAIN_SCHEME: "${{ needs.get_urls.outputs.backend_url_scheme }}"
|
|
||||||
FRONTEND_DOMAIN: "${{ needs.get_urls.outputs.frontend_url }}"
|
|
||||||
FRONTEND_DOMAIN_SCHEME: "${{ needs.get_urls.outputs.frontend_url_scheme }}"
|
|
||||||
run: |
|
|
||||||
PR=${{ github.event.pull_request.number }}
|
|
||||||
RELEASE=myapp-pr-$PR
|
|
||||||
NAMESPACE=pr-$PR
|
|
||||||
helm upgrade --install "$RELEASE" ./7project/charts/myapp-chart \
|
|
||||||
-n "$NAMESPACE" --create-namespace \
|
|
||||||
-f 7project/charts/myapp-chart/values-dev.yaml \
|
|
||||||
--set prNumber="$PR" \
|
|
||||||
--set deployment="pr-$PR" \
|
|
||||||
--set domain="$DOMAIN" \
|
|
||||||
--set domain_scheme="$DOMAIN_SCHEME" \
|
|
||||||
--set frontend_domain="$FRONTEND_DOMAIN" \
|
|
||||||
--set frontend_domain_scheme="$FRONTEND_DOMAIN_SCHEME" \
|
|
||||||
--set image.digest="$DIGEST" \
|
|
||||||
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
|
||||||
--set-string database.password="$DB_PASSWORD" \
|
|
||||||
--set-string database.encryptionSecret="$PR" \
|
|
||||||
--set-string app.name="finance-tracker-pr-$PR"
|
|
||||||
|
|
||||||
- name: Post preview URLs as PR comment
|
|
||||||
uses: actions/github-script@v7
|
|
||||||
env:
|
|
||||||
BACKEND_URL: ${{ needs.get_urls.outputs.backend_url_scheme }}
|
|
||||||
FRONTEND_URL: ${{ needs.get_urls.outputs.frontend_url_scheme }}
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const pr = context.payload.pull_request;
|
|
||||||
if (!pr) { core.setFailed('No pull_request context'); return; }
|
|
||||||
const prNumber = pr.number;
|
|
||||||
const backendUrl = process.env.BACKEND_URL || '(not available)';
|
|
||||||
const frontendUrl = process.env.FRONTEND_URL || '(not available)';
|
|
||||||
const marker = '<!-- preview-comment-marker -->';
|
|
||||||
const body = `${marker}\nPreview environment is running\n- Frontend: ${frontendUrl}\n- Backend: ${backendUrl}\n`;
|
|
||||||
const { owner, repo } = context.repo;
|
|
||||||
const { data: comments } = await github.rest.issues.listComments({ owner, repo, issue_number: prNumber, per_page: 100 });
|
|
||||||
const existing = comments.find(c => c.body && c.body.includes(marker));
|
|
||||||
if (existing) {
|
|
||||||
await github.rest.issues.updateComment({ owner, repo, comment_id: existing.id, body });
|
|
||||||
} else {
|
|
||||||
await github.rest.issues.createComment({ owner, repo, issue_number: prNumber, body });
|
|
||||||
}
|
|
||||||
|
|
||||||
uninstall:
|
|
||||||
if: github.event.action == 'closed'
|
|
||||||
name: Helm uninstall (PR preview)
|
|
||||||
runs-on: vhs
|
|
||||||
steps:
|
|
||||||
- name: Setup Helm
|
|
||||||
uses: azure/setup-helm@v4
|
|
||||||
|
|
||||||
- name: Setup kubectl
|
|
||||||
uses: azure/setup-kubectl@v4
|
|
||||||
|
|
||||||
- name: Configure kubeconfig
|
|
||||||
env:
|
|
||||||
KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
|
|
||||||
run: |
|
|
||||||
mkdir -p ~/.kube
|
|
||||||
if [ -z "$KUBE_CONFIG" ]; then
|
|
||||||
echo "Secret KUBE_CONFIG is required (kubeconfig content)"; exit 1; fi
|
|
||||||
echo "$KUBE_CONFIG" > ~/.kube/config
|
|
||||||
chmod 600 ~/.kube/config
|
|
||||||
|
|
||||||
- name: Helm uninstall release and cleanup namespace
|
|
||||||
run: |
|
|
||||||
PR=${{ github.event.pull_request.number }}
|
|
||||||
RELEASE=myapp-pr-$PR
|
|
||||||
NAMESPACE=pr-$PR
|
|
||||||
helm uninstall "$RELEASE" -n "$NAMESPACE" || true
|
|
||||||
# Optionally delete the namespace if empty
|
|
||||||
kubectl delete namespace "$NAMESPACE" --ignore-not-found=true || true
|
|
||||||
114
.github/workflows/deploy-prod.yaml
vendored
114
.github/workflows/deploy-prod.yaml
vendored
@@ -1,114 +0,0 @@
|
|||||||
name: Deploy Prod
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [ "main" ]
|
|
||||||
paths:
|
|
||||||
- 7project/backend/**
|
|
||||||
- 7project/frontend/**
|
|
||||||
- 7project/charts/myapp-chart/**
|
|
||||||
- .github/workflows/deploy-prod.yaml
|
|
||||||
- .github/workflows/build-image.yaml
|
|
||||||
- .github/workflows/frontend-pages.yml
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: deploy-prod
|
|
||||||
cancel-in-progress: false
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
name: Run Python Tests
|
|
||||||
uses: ./.github/workflows/run-tests.yml
|
|
||||||
|
|
||||||
build:
|
|
||||||
name: Build and push image (reusable)
|
|
||||||
uses: ./.github/workflows/build-image.yaml
|
|
||||||
with:
|
|
||||||
mode: prod
|
|
||||||
image_repo: lukastrkan/cc-app-demo
|
|
||||||
context: 7project/backend
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
get_urls:
|
|
||||||
name: Generate Production URLs
|
|
||||||
uses: ./.github/workflows/url_generator.yml
|
|
||||||
with:
|
|
||||||
mode: prod
|
|
||||||
runner: vhs
|
|
||||||
base_domain: ${{ vars.PROD_DOMAIN }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
frontend:
|
|
||||||
name: Frontend - Build and Deploy to Cloudflare Pages (prod)
|
|
||||||
needs: [get_urls]
|
|
||||||
uses: ./.github/workflows/frontend-pages.yml
|
|
||||||
with:
|
|
||||||
mode: prod
|
|
||||||
backend_url_scheme: ${{ needs.get_urls.outputs.backend_url_scheme }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
deploy:
|
|
||||||
name: Helm upgrade/install (prod)
|
|
||||||
runs-on: vhs
|
|
||||||
needs: [build, frontend, get_urls]
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Helm
|
|
||||||
uses: azure/setup-helm@v4
|
|
||||||
|
|
||||||
- name: Setup kubectl
|
|
||||||
uses: azure/setup-kubectl@v4
|
|
||||||
|
|
||||||
- name: Configure kubeconfig
|
|
||||||
env:
|
|
||||||
KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
|
|
||||||
run: |
|
|
||||||
mkdir -p ~/.kube
|
|
||||||
if [ -z "$KUBE_CONFIG" ]; then
|
|
||||||
echo "Secret KUBE_CONFIG is required (kubeconfig content)"; exit 1; fi
|
|
||||||
echo "$KUBE_CONFIG" > ~/.kube/config
|
|
||||||
chmod 600 ~/.kube/config
|
|
||||||
|
|
||||||
- name: Helm upgrade/install prod
|
|
||||||
env:
|
|
||||||
DOMAIN: ${{ needs.get_urls.outputs.backend_url }}
|
|
||||||
DOMAIN_SCHEME: ${{ needs.get_urls.outputs.backend_url_scheme }}
|
|
||||||
FRONTEND_DOMAIN: ${{ needs.get_urls.outputs.frontend_url }}
|
|
||||||
FRONTEND_DOMAIN_SCHEME: ${{ needs.get_urls.outputs.frontend_url_scheme }}
|
|
||||||
RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
|
|
||||||
DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
|
|
||||||
DIGEST: ${{ needs.build.outputs.digest }}
|
|
||||||
BANKID_CLIENT_ID: ${{ secrets.BANKID_CLIENT_ID }}
|
|
||||||
BANKID_CLIENT_SECRET: ${{ secrets.BANKID_CLIENT_SECRET }}
|
|
||||||
MOJEID_CLIENT_ID: ${{ secrets.MOJEID_CLIENT_ID }}
|
|
||||||
MOJEID_CLIENT_SECRET: ${{ secrets.MOJEID_CLIENT_SECRET }}
|
|
||||||
CSAS_CLIENT_ID: ${{ secrets.CSAS_CLIENT_ID }}
|
|
||||||
CSAS_CLIENT_SECRET: ${{ secrets.CSAS_CLIENT_SECRET }}
|
|
||||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
|
||||||
run: |
|
|
||||||
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
|
||||||
-n prod --create-namespace \
|
|
||||||
-f 7project/charts/myapp-chart/values-prod.yaml \
|
|
||||||
--set deployment="prod" \
|
|
||||||
--set domain="$DOMAIN" \
|
|
||||||
--set domain_scheme="$DOMAIN_SCHEME" \
|
|
||||||
--set frontend_domain="$FRONTEND_DOMAIN" \
|
|
||||||
--set frontend_domain_scheme="$FRONTEND_DOMAIN_SCHEME" \
|
|
||||||
--set image.digest="$DIGEST" \
|
|
||||||
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
|
||||||
--set-string database.password="$DB_PASSWORD" \
|
|
||||||
--set-string oauth.bankid.clientId="$BANKID_CLIENT_ID" \
|
|
||||||
--set-string oauth.bankid.clientSecret="$BANKID_CLIENT_SECRET" \
|
|
||||||
--set-string oauth.mojeid.clientId="$MOJEID_CLIENT_ID" \
|
|
||||||
--set-string oauth.mojeid.clientSecret="$MOJEID_CLIENT_SECRET" \
|
|
||||||
--set-string oauth.csas.clientId="$CSAS_CLIENT_ID" \
|
|
||||||
--set-string oauth.csas.clientSecret="$CSAS_CLIENT_SECRET" \
|
|
||||||
--set-string sentry_dsn="$SENTRY_DSN" \
|
|
||||||
--set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}"
|
|
||||||
135
.github/workflows/frontend-pages.yml
vendored
135
.github/workflows/frontend-pages.yml
vendored
@@ -1,135 +0,0 @@
|
|||||||
name: Frontend - Build and Deploy to Cloudflare Pages
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
mode:
|
|
||||||
description: "Build mode: 'prod' or 'pr'"
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
pr_number:
|
|
||||||
description: 'PR number (required when mode=pr)'
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
project_name:
|
|
||||||
description: 'Cloudflare Pages project name (overrides default)'
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
backend_url_scheme:
|
|
||||||
description: 'The full scheme URL for the backend (e.g., https://api.example.com)'
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
secrets:
|
|
||||||
CLOUDFLARE_API_TOKEN:
|
|
||||||
required: true
|
|
||||||
CLOUDFLARE_ACCOUNT_ID:
|
|
||||||
required: true
|
|
||||||
outputs:
|
|
||||||
deployed_url:
|
|
||||||
description: 'URL of deployed frontend'
|
|
||||||
value: ${{ jobs.deploy.outputs.deployed_url }}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
name: Build frontend
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
working-directory: 7project/frontend
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Use Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: '20'
|
|
||||||
cache: 'npm'
|
|
||||||
cache-dependency-path: 7project/frontend/package-lock.json
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: npm ci
|
|
||||||
|
|
||||||
- name: Set backend URL from workflow input
|
|
||||||
run: |
|
|
||||||
echo "VITE_BACKEND_URL=${{ inputs.backend_url_scheme }}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Build
|
|
||||||
run: npm run build
|
|
||||||
|
|
||||||
- name: Upload build artifact
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: frontend-dist
|
|
||||||
path: 7project/frontend/dist
|
|
||||||
|
|
||||||
deploy:
|
|
||||||
name: Deploy to Cloudflare Pages
|
|
||||||
needs: build
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
deployed_url: ${{ steps.out.outputs.deployed_url }}
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Download build artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: frontend-dist
|
|
||||||
path: dist
|
|
||||||
|
|
||||||
- name: Determine project name and branch
|
|
||||||
id: pname
|
|
||||||
env:
|
|
||||||
INPUT_MODE: ${{ inputs.mode }}
|
|
||||||
INPUT_PR: ${{ inputs.pr_number }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
# Prefer manual input, then repo variable, fallback to repo-name
|
|
||||||
INPUT_NAME='${{ inputs.project_name }}'
|
|
||||||
VAR_NAME='${{ vars.CF_PAGES_PROJECT_NAME }}'
|
|
||||||
if [ -n "$INPUT_NAME" ]; then PNAME_RAW="$INPUT_NAME";
|
|
||||||
elif [ -n "$VAR_NAME" ]; then PNAME_RAW="$VAR_NAME";
|
|
||||||
else PNAME_RAW="${GITHUB_REPOSITORY##*/}-frontend"; fi
|
|
||||||
# Normalize project name to lowercase to satisfy Cloudflare Pages naming
|
|
||||||
PNAME="${PNAME_RAW,,}"
|
|
||||||
# Determine branch for Pages
|
|
||||||
if [ "${INPUT_MODE}" = "pr" ]; then
|
|
||||||
if [ -z "${INPUT_PR}" ]; then echo "pr_number is required when mode=pr"; exit 1; fi
|
|
||||||
PBRANCH="pr-${INPUT_PR}"
|
|
||||||
else
|
|
||||||
PBRANCH="main"
|
|
||||||
fi
|
|
||||||
echo "project_name=$PNAME" >> $GITHUB_OUTPUT
|
|
||||||
echo "branch=$PBRANCH" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Ensure Cloudflare Pages project exists
|
|
||||||
env:
|
|
||||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
|
||||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
|
||||||
PNAME: ${{ steps.pname.outputs.project_name }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
npx wrangler pages project create "$PNAME" --production-branch=main || true
|
|
||||||
|
|
||||||
- name: Deploy using Cloudflare Wrangler
|
|
||||||
uses: cloudflare/wrangler-action@v3
|
|
||||||
with:
|
|
||||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
|
||||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
|
||||||
command: pages deploy dist --project-name=${{ steps.pname.outputs.project_name }} --branch=${{ steps.pname.outputs.branch }}
|
|
||||||
|
|
||||||
- name: Compute deployed URL
|
|
||||||
id: out
|
|
||||||
env:
|
|
||||||
PNAME: ${{ steps.pname.outputs.project_name }}
|
|
||||||
PBRANCH: ${{ steps.pname.outputs.branch }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
if [ "$PBRANCH" = "main" ]; then
|
|
||||||
URL="https://${PNAME}.pages.dev"
|
|
||||||
else
|
|
||||||
URL="https://${PBRANCH}.${PNAME}.pages.dev"
|
|
||||||
fi
|
|
||||||
echo "deployed_url=$URL" >> $GITHUB_OUTPUT
|
|
||||||
61
.github/workflows/run-tests.yml
vendored
61
.github/workflows/run-tests.yml
vendored
@@ -1,61 +0,0 @@
|
|||||||
name: Run Python Tests
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-and-test:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
services:
|
|
||||||
mariadb:
|
|
||||||
image: mariadb:11.4
|
|
||||||
env:
|
|
||||||
MARIADB_ROOT_PASSWORD: rootpw
|
|
||||||
MARIADB_DATABASE: group_project
|
|
||||||
MARIADB_USER: appuser
|
|
||||||
MARIADB_PASSWORD: apppass
|
|
||||||
ports:
|
|
||||||
- 3306:3306
|
|
||||||
options: >-
|
|
||||||
--health-cmd="mariadb-admin ping -h 127.0.0.1 -u root -prootpw --silent"
|
|
||||||
--health-interval=5s
|
|
||||||
--health-timeout=2s
|
|
||||||
--health-retries=20
|
|
||||||
|
|
||||||
env:
|
|
||||||
MARIADB_HOST: 127.0.0.1
|
|
||||||
MARIADB_PORT: "3306"
|
|
||||||
MARIADB_DB: group_project
|
|
||||||
MARIADB_USER: appuser
|
|
||||||
MARIADB_PASSWORD: apppass
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Check out repository code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up Python 3.11
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.11'
|
|
||||||
|
|
||||||
- name: Add test dependencies to requirements
|
|
||||||
run: |
|
|
||||||
echo "pytest==8.4.2" >> ./7project/backend/requirements.txt
|
|
||||||
echo "pytest-asyncio==1.2.0" >> ./7project/backend/requirements.txt
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
|
||||||
python -m pip install --upgrade pip
|
|
||||||
pip install -r ./7project/backend/requirements.txt
|
|
||||||
|
|
||||||
- name: Run Alembic migrations
|
|
||||||
run: |
|
|
||||||
alembic upgrade head
|
|
||||||
working-directory: ./7project/backend
|
|
||||||
|
|
||||||
- name: Run tests with pytest
|
|
||||||
run: pytest
|
|
||||||
working-directory: ./7project/backend
|
|
||||||
74
.github/workflows/url_generator.yml
vendored
74
.github/workflows/url_generator.yml
vendored
@@ -1,74 +0,0 @@
|
|||||||
name: Generate Preview or Production URLs
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
mode:
|
|
||||||
description: "Build mode: 'prod' or 'pr'"
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
pr_number:
|
|
||||||
description: 'PR number (required when mode=pr)'
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
runner:
|
|
||||||
description: 'The runner to use for this job'
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
default: 'ubuntu-latest'
|
|
||||||
base_domain:
|
|
||||||
description: 'The base domain for production URLs (e.g., example.com)'
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
backend_url:
|
|
||||||
description: "The backend URL without scheme (e.g., api.example.com)"
|
|
||||||
value: ${{ jobs.generate-urls.outputs.backend_url }}
|
|
||||||
frontend_url:
|
|
||||||
description: "The frontend URL without scheme (e.g., app.example.com)"
|
|
||||||
value: ${{ jobs.generate-urls.outputs.frontend_url }}
|
|
||||||
backend_url_scheme:
|
|
||||||
description: "The backend URL with scheme (e.g., https://api.example.com)"
|
|
||||||
value: ${{ jobs.generate-urls.outputs.backend_url_scheme }}
|
|
||||||
frontend_url_scheme:
|
|
||||||
description: "The frontend URL with scheme (e.g., https://app.example.com)"
|
|
||||||
value: ${{ jobs.generate-urls.outputs.frontend_url_scheme }}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
generate-urls:
|
|
||||||
permissions:
|
|
||||||
contents: none
|
|
||||||
runs-on: ${{ inputs.runner }}
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
backend_url: ${{ steps.set_urls.outputs.backend_url }}
|
|
||||||
frontend_url: ${{ steps.set_urls.outputs.frontend_url }}
|
|
||||||
backend_url_scheme: ${{ steps.set_urls.outputs.backend_url_scheme }}
|
|
||||||
frontend_url_scheme: ${{ steps.set_urls.outputs.frontend_url_scheme }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Generate URLs
|
|
||||||
id: set_urls
|
|
||||||
env:
|
|
||||||
BASE_DOMAIN: ${{ inputs.base_domain }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
if [ "${{ inputs.mode }}" = "prod" ]; then
|
|
||||||
BACKEND_URL="api.${BASE_DOMAIN}"
|
|
||||||
FRONTEND_URL="finance.${BASE_DOMAIN}"
|
|
||||||
else
|
|
||||||
# This is your current logic
|
|
||||||
FRONTEND_URL="pr-${{ inputs.pr_number }}.group-8-frontend.pages.dev"
|
|
||||||
BACKEND_URL="api-pr-${{ inputs.pr_number }}.${BASE_DOMAIN}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
FRONTEND_URL_SCHEME="https://$FRONTEND_URL"
|
|
||||||
BACKEND_URL_SCHEME="https://$BACKEND_URL"
|
|
||||||
|
|
||||||
# This part correctly writes to GITHUB_OUTPUT for the step
|
|
||||||
echo "backend_url_scheme=$BACKEND_URL_SCHEME" >> $GITHUB_OUTPUT
|
|
||||||
echo "frontend_url_scheme=$FRONTEND_URL_SCHEME" >> $GITHUB_OUTPUT
|
|
||||||
echo "backend_url=$BACKEND_URL" >> $GITHUB_OUTPUT
|
|
||||||
echo "frontend_url=$FRONTEND_URL" >> $GITHUB_OUTPUT
|
|
||||||
54
.github/workflows/workflow.yml
vendored
Normal file
54
.github/workflows/workflow.yml
vendored
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
name: Build, Push and Update Image in Manifest
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ "main" ]
|
||||||
|
paths:
|
||||||
|
- 'backend/**'
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-update:
|
||||||
|
runs-on: kbctl
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
packages: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKER_USER }}
|
||||||
|
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Build and push Docker image
|
||||||
|
id: build
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: ./backend
|
||||||
|
push: true
|
||||||
|
tags: ${{ secrets.DOCKER_USER }}/cc-app-demo:latest
|
||||||
|
|
||||||
|
- name: Get image digest
|
||||||
|
run: echo "IMAGE_DIGEST=${{ steps.build.outputs.digest }}" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Update manifests with new image digest
|
||||||
|
uses: OpsVerseIO/image-updater-action@0.1.0
|
||||||
|
with:
|
||||||
|
branch: main
|
||||||
|
targetBranch: main
|
||||||
|
createPR: 'false'
|
||||||
|
message: "${{ github.event.head_commit.message }}"
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
changes: |
|
||||||
|
{
|
||||||
|
"deployment/app-demo-deployment.yaml": {
|
||||||
|
"spec.template.spec.containers[0].image": "${{ secrets.DOCKER_USER }}/cc-app-demo@${{ env.IMAGE_DIGEST }}"
|
||||||
|
},
|
||||||
|
"deployment/app-demo-worker-deployment.yaml": {
|
||||||
|
"spec.template.spec.containers[0].image": "${{ secrets.DOCKER_USER }}/cc-app-demo@${{ env.IMAGE_DIGEST }}"
|
||||||
|
}
|
||||||
|
}
|
||||||
0
7project/.gitignore → .gitignore
vendored
0
7project/.gitignore → .gitignore
vendored
@@ -45,11 +45,11 @@ flowchart LR
|
|||||||
proc_cron[Task planner] --> proc_queue
|
proc_cron[Task planner] --> proc_queue
|
||||||
proc_queue_worker --> ext_bank[(Bank API)]
|
proc_queue_worker --> ext_bank[(Bank API)]
|
||||||
proc_queue_worker --> db
|
proc_queue_worker --> db
|
||||||
client[Client/UI] <--> api[API Gateway / Web Server]
|
client[Client/UI] --> api[API Gateway / Web Server]
|
||||||
api <--> svc[Web API]
|
api --> svc[Web API]
|
||||||
svc --> proc_queue
|
svc --> proc_queue
|
||||||
svc <--> db[(Database)]
|
svc --> db[(Database)]
|
||||||
svc <--> cache[(Cache)]
|
svc --> cache[(Cache)]
|
||||||
```
|
```
|
||||||
|
|
||||||
- Components and responsibilities: What does each box do?
|
- Components and responsibilities: What does each box do?
|
||||||
|
|||||||
@@ -1,34 +0,0 @@
|
|||||||
"""update categories unique
|
|
||||||
|
|
||||||
Revision ID: 390041bd839e
|
|
||||||
Revises: 63e072f09836
|
|
||||||
Create Date: 2025-10-09 15:14:31.557686
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '390041bd839e'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '63e072f09836'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Upgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.drop_index(op.f('name'), table_name='categories')
|
|
||||||
op.create_unique_constraint('uix_name_user_id', 'categories', ['name', 'user_id'])
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Downgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.drop_constraint('uix_name_user_id', 'categories', type_='unique')
|
|
||||||
op.create_index(op.f('name'), 'categories', ['name'], unique=True)
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
"""add user oauth
|
|
||||||
|
|
||||||
Revision ID: 7af8f296d089
|
|
||||||
Revises: 390041bd839e
|
|
||||||
Create Date: 2025-10-10 14:05:00.153376
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
import fastapi_users_db_sqlalchemy
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '7af8f296d089'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '390041bd839e'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Upgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.create_table('oauth_account',
|
|
||||||
sa.Column('id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
|
||||||
sa.Column('user_id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
|
||||||
sa.Column('oauth_name', sa.String(length=100), nullable=False),
|
|
||||||
sa.Column('access_token', sa.String(length=1024), nullable=False),
|
|
||||||
sa.Column('expires_at', sa.Integer(), nullable=True),
|
|
||||||
sa.Column('refresh_token', sa.String(length=1024), nullable=True),
|
|
||||||
sa.Column('account_id', sa.String(length=320), nullable=False),
|
|
||||||
sa.Column('account_email', sa.String(length=320), nullable=False),
|
|
||||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='cascade'),
|
|
||||||
sa.PrimaryKeyConstraint('id')
|
|
||||||
)
|
|
||||||
op.create_index(op.f('ix_oauth_account_account_id'), 'oauth_account', ['account_id'], unique=False)
|
|
||||||
op.create_index(op.f('ix_oauth_account_oauth_name'), 'oauth_account', ['oauth_name'], unique=False)
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Downgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.drop_index(op.f('ix_oauth_account_oauth_name'), table_name='oauth_account')
|
|
||||||
op.drop_index(op.f('ix_oauth_account_account_id'), table_name='oauth_account')
|
|
||||||
op.drop_table('oauth_account')
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
"""change token length
|
|
||||||
|
|
||||||
Revision ID: 5ab2e654c96e
|
|
||||||
Revises: 7af8f296d089
|
|
||||||
Create Date: 2025-10-11 21:07:41.930470
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import mysql
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '5ab2e654c96e'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '7af8f296d089'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
    """Upgrade schema: widen ``oauth_account.access_token`` from 1024 to 4096."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        'oauth_account', 'access_token',
        existing_type=mysql.VARCHAR(length=1024),
        type_=sa.String(length=4096),
        existing_nullable=False,
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema: shrink ``access_token`` back to 1024 characters.

    NOTE(review): stored tokens longer than 1024 chars may be truncated or
    rejected by the database on downgrade — confirm before running.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        'oauth_account', 'access_token',
        existing_type=sa.String(length=4096),
        type_=mysql.VARCHAR(length=1024),
        existing_nullable=False,
    )
    # ### end Alembic commands ###
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
"""add config to user
|
|
||||||
|
|
||||||
Revision ID: eabec90a94fe
|
|
||||||
Revises: 5ab2e654c96e
|
|
||||||
Create Date: 2025-10-21 18:56:42.085973
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = 'eabec90a94fe'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '5ab2e654c96e'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
    """Upgrade schema: add the nullable JSON ``config`` column to ``user``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('config', sa.JSON(), nullable=True))
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema: remove ``user.config`` (stored data is lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'config')
    # ### end Alembic commands ###
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
"""add date to transaction
|
|
||||||
|
|
||||||
Revision ID: 1f2a3c4d5e6f
|
|
||||||
Revises: eabec90a94fe
|
|
||||||
Create Date: 2025-10-22 16:18:00
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.sql import func
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '1f2a3c4d5e6f'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = 'eabec90a94fe'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
    """Add a non-null ``transaction.date`` column defaulting to CURRENT_DATE.

    NOTE(review): other migrations in this chain target MySQL; a bare
    ``CURRENT_DATE`` column default is only accepted by MySQL 8.0.13+
    (expression defaults require parentheses) — confirm against the
    deployed server version.
    """
    op.add_column(
        'transaction',
        sa.Column('date', sa.Date(), nullable=False, server_default=sa.text('CURRENT_DATE'))
    )


def downgrade() -> None:
    """Remove the ``transaction.date`` column."""
    op.drop_column('transaction', 'date')
|
|
||||||
@@ -1,47 +0,0 @@
|
|||||||
"""Add encrypted type
|
|
||||||
|
|
||||||
Revision ID: 46b9e702e83f
|
|
||||||
Revises: 1f2a3c4d5e6f
|
|
||||||
Create Date: 2025-10-29 13:26:24.568523
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
import sqlalchemy_utils
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import mysql
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '46b9e702e83f'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '1f2a3c4d5e6f'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
    """Upgrade schema: switch ``transaction`` amount/description to EncryptedType."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        'transaction', 'amount',
        existing_type=mysql.FLOAT(),
        type_=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
        existing_nullable=False,
    )
    op.alter_column(
        'transaction', 'description',
        existing_type=mysql.VARCHAR(length=255),
        type_=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
        existing_nullable=True,
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema: restore the plain FLOAT/VARCHAR column types.

    NOTE(review): this only changes the column types; ciphertext already
    stored by the application is not decrypted by the migration itself —
    confirm data handling before downgrading.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        'transaction', 'description',
        existing_type=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
        type_=mysql.VARCHAR(length=255),
        existing_nullable=True,
    )
    op.alter_column(
        'transaction', 'amount',
        existing_type=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
        type_=mysql.FLOAT(),
        existing_nullable=False,
    )
    # ### end Alembic commands ###
|
|
||||||
@@ -1,46 +0,0 @@
|
|||||||
"""Cascade categories
|
|
||||||
|
|
||||||
Revision ID: 59cebf320c4a
|
|
||||||
Revises: 46b9e702e83f
|
|
||||||
Create Date: 2025-10-30 13:42:44.555284
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import mysql
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '59cebf320c4a'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '46b9e702e83f'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
    """Rename the association columns and add ON DELETE CASCADE foreign keys."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('category_transaction', sa.Column('category_id', sa.Integer(), nullable=False))
    op.add_column('category_transaction', sa.Column('transaction_id', sa.Integer(), nullable=False))
    op.drop_constraint(op.f('category_transaction_ibfk_2'), 'category_transaction', type_='foreignkey')
    op.drop_constraint(op.f('category_transaction_ibfk_1'), 'category_transaction', type_='foreignkey')
    # New FKs are auto-named (name=None) by the backend.
    op.create_foreign_key(None, 'category_transaction', 'transaction', ['transaction_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'category_transaction', 'categories', ['category_id'], ['id'], ondelete='CASCADE')
    op.drop_column('category_transaction', 'id_category')
    op.drop_column('category_transaction', 'id_transaction')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Restore the original ``id_category``/``id_transaction`` association columns.

    NOTE(review): ``op.drop_constraint(None, ...)`` cannot identify a
    constraint by name and will fail at runtime on most backends; the
    auto-generated FK names from upgrade() need to be looked up or the
    constraints named explicitly — confirm before relying on downgrade.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('category_transaction', sa.Column('id_transaction', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
    op.add_column('category_transaction', sa.Column('id_category', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
    op.drop_constraint(None, 'category_transaction', type_='foreignkey')
    op.drop_constraint(None, 'category_transaction', type_='foreignkey')
    op.create_foreign_key(op.f('category_transaction_ibfk_1'), 'category_transaction', 'categories', ['id_category'], ['id'])
    op.create_foreign_key(op.f('category_transaction_ibfk_2'), 'category_transaction', 'transaction', ['id_transaction'], ['id'])
    op.drop_column('category_transaction', 'transaction_id')
    op.drop_column('category_transaction', 'category_id')
    # ### end Alembic commands ###
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
from fastapi import APIRouter, Depends, status
|
|
||||||
from fastapi_users import models
|
|
||||||
from fastapi_users.manager import BaseUserManager
|
|
||||||
|
|
||||||
from app.schemas.user import UserCreate, UserRead, UserUpdate
|
|
||||||
from app.services.user_service import auth_backend, fastapi_users
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
@router.delete(
    "/users/me",
    status_code=status.HTTP_204_NO_CONTENT,
    tags=["users"],
    summary="Delete current user",
    response_description="The user has been successfully deleted.",
)
async def delete_me(
    user: models.UserProtocol = Depends(fastapi_users.current_user(active=True)),
    user_manager: BaseUserManager = Depends(fastapi_users.get_user_manager),
):
    """
    Delete the currently authenticated user.

    The dependency only resolves for an active, authenticated user;
    deletion is delegated to the fastapi-users manager so its hooks run.
    Responds 204 with no body on success.
    """
    await user_manager.delete(user)
|
|
||||||
|
|
||||||
# Keep existing paths as-is under /auth/* and /users/*
|
|
||||||
from fastapi import Request, Response
|
|
||||||
from app.core.security import revoke_token, extract_bearer_token
|
|
||||||
|
|
||||||
|
|
||||||
@router.post(
    "/auth/jwt/logout",
    status_code=status.HTTP_204_NO_CONTENT,
    tags=["auth"],
    summary="Log out and revoke current token",
)
async def custom_logout(request: Request) -> Response:
    """Revoke the caller's bearer token so it cannot be reused.

    A request without a bearer token still gets 204 — logout is idempotent.
    """
    bearer = extract_bearer_token(request)
    if bearer:
        revoke_token(bearer)
    return Response(status_code=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_auth_router(auth_backend), prefix="/auth/jwt", tags=["auth"]
|
|
||||||
)
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_register_router(UserRead, UserCreate),
|
|
||||||
prefix="/auth",
|
|
||||||
tags=["auth"],
|
|
||||||
)
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_reset_password_router(),
|
|
||||||
prefix="/auth",
|
|
||||||
tags=["auth"],
|
|
||||||
)
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_verify_router(UserRead),
|
|
||||||
prefix="/auth",
|
|
||||||
tags=["auth"],
|
|
||||||
)
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_users_router(UserRead, UserUpdate),
|
|
||||||
prefix="/users",
|
|
||||||
tags=["users"],
|
|
||||||
)
|
|
||||||
@@ -1,108 +0,0 @@
|
|||||||
from typing import List
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
|
||||||
from sqlalchemy import select, delete
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.models.categories import Category
|
|
||||||
from app.schemas.category import CategoryCreate, CategoryRead, CategoryUpdate
|
|
||||||
from app.services.db import get_async_session
|
|
||||||
from app.services.user_service import current_active_user
|
|
||||||
from app.models.user import User
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/categories", tags=["categories"])
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/create", response_model=CategoryRead, status_code=status.HTTP_201_CREATED)
async def create_category(
    payload: CategoryCreate,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Create a category owned by the current user.

    Responds 409 when the user already has a category with the same name.
    """
    # Pre-check uniqueness per user so the client gets a clean 409 instead
    # of a raw database integrity error.
    duplicate_q = await session.execute(
        select(Category).where(Category.user_id == user.id, Category.name == payload.name)
    )
    if duplicate_q.scalar_one_or_none():
        raise HTTPException(status_code=409, detail="Category with this name already exists")

    new_category = Category(name=payload.name, description=payload.description, user_id=user.id)
    session.add(new_category)
    await session.commit()
    await session.refresh(new_category)
    return new_category
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[CategoryRead])
async def list_categories(
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Return every category owned by the current user."""
    rows = await session.execute(select(Category).where(Category.user_id == user.id))
    return list(rows.scalars())
|
|
||||||
|
|
||||||
|
|
||||||
@router.patch("/{category_id}", response_model=CategoryRead)
async def update_category(
    category_id: int,
    payload: CategoryUpdate,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Partially update a category owned by the current user.

    Responds 404 when the category does not exist (or belongs to another
    user) and 409 when renaming would collide with one of the user's
    existing category names.
    """
    lookup = await session.execute(
        select(Category).where(Category.id == category_id, Category.user_id == user.id)
    )
    cat = lookup.scalar_one_or_none()
    if cat is None:
        raise HTTPException(status_code=404, detail="Category not found")

    # Only an actual rename needs a uniqueness check.
    if payload.name is not None and payload.name != cat.name:
        clash = await session.execute(
            select(Category.id).where(Category.user_id == user.id, Category.name == payload.name)
        )
        if clash.scalar_one_or_none() is not None:
            raise HTTPException(status_code=409, detail="Category with this name already exists")
        cat.name = payload.name

    if payload.description is not None:
        cat.description = payload.description

    await session.commit()
    await session.refresh(cat)
    return cat
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{category_id}", response_model=CategoryRead)
async def get_category(
    category_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Fetch one category owned by the current user, or 404."""
    lookup = await session.execute(
        select(Category).where(Category.id == category_id, Category.user_id == user.id)
    )
    cat = lookup.scalar_one_or_none()
    if cat is None:
        raise HTTPException(status_code=404, detail="Category not found")
    return cat
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/{category_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_category(
    category_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Delete one of the current user's categories; 404 if absent."""
    # Existence check first so a missing row yields 404, not a silent no-op.
    found = await session.execute(
        select(Category.id).where(Category.id == category_id, Category.user_id == user.id)
    )
    if found.scalar_one_or_none() is None:
        raise HTTPException(status_code=404, detail="Category not found")

    await session.execute(
        delete(Category).where(Category.id == category_id, Category.user_id == user.id)
    )
    await session.commit()
    return None
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
import json
|
|
||||||
import os
|
|
||||||
|
|
||||||
from fastapi import APIRouter
|
|
||||||
from fastapi.params import Depends
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.models.user import User
|
|
||||||
from app.oauth.csas import CSASOAuth
|
|
||||||
from app.services.db import get_async_session
|
|
||||||
from app.services.user_service import current_active_user
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/auth/csas", tags=["csas"])
|
|
||||||
|
|
||||||
CLIENT_ID = os.getenv("CSAS_CLIENT_ID")
|
|
||||||
CLIENT_SECRET = os.getenv("CSAS_CLIENT_SECRET")
|
|
||||||
CSAS_OAUTH = CSASOAuth(CLIENT_ID, CLIENT_SECRET)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/authorize")
async def csas_authorize():
    """Return the CSAS OAuth authorization URL for the frontend redirect.

    Fix: ``os.getenv("FRONTEND_DOMAIN_SCHEME")`` returned ``None`` when the
    variable was unset, making ``None + "/auth/csas/callback"`` raise
    ``TypeError``. Use the same ``http://localhost:3000`` fallback that the
    other OAuth routers in this service use.
    """
    base = os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000")
    redirect_uri = base + "/auth/csas/callback"
    return {"authorization_url": await CSAS_OAUTH.get_authorization_url(redirect_uri)}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/callback")
async def csas_callback(code: str, session: AsyncSession = Depends(get_async_session),
                        user: User = Depends(current_active_user)):
    """Exchange the OAuth ``code`` and persist the token response on the user.

    The token payload is stored as a JSON string under
    ``user.config["csas"]``. A new dict object is assigned (not mutated in
    place) so SQLAlchemy detects the change on the JSON column.

    Fix: ``os.getenv("FRONTEND_DOMAIN_SCHEME")`` raised ``TypeError`` via
    ``None + str`` when unset; now falls back to ``http://localhost:3000``
    consistently with the other OAuth routers.
    """
    base = os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000")
    response = await CSAS_OAUTH.get_access_token(code, base + "/auth/csas/callback")

    config = dict(user.config) if user.config else {}
    config["csas"] = json.dumps(response)
    user.config = config  # reassign so the JSON column change is tracked
    await session.commit()

    return "OK"
|
|
||||||
@@ -1,280 +0,0 @@
|
|||||||
from typing import List, Optional
|
|
||||||
from datetime import date
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
|
||||||
from sqlalchemy import select, and_, func
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.models.transaction import Transaction
|
|
||||||
from app.models.categories import Category
|
|
||||||
from app.schemas.transaction import (
|
|
||||||
TransactionCreate,
|
|
||||||
TransactionRead,
|
|
||||||
TransactionUpdate,
|
|
||||||
)
|
|
||||||
from app.services.db import get_async_session
|
|
||||||
from app.services.user_service import current_active_user
|
|
||||||
from app.models.user import User
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/transactions", tags=["transactions"])
|
|
||||||
|
|
||||||
|
|
||||||
def _to_read_model(tx: Transaction) -> TransactionRead:
    """Map a ``Transaction`` ORM row onto its API read schema.

    ``categories`` must already be loaded on the instance; a missing/None
    relationship is treated as empty.
    """
    category_ids = [category.id for category in (tx.categories or [])]
    return TransactionRead(
        id=tx.id,
        amount=tx.amount,
        description=tx.description,
        date=tx.date,
        category_ids=category_ids,
    )
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/create", response_model=TransactionRead, status_code=status.HTTP_201_CREATED)
async def create_transaction(
    payload: TransactionCreate,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Create a transaction for the current user.

    ``date`` is optional — when omitted, the column's database default
    applies. Category IDs, if given, must be unique and owned by the caller.
    """
    # Only set `date` when provided so the DB default can apply otherwise.
    fields = {"amount": payload.amount, "description": payload.description, "user_id": user.id}
    if payload.date is not None:
        when = payload.date
        if isinstance(when, str):
            # ISO strings are accepted as a convenience; anything else is a 400.
            try:
                when = date.fromisoformat(when)
            except ValueError:
                raise HTTPException(status_code=400, detail="Invalid date format, expected YYYY-MM-DD")
        fields["date"] = when
    new_tx = Transaction(**fields)

    if payload.category_ids:
        owned = await session.execute(
            select(Category).where(
                Category.user_id == user.id, Category.id.in_(payload.category_ids)
            )
        )
        matched = list(owned.scalars())
        # A count mismatch means duplicate IDs in the payload or IDs the
        # user does not own / that do not exist.
        if len(matched) != len(set(payload.category_ids)):
            raise HTTPException(
                status_code=400,
                detail="Duplicate category IDs provided or one or more categories not found"
            )
        new_tx.categories = matched

    session.add(new_tx)
    await session.commit()
    await session.refresh(new_tx)
    # Ensure the relationship is loaded before serializing.
    await session.refresh(new_tx, attribute_names=["categories"])
    return _to_read_model(new_tx)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[TransactionRead])
async def list_transactions(
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """List the caller's transactions, optionally bounded by an inclusive date range."""
    filters = [Transaction.user_id == user.id]
    if start_date is not None:
        filters.append(Transaction.date >= start_date)
    if end_date is not None:
        filters.append(Transaction.date <= end_date)

    result = await session.execute(
        select(Transaction).where(and_(*filters)).order_by(Transaction.date, Transaction.id)
    )
    transactions = list(result.scalars())
    # NOTE(review): one refresh per row is an N+1 pattern; eager loading of
    # `categories` in the query would avoid it — confirm relationship config.
    for transaction in transactions:
        await session.refresh(transaction, attribute_names=["categories"])
    return [_to_read_model(transaction) for transaction in transactions]
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/balance_series")
async def get_balance_series(
    start_date: Optional[date] = None,
    end_date: Optional[date] = None,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Return a cumulative daily balance for the caller's transactions.

    Output is a date-sorted list of ``{"date": iso_string, "balance": float}``
    entries where each balance is the running total up to and including
    that day.
    """
    filters = [Transaction.user_id == user.id]
    if start_date is not None:
        filters.append(Transaction.date >= start_date)
    if end_date is not None:
        filters.append(Transaction.date <= end_date)
    result = await session.execute(
        select(Transaction).where(and_(*filters)).order_by(Transaction.date, Transaction.id)
    )

    # Sum amounts per day, keyed by the ISO date string.
    per_day = {}
    for row in result.scalars():
        day = row.date.isoformat() if hasattr(row.date, 'isoformat') else str(row.date)
        per_day[day] = per_day.get(day, 0.0) + float(row.amount)

    # Fold the per-day totals into a sorted cumulative series.
    balance = 0.0
    series = []
    for day in sorted(per_day):
        balance += per_day[day]
        series.append({"date": day, "balance": balance})
    return series
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{transaction_id}", response_model=TransactionRead)
async def get_transaction(
    transaction_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Fetch one of the caller's transactions by id, or 404."""
    lookup = await session.execute(
        select(Transaction).where(
            Transaction.id == transaction_id, Transaction.user_id == user.id
        )
    )
    transaction: Optional[Transaction] = lookup.scalar_one_or_none()
    if transaction is None:
        raise HTTPException(status_code=404, detail="Transaction not found")
    # Load the relationship before serializing.
    await session.refresh(transaction, attribute_names=["categories"])
    return _to_read_model(transaction)
|
|
||||||
|
|
||||||
|
|
||||||
@router.patch("/{transaction_id}/edit", response_model=TransactionRead)
async def update_transaction(
    transaction_id: int,
    payload: TransactionUpdate,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Partially update a transaction owned by the caller.

    ``category_ids`` replaces the whole category set: an empty list clears
    it; a non-empty list must contain unique IDs the user owns. Omitted
    fields are left untouched.
    """
    lookup = await session.execute(
        select(Transaction).where(
            Transaction.id == transaction_id, Transaction.user_id == user.id
        )
    )
    tx: Optional[Transaction] = lookup.scalar_one_or_none()
    if tx is None:
        raise HTTPException(status_code=404, detail="Transaction not found")

    if payload.amount is not None:
        tx.amount = payload.amount
    if payload.description is not None:
        tx.description = payload.description
    if payload.date is not None:
        when = payload.date
        if isinstance(when, str):
            try:
                when = date.fromisoformat(when)
            except ValueError:
                raise HTTPException(status_code=400, detail="Invalid date format, expected YYYY-MM-DD")
        tx.date = when

    if payload.category_ids is not None:
        # Preload the relationship so assigning to it does not trigger an
        # async lazy-load mid-request.
        await session.refresh(tx, attribute_names=["categories"])
        if not payload.category_ids:
            tx.categories = []
        else:
            if len(payload.category_ids) != len(set(payload.category_ids)):
                raise HTTPException(status_code=400, detail="Duplicate category IDs in payload")
            owned = await session.execute(
                select(Category).where(
                    Category.user_id == user.id, Category.id.in_(payload.category_ids)
                )
            )
            matched = list(owned.scalars())
            if len(matched) != len(payload.category_ids):
                raise HTTPException(status_code=400, detail="One or more categories not found")
            tx.categories = matched

    await session.commit()
    await session.refresh(tx, attribute_names=["categories"])
    return _to_read_model(tx)
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/{transaction_id}/delete", status_code=status.HTTP_204_NO_CONTENT)
async def delete_transaction(
    transaction_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Delete one of the caller's transactions; 404 if absent."""
    lookup = await session.execute(
        select(Transaction).where(
            Transaction.id == transaction_id, Transaction.user_id == user.id
        )
    )
    transaction = lookup.scalar_one_or_none()
    if transaction is None:
        raise HTTPException(status_code=404, detail="Transaction not found")

    await session.delete(transaction)
    await session.commit()
    return None
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/{transaction_id}/categories/{category_id}", response_model=TransactionRead)
async def assign_category(
    transaction_id: int,
    category_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Attach a category to a transaction (both must belong to the caller).

    Idempotent: assigning an already-attached category is a no-op.
    """
    tx_lookup = await session.execute(
        select(Transaction).where(
            Transaction.id == transaction_id, Transaction.user_id == user.id
        )
    )
    tx: Optional[Transaction] = tx_lookup.scalar_one_or_none()
    if tx is None:
        raise HTTPException(status_code=404, detail="Transaction not found")

    cat_lookup = await session.execute(
        select(Category).where(Category.id == category_id, Category.user_id == user.id)
    )
    cat: Optional[Category] = cat_lookup.scalar_one_or_none()
    if cat is None:
        raise HTTPException(status_code=404, detail="Category not found")

    # Preload the relationship, then append only when not already linked.
    await session.refresh(tx, attribute_names=["categories"])
    if cat not in tx.categories:
        tx.categories.append(cat)
        await session.commit()
        await session.refresh(tx, attribute_names=["categories"])
    return _to_read_model(tx)
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/{transaction_id}/categories/{category_id}", response_model=TransactionRead)
async def unassign_category(
    transaction_id: int,
    category_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Detach a category from a transaction (both must belong to the caller).

    Idempotent: removing a category that is not attached is a no-op.
    """
    tx_lookup = await session.execute(
        select(Transaction).where(
            Transaction.id == transaction_id, Transaction.user_id == user.id
        )
    )
    tx: Optional[Transaction] = tx_lookup.scalar_one_or_none()
    if tx is None:
        raise HTTPException(status_code=404, detail="Transaction not found")

    cat_lookup = await session.execute(
        select(Category).where(Category.id == category_id, Category.user_id == user.id)
    )
    cat: Optional[Category] = cat_lookup.scalar_one_or_none()
    if cat is None:
        raise HTTPException(status_code=404, detail="Category not found")

    # Preload the relationship, then remove only when actually linked.
    await session.refresh(tx, attribute_names=["categories"])
    if cat in tx.categories:
        tx.categories.remove(cat)
        await session.commit()
        await session.refresh(tx, attribute_names=["categories"])
    return _to_read_model(tx)
|
|
||||||
@@ -1,152 +0,0 @@
|
|||||||
import logging
|
|
||||||
import os
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from fastapi import Depends, FastAPI
|
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
|
||||||
from prometheus_fastapi_instrumentator import Instrumentator, metrics
|
|
||||||
from starlette.requests import Request
|
|
||||||
|
|
||||||
from app.services import bank_scraper
|
|
||||||
from app.workers.celery_tasks import load_transactions, load_all_transactions
|
|
||||||
from app.models.user import User, OAuthAccount
|
|
||||||
|
|
||||||
from app.services.user_service import current_active_verified_user
|
|
||||||
from app.api.auth import router as auth_router
|
|
||||||
from app.api.csas import router as csas_router
|
|
||||||
from app.api.categories import router as categories_router
|
|
||||||
from app.api.transactions import router as transactions_router
|
|
||||||
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users, get_oauth_provider, \
|
|
||||||
UserManager, get_jwt_strategy
|
|
||||||
from app.core.security import extract_bearer_token, is_token_revoked, decode_and_verify_jwt
|
|
||||||
from app.services.user_service import SECRET
|
|
||||||
|
|
||||||
from fastapi import FastAPI
|
|
||||||
import sentry_sdk
|
|
||||||
from fastapi_users.db import SQLAlchemyUserDatabase
|
|
||||||
from app.core.db import async_session_maker
|
|
||||||
|
|
||||||
sentry_sdk.init(
|
|
||||||
dsn=os.getenv("SENTRY_DSN"),
|
|
||||||
send_default_pii=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
fastApi = FastAPI()
|
|
||||||
|
|
||||||
# CORS for frontend dev server
|
|
||||||
fastApi.add_middleware(
|
|
||||||
CORSMiddleware,
|
|
||||||
allow_origins=[
|
|
||||||
"http://localhost:5173",
|
|
||||||
"http://127.0.0.1:5173",
|
|
||||||
os.getenv("FRONTEND_DOMAIN_SCHEME", "")
|
|
||||||
],
|
|
||||||
allow_credentials=True,
|
|
||||||
allow_methods=["*"],
|
|
||||||
allow_headers=["*"],
|
|
||||||
)
|
|
||||||
|
|
||||||
prometheus = Instrumentator().instrument(fastApi)
|
|
||||||
|
|
||||||
prometheus.expose(
|
|
||||||
fastApi,
|
|
||||||
endpoint="/metrics",
|
|
||||||
include_in_schema=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
fastApi.include_router(auth_router)
|
|
||||||
fastApi.include_router(categories_router)
|
|
||||||
fastApi.include_router(transactions_router)
|
|
||||||
|
|
||||||
logging.basicConfig(filename='app.log', level=logging.INFO, format='%(asctime)s %(message)s')
|
|
||||||
|
|
||||||
|
|
||||||
@fastApi.middleware("http")
async def auth_guard(request: Request, call_next):
    """Globally reject requests carrying a revoked, expired, or invalid JWT.

    Requests without a bearer token pass straight through — per-route
    dependencies still enforce authentication where required.
    """
    token = extract_bearer_token(request)
    if not token:
        return await call_next(request)

    from fastapi import Response, status as _status
    # Revoked tokens (e.g. after logout) are refused outright.
    if is_token_revoked(token):
        return Response(status_code=_status.HTTP_401_UNAUTHORIZED)
    # Any decode failure (expired, malformed, bad signature) is a 401.
    try:
        decode_and_verify_jwt(token, SECRET)
    except Exception:
        return Response(status_code=_status.HTTP_401_UNAUTHORIZED)
    return await call_next(request)
|
|
||||||
|
|
||||||
|
|
||||||
# Header names whose values must never be written to the log file.
_SENSITIVE_HEADERS = {"authorization", "cookie", "set-cookie", "proxy-authorization"}


def _redact_headers(headers) -> dict:
    """Return *headers* as a plain dict with credential-bearing values masked."""
    return {
        name: ("<redacted>" if name.lower() in _SENSITIVE_HEADERS else value)
        for name, value in dict(headers).items()
    }


@fastApi.middleware("http")
async def log_traffic(request: Request, call_next):
    """Log one structured line per request/response pair.

    Captures method, URL, sizes, headers, status code, processing time and
    client address.  Credential-bearing headers (Authorization, Cookie, …)
    are redacted so bearer tokens never end up in app.log.
    """
    start_time = datetime.now()
    response = await call_next(request)
    process_time = (datetime.now() - start_time).total_seconds()
    client_host = request.client.host
    log_params = {
        "request_method": request.method,
        "request_url": str(request.url),
        "request_size": request.headers.get("content-length"),
        # Fix: previously the raw Authorization/Cookie values were logged.
        "request_headers": _redact_headers(request.headers),
        "response_status": response.status_code,
        "response_size": response.headers.get("content-length"),
        "response_headers": _redact_headers(response.headers),
        "process_time": process_time,
        "client_host": client_host
    }
    logging.info(str(log_params))
    return response
|
|
||||||
|
|
||||||
|
|
||||||
# MojeID OAuth login/callback routes.
fastApi.include_router(
    fastapi_users.get_oauth_router(
        get_oauth_provider("MojeID"),
        auth_backend,
        # Fix: the literal string "SECRET" was passed here, so OAuth state
        # tokens were signed with a publicly known value; use the module's
        # actual SECRET constant (same one used to verify JWTs).
        SECRET,
        associate_by_email=True,
        redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000") + "/auth/mojeid/callback",
    ),
    prefix="/auth/mojeid",
    tags=["auth"],
)
|
|
||||||
|
|
||||||
# BankID OAuth login/callback routes.
fastApi.include_router(
    fastapi_users.get_oauth_router(
        get_oauth_provider("BankID"),
        auth_backend,
        # Fix: the literal string "SECRET" was passed here, so OAuth state
        # tokens were signed with a publicly known value; use the module's
        # actual SECRET constant (same one used to verify JWTs).
        SECRET,
        associate_by_email=True,
        redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000") + "/auth/bankid/callback",
    ),
    prefix="/auth/bankid",
    tags=["auth"],
)
|
|
||||||
|
|
||||||
fastApi.include_router(csas_router)
|
|
||||||
|
|
||||||
|
|
||||||
# Liveness/root endpoint
@fastApi.get("/", include_in_schema=False)
async def root():
    """Liveness probe: always returns {"status": "ok"}."""
    return {"status": "ok"}
|
|
||||||
|
|
||||||
|
|
||||||
@fastApi.get("/authenticated-route")
async def authenticated_route(user: User = Depends(current_active_verified_user)):
    """Sample route requiring an active, verified user; greets them by email."""
    return {"message": f"Hello {user.email}!"}
|
|
||||||
|
|
||||||
|
|
||||||
# NOTE(review): unauthenticated GET endpoint with a side effect (queues a
# scrape for every user).  Confirm this is gated elsewhere (e.g. network
# policy) or should require auth like the single-user variant below.
@fastApi.get("/debug/scrape/csas/all", tags=["debug"])
async def debug_scrape_csas_all():
    """Queue the Celery task that scrapes CSAS transactions for all users."""
    logging.info("[Debug] Queueing CSAS scrape for all users via HTTP endpoint (Celery)")
    task = load_all_transactions.delay()
    # getattr guards against broker backends that return a bare result object.
    return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
|
|
||||||
|
|
||||||
|
|
||||||
@fastApi.post("/debug/scrape/csas/{user_id}", tags=["debug"])
async def debug_scrape_csas_user(user_id: str, user: User = Depends(current_active_verified_user)):
    """Queue a CSAS scrape for one user (requires a verified caller).

    NOTE(review): user_id is not checked against the authenticated user —
    any verified user can trigger a scrape for an arbitrary id.  Confirm
    whether this debug endpoint should enforce ownership.
    """
    logging.info("[Debug] Queueing CSAS scrape for single user via HTTP endpoint (Celery) | user_id=%s", user_id)
    task = load_transactions.delay(user_id)
    return {"status": "queued", "action": "csas_scrape_single", "user_id": user_id,
            "task_id": getattr(task, 'id', None)}
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
from typing import Optional
|
|
||||||
import re
|
|
||||||
import jwt
|
|
||||||
from fastapi import Request
|
|
||||||
|
|
||||||
# Simple in-memory revocation store for revoked JWT tokens.  Holds raw token
# strings; membership is checked on every request by the auth middleware.
#
# Limitations:
# - All revoked tokens will be lost if the process restarts (data loss on restart).
# - Not suitable for multi-instance deployments: the revocation list is not shared between instances.
#   A token revoked in one instance will not be recognized as revoked in others.
# - The set grows unboundedly; expired tokens are never pruned.
#
# For production, use a persistent and shared store (e.g., Redis or a database).
_REVOKED_TOKENS: set[str] = set()
|
|
||||||
|
|
||||||
# Matches "Bearer <token>"; the auth-scheme is case-insensitive per RFC 7235
# (the previous pattern only accepted "Bearer"/"bearer").
_BEARER_RE = re.compile(r"^Bearer\s+(.+)$", re.IGNORECASE)


def extract_bearer_token(request: "Request") -> Optional[str]:
    """Return the bearer token from the Authorization header, or None.

    Accepts any capitalisation of the "Bearer" scheme and strips
    surrounding whitespace from the token value.  The annotation is a
    forward reference so this utility module imports without fastapi.
    """
    auth = request.headers.get("authorization")
    if not auth:
        return None
    match = _BEARER_RE.match(auth)
    if not match:
        return None
    return match.group(1).strip()
|
|
||||||
|
|
||||||
|
|
||||||
def revoke_token(token: str) -> None:
    """Add *token* to the in-memory revocation set (no-op for empty values)."""
    if token:
        _REVOKED_TOKENS.add(token)


def is_token_revoked(token: str) -> bool:
    """Return True if *token* was previously passed to revoke_token()."""
    return token in _REVOKED_TOKENS
|
|
||||||
|
|
||||||
|
|
||||||
def decode_and_verify_jwt(token: str, secret: str) -> dict:
    """Decode *token* with *secret*, verifying the signature and expiration.

    Audience verification is deliberately disabled for compatibility with
    fastapi-users default tokens; expiration (exp) is still checked because
    PyJWT verifies it by default.

    Raises:
        jwt.ExpiredSignatureError: the token has expired.
        jwt.InvalidTokenError: any other validation failure.

    Returns:
        The decoded payload as a dict.
    """
    decode_options = {"verify_aud": False}
    return jwt.decode(token, secret, algorithms=["HS256"], options=decode_options)
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
from fastapi_users_db_sqlalchemy import GUID
|
|
||||||
from sqlalchemy import Column, Integer, String, ForeignKey, Table, UniqueConstraint
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
|
|
||||||
from app.core.base import Base
|
|
||||||
|
|
||||||
# Many-to-many link table between categories and transactions.  Link rows
# are removed by the database when either side is deleted (ondelete CASCADE).
association_table = Table(
    "category_transaction",
    Base.metadata,
    Column("category_id", Integer, ForeignKey("categories.id", ondelete="CASCADE"), primary_key=True),
    Column("transaction_id", Integer, ForeignKey("transaction.id", ondelete="CASCADE"), primary_key=True)
)
|
|
||||||
|
|
||||||
|
|
||||||
class Category(Base):
    """A user-defined label attachable to many transactions.

    Category names are unique per user (uix_name_user_id), not globally.
    """

    __tablename__ = "categories"
    __table_args__ = (
        UniqueConstraint("name", "user_id", name="uix_name_user_id"),
    )
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(length=100), nullable=False)
    description = Column(String(length=255), nullable=True)
    # Owning user (FK to user.id); every category must have an owner.
    user_id = Column(GUID, ForeignKey("user.id"), nullable=False)
    user = relationship("User", back_populates="categories")
    # Many-to-many via the category_transaction association table.
    transactions = relationship("Transaction", secondary=association_table, back_populates="categories")
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
import os
|
|
||||||
from fastapi_users_db_sqlalchemy import GUID
|
|
||||||
from sqlalchemy import Column, Integer, String, Float, ForeignKey, Date, func
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from sqlalchemy_utils import EncryptedType
|
|
||||||
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
|
|
||||||
|
|
||||||
from app.core.base import Base
|
|
||||||
from app.models.categories import association_table
|
|
||||||
|
|
||||||
# Key for Fernet encryption of sensitive Transaction columns.
# NOTE(review): falls back to the hard-coded string "localdev" when
# DB_ENCRYPTION_KEY is unset — fine for local dev only; confirm the
# variable is always set in production deployments.
SECRET_KEY = os.environ.get("DB_ENCRYPTION_KEY", "localdev")
|
|
||||||
|
|
||||||
|
|
||||||
class Transaction(Base):
    """A single financial transaction belonging to one user.

    amount and description are encrypted at rest with Fernet (SECRET_KEY);
    date defaults to the database's current date when not supplied.
    """

    __tablename__ = "transaction"
    id = Column(Integer, primary_key=True, autoincrement=True)
    amount = Column(EncryptedType(Float, SECRET_KEY, engine=FernetEngine), nullable=False)
    description = Column(EncryptedType(String(length=255), SECRET_KEY, engine=FernetEngine), nullable=True)
    date = Column(Date, nullable=False, server_default=func.current_date())
    user_id = Column(GUID, ForeignKey("user.id"), nullable=False)

    # Relationship
    user = relationship("User", back_populates="transactions")
    # passive_deletes lets the DB-level CASCADE on the association table do
    # the cleanup instead of SQLAlchemy issuing per-row deletes.
    categories = relationship("Category", secondary=association_table, back_populates="transactions", passive_deletes=True)
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
from sqlalchemy import Column, String
|
|
||||||
from sqlalchemy.orm import relationship, mapped_column, Mapped
|
|
||||||
from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyBaseOAuthAccountTableUUID
|
|
||||||
from sqlalchemy.sql.sqltypes import JSON
|
|
||||||
|
|
||||||
from app.core.base import Base
|
|
||||||
|
|
||||||
|
|
||||||
class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base):
    """Linked OAuth provider account (fastapi-users UUID base table)."""

    # BankID token is longer than default
    access_token: Mapped[str] = mapped_column(String(length=4096), nullable=False)
|
|
||||||
|
|
||||||
|
|
||||||
class User(SQLAlchemyBaseUserTableUUID, Base):
    """Application user (fastapi-users UUID base table).

    config is a free-form JSON blob; e.g. the CSAS bank scraper reads the
    "csas" key from it.
    """

    first_name = Column(String(length=100), nullable=True)
    last_name = Column(String(length=100), nullable=True)
    # Eager-load linked OAuth accounts together with the user row.
    oauth_accounts = relationship("OAuthAccount", lazy="joined")
    config = Column(JSON, default={})

    # Relationship
    transactions = relationship("Transaction", back_populates="user")
    categories = relationship("Category", back_populates="user")
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
import secrets
|
|
||||||
from typing import Optional, Literal
|
|
||||||
|
|
||||||
from httpx_oauth.oauth2 import T
|
|
||||||
|
|
||||||
from app.oauth.custom_openid import CustomOpenID
|
|
||||||
|
|
||||||
|
|
||||||
class BankID(CustomOpenID):
    """OpenID Connect client for the Czech BankID sandbox."""

    def __init__(self, client_id: str, client_secret: str):
        super().__init__(
            client_id,
            client_secret,
            "https://oidc.sandbox.bankid.cz/.well-known/openid-configuration",
            "BankID",
            base_scopes=["openid", "profile.email", "profile.name"],
        )

    async def get_user_info(self, token: str) -> dict:
        """Map the provider profile to the name fields stored on User."""
        info = await self.get_profile(token)

        return {
            "first_name": info.get("given_name"),
            "last_name": info.get("family_name"),
        }

    async def get_authorization_url(
        self,
        redirect_uri: str,
        state: Optional[str] = None,
        scope: Optional[list[str]] = None,
        code_challenge: Optional[str] = None,
        code_challenge_method: Optional[Literal["plain", "S256"]] = None,
        extras_params: Optional[T] = None,
    ) -> str:
        """Build the authorization URL, injecting the nonce BankID requires.

        NOTE(review): the nonce is generated but not visibly verified on
        the callback path from here — confirm verification elsewhere.
        """
        if extras_params is None:
            extras_params = {}

        # BankID requires random nonce parameter for security
        # https://developer.bankid.cz/docs/security_sep
        extras_params["nonce"] = secrets.token_urlsafe()

        return await super().get_authorization_url(
            redirect_uri,
            state,
            scope,
            code_challenge,
            code_challenge_method,
            extras_params,
        )
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
import os
|
|
||||||
from os.path import dirname, join
|
|
||||||
from typing import Optional, Any
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
from httpx_oauth.exceptions import GetProfileError
|
|
||||||
from httpx_oauth.oauth2 import BaseOAuth2
|
|
||||||
|
|
||||||
import app.services.db
|
|
||||||
|
|
||||||
BASE_DIR = dirname(__file__)
# Client certificate pair (public cert, private key) for mutual-TLS requests
# to the CSAS sandbox API; both files live next to this module.
certs = (
    join(BASE_DIR, "public_key.pem"),
    join(BASE_DIR, "private_key.key")
)
|
|
||||||
|
|
||||||
class CSASOAuth(BaseOAuth2):
    """OAuth2 client for the Ceska sporitelna (CSAS) sandbox AIS API."""

    def __init__(self, client_id: str, client_secret: str):
        super().__init__(
            client_id,
            client_secret,
            # "aisp" = account information service provider scope.
            base_scopes=["aisp"],
            authorize_endpoint="https://webapi.developers.erstegroup.com/api/csas/sandbox/v1/sandbox-idp/auth",
            access_token_endpoint="https://webapi.developers.erstegroup.com/api/csas/sandbox/v1/sandbox-idp/token",
            # Token refresh goes through the same sandbox IdP endpoint.
            refresh_token_endpoint="https://webapi.developers.erstegroup.com/api/csas/sandbox/v1/sandbox-idp/token"
        )
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
from httpx_oauth.clients.openid import OpenID
|
|
||||||
|
|
||||||
|
|
||||||
class CustomOpenID(OpenID):
    """OpenID client base adding a provider-specific user-info hook."""

    async def get_user_info(self, token: str) -> dict:
        """Return provider profile fields; subclasses must override."""
        raise NotImplementedError()
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
import json
|
|
||||||
from typing import Optional, Literal, Any
|
|
||||||
|
|
||||||
from httpx_oauth.oauth2 import T
|
|
||||||
|
|
||||||
from app.oauth.custom_openid import CustomOpenID
|
|
||||||
|
|
||||||
|
|
||||||
class MojeIDOAuth(CustomOpenID):
    """OpenID Connect client for the Czech MojeID identity provider."""

    def __init__(self, client_id: str, client_secret: str):
        super().__init__(
            client_id,
            client_secret,
            "https://mojeid.cz/.well-known/openid-configuration/",
            "MojeID",
            base_scopes=["openid", "email", "profile"],
        )

    async def get_user_info(self, token: str) -> Optional[Any]:
        """Map the provider profile to the name fields stored on User."""
        info = await self.get_profile(token)

        return {
            "first_name": info.get("given_name"),
            "last_name": info.get("family_name"),
        }

    async def get_authorization_url(
        self,
        redirect_uri: str,
        state: Optional[str] = None,
        scope: Optional[list[str]] = None,
        code_challenge: Optional[str] = None,
        code_challenge_method: Optional[Literal["plain", "S256"]] = None,
        extras_params: Optional[T] = None,
    ) -> str:
        """Build the authorization URL, marking the profile claims MojeID
        must return as essential via the OIDC "claims" request parameter.
        """
        required_fields = {
            'id_token': {
                'name': {'essential': True},
                'given_name': {'essential': True},
                'family_name': {'essential': True},
                'email': {'essential': True},
                'mojeid_valid': {'essential': True},
            }}

        if extras_params is None:
            extras_params = {}
        # The claims parameter is a JSON-encoded string per the OIDC spec.
        extras_params["claims"] = json.dumps(required_fields)

        return await super().get_authorization_url(
            redirect_uri,
            state,
            scope,
            code_challenge,
            code_challenge_method,
            extras_params,
        )
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
-----BEGIN RSA PRIVATE KEY-----
|
|
||||||
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDcr/oxgV074ETd
|
|
||||||
DkP/0l8LFnRofru+m2wNNG/ttVCioTqwnvR4oYxwq3U9qIBsT0D+Rx/Ef7qcpzqf
|
|
||||||
/w9xt6Hosdv6I5jMHGaVQqLiPuV26/a7WvcmU+PpYuEBmbBHjGVJRBwgPtlUW1VL
|
|
||||||
M8Pht9YiaagEKvFa6SUidZLfPv+ECohqgH4mgMrEcG/BTnry0/5xQdadRC9o25cl
|
|
||||||
NtZIesS5GPeelhggFTkbh/FaxvMXhIAaRXT61cnxgxtfM71h5ObX5Lwle9z5a+Tw
|
|
||||||
xgQhSQq1jbHALYvTwsc4Q/NQGXpGNWy599sb7dg5AkPFSSF4ceXBo/2jOaZCqWrt
|
|
||||||
FVONZ+blAgMBAAECggEBAJwQbrRXsaFIRiq1jez5znC+3m+PQCHZM55a+NR3pqB7
|
|
||||||
uE9y+ZvdUr3S4sRJxxfRLDsl/Rcu5L8nm9PNwhQ/MmamcNQCHGoro3fmed3ZcNia
|
|
||||||
og94ktMt/DztygUhtIHEjVQ0sFc1WufG9xiJcPrM0MfhRAo+fBQ4UCSAVO8/U98B
|
|
||||||
a4yukrPNeEA03hyjLB9W41pNQfyOtAHqzwDg9Q5XVaGMCLZT1bjCIquUcht5iMva
|
|
||||||
tiw3cwdiYIklLTzTCsPPK9A/AlWZyUXL8KxtN0mU0kkwlXqASoXZ2nqdkhjRye/V
|
|
||||||
3JXOmlDtDaJCqWDpH2gHLxMCl7OjfPvuD66bAT3H63kCgYEA5zxW/l6oI3gwYW7+
|
|
||||||
j6rEjA2n8LikVnyW2e/PZ7pxBH3iBFe2DHx/imeqd/0IzixcM1zZT/V+PTFPQizG
|
|
||||||
lOU7stN6Zg/LuRdxneHPyLWCimJP7BBJCWyJkuxKy9psokyBhGSLR/phL3fP7UkB
|
|
||||||
o2I3vGmTFu5A0FzXcNH/cXPMdy8CgYEA9FJw3kyzXlInhJ6Cd63mckLPLYDArUsm
|
|
||||||
THBoeH2CVTBS5g0bCbl7N1ZxUoYwZPD4lg5V0nWhZALGf+85ULSjX03PMf1cc6WW
|
|
||||||
EIbZIo9hX+mGRa/FudDd+TlbtBnn0jucwABuLQi9mIepE55Hu9tw5/FT3cHeZVQc
|
|
||||||
cC0T6ulVvisCgYBCzFeFG+sOdAXl356B+h7VJozBKVWv9kXNp00O9fj4BzVnc78P
|
|
||||||
VFezr8a66snEZWQtIkFUq+JP4xK2VyD2mlHoktbk7OM5EOCtbzILFQQk3cmgtAOl
|
|
||||||
SUlkvAXPZcXEDL3NdQ4XOOkiQUY7kb97Z0AamZT4JtNqXaeO29si9wS12QKBgHYg
|
|
||||||
Hd3864Qg6GZgVOgUNiTsVErFw2KFwQCYIIqQ9CDH+myrzXTILuC0dJnXszI6p5W1
|
|
||||||
XJ0irmMyTFKykN2KWKrNbe3Xd4mad5GKARWKiSPcPkUXFNwgNhI3PzU2iTTGCaVz
|
|
||||||
D9HKNhC3FnIbxsb29AHQViITh7kqD43U3ZpoMkJ9AoGAZ+sg+CPfuo3ZMpbcdb3B
|
|
||||||
ZX2UhAvNKxgHvNnHOjO+pvaM7HiH+BT0650brfBWQ0nTG1dt18mCevVk1UM/5hO9
|
|
||||||
AtZw06vCLOJ3p3qpgkSlRZ1H7VokG9M8Od0zXqtJrmeLeBq7dfuDisYOuA+NUEbJ
|
|
||||||
UM/UHByieS6ywetruz0LpM0=
|
|
||||||
-----END RSA PRIVATE KEY-----
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
-----BEGIN CERTIFICATE-----
|
|
||||||
MIIFSTCCAzGgAwIBAgIEAQIDBDANBgkqhkiG9w0BAQsFADCBgDELMAkGA1UEBhMC
|
|
||||||
Q1oxDjAMBgNVBAcTBUN6ZWNoMRMwEQYDVQQKEwpFcnN0ZUdyb3VwMRUwEwYDVQQL
|
|
||||||
EwxFcnN0ZUh1YlRlYW0xETAPBgNVBAMTCEVyc3RlSHViMSIwIAYJKoZIhvcNAQkB
|
|
||||||
FhNpbmZvQGVyc3RlZ3JvdXAuY29tMB4XDTIyMTIxNDA4MDc1N1oXDTI2MDMxNDA4
|
|
||||||
MDc1N1owUjEaMBgGA1UEYRMRUFNEQ1otQ05CLTEyMzQ1NjcxCzAJBgNVBAYTAkNa
|
|
||||||
MRYwFAYDVQQDEw1UUFAgVGVzdCBRV0FDMQ8wDQYDVQQKEwZNeSBUUFAwggEiMA0G
|
|
||||||
CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDcr/oxgV074ETdDkP/0l8LFnRofru+
|
|
||||||
m2wNNG/ttVCioTqwnvR4oYxwq3U9qIBsT0D+Rx/Ef7qcpzqf/w9xt6Hosdv6I5jM
|
|
||||||
HGaVQqLiPuV26/a7WvcmU+PpYuEBmbBHjGVJRBwgPtlUW1VLM8Pht9YiaagEKvFa
|
|
||||||
6SUidZLfPv+ECohqgH4mgMrEcG/BTnry0/5xQdadRC9o25clNtZIesS5GPeelhgg
|
|
||||||
FTkbh/FaxvMXhIAaRXT61cnxgxtfM71h5ObX5Lwle9z5a+TwxgQhSQq1jbHALYvT
|
|
||||||
wsc4Q/NQGXpGNWy599sb7dg5AkPFSSF4ceXBo/2jOaZCqWrtFVONZ+blAgMBAAGj
|
|
||||||
gfcwgfQwCwYDVR0PBAQDAgHGMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
|
|
||||||
AjCBrwYIKwYBBQUHAQMEgaIwgZ8wCAYGBACORgEBMAsGBgQAjkYBAwIBFDAIBgYE
|
|
||||||
AI5GAQQwEwYGBACORgEGMAkGBwQAjkYBBgMwZwYGBACBmCcCMF0wTDARBgcEAIGY
|
|
||||||
JwEBDAZQU1BfQVMwEQYHBACBmCcBAgwGUFNQX1BJMBEGBwQAgZgnAQMMBlBTUF9B
|
|
||||||
STARBgcEAIGYJwEEDAZQU1BfSUMMBUVyc3RlDAZBVC1FUlMwFAYDVR0RBA0wC4IJ
|
|
||||||
bXl0cHAuY29tMA0GCSqGSIb3DQEBCwUAA4ICAQBlTMPSwz46GMRBEPcy+25gV7xE
|
|
||||||
5aFS5N6sf3YQyFelRJgPxxPxTHo55WelcK4XmXRQKeQ4VoKf4FgP0Cj74+p0N0gw
|
|
||||||
wFJDWPGXH3SdjAXPRtG+FOiHwUSoyrmvbL4kk6Vbrd4cF+qe0BlzHzJ2Q6vFLwsk
|
|
||||||
NYvWzkY9YjoItB38nAnQhyYgl1yHUK/uDWyrwHVfZn1AeTws/hr/KufORuiQfaTU
|
|
||||||
kvAH1nzi7WSJ6AIQCd2exUEPx/O14Y+oCoJhTVd+RpA/9lkcqebceBijj47b2bvv
|
|
||||||
QbjymvyTXqHd3L224Y7zVmh95g+CaJ8PRpApdrImfjfDDRy8PaFWx2pd/v0UQgrQ
|
|
||||||
lgbO6jE7ah/tS0T5q5JtwnLAiOOqHPaKRvo5WB65jcZ2fvOH/0/oZ89noxp1Ihus
|
|
||||||
vvsjqc9k2h9Rvt2pEjVU40HtQZ6XCmWqgFwK3n9CHrKNV/GqgANIZRNcvXKMCUoB
|
|
||||||
VoJORVwi2DF4caKSFmyEWuK+5FyCEILtQ60SY/NHVGsUeOuN7OTjZjECARO6p4hz
|
|
||||||
Uw+GCIXrzmIjS6ydh/LRef+NK28+xTbjmLHu/wnHg9rrHEnTPd39is+byfS7eeLV
|
|
||||||
Dld/0Xrv88C0wxz63dcwAceiahjyz2mbQm765tOf9rK7EqsvT5M8EXFJ3dP4zwqS
|
|
||||||
6mNFoIa0XGbAUT3E1w==
|
|
||||||
-----END CERTIFICATE-----
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
from typing import Optional
|
|
||||||
from pydantic import BaseModel, ConfigDict
|
|
||||||
|
|
||||||
|
|
||||||
class CategoryBase(BaseModel):
    # Fields shared by all category payloads.
    name: str
    description: Optional[str] = None


class CategoryCreate(CategoryBase):
    # Creation payload: identical to the base fields.
    pass


class CategoryUpdate(BaseModel):
    # Partial update: every field is optional.
    name: Optional[str] = None
    description: Optional[str] = None


class CategoryRead(CategoryBase):
    # Response schema; built directly from ORM objects (from_attributes).
    id: int
    model_config = ConfigDict(from_attributes=True)
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
from typing import List, Optional, Union
|
|
||||||
from datetime import date
|
|
||||||
from pydantic import BaseModel, Field, ConfigDict
|
|
||||||
|
|
||||||
|
|
||||||
class TransactionBase(BaseModel):
    # Shared transaction fields; the amount bounds reject values that would
    # overflow downstream storage.
    amount: float = Field(..., gt=-1e18, lt=1e18)
    description: Optional[str] = None
    # accept either ISO date string or date object
    date: Optional[Union[date, str]] = None


class TransactionCreate(TransactionBase):
    # Creation payload; categories are linked by id.
    category_ids: Optional[List[int]] = None


class TransactionUpdate(BaseModel):
    # Partial update: every field is optional.
    amount: Optional[float] = Field(None, gt=-1e18, lt=1e18)
    description: Optional[str] = None
    # accept either ISO date string or date object
    date: Optional[Union[date, str]] = None
    category_ids: Optional[List[int]] = None


class TransactionRead(TransactionBase):
    # Response schema; built from ORM objects (from_attributes).
    id: int
    category_ids: List[int] = []
    date: Optional[Union[date, str]]
    model_config = ConfigDict(from_attributes=True)
|
|
||||||
@@ -1,124 +0,0 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
from os.path import dirname, join
|
|
||||||
from time import strptime
|
|
||||||
from uuid import UUID
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
from sqlalchemy import select
|
|
||||||
|
|
||||||
from app.core.db import async_session_maker
|
|
||||||
from app.models.transaction import Transaction
|
|
||||||
from app.models.user import User
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)

# mTLS client certificate pair for the CSAS sandbox API; the files are
# stored in the sibling app/oauth package.
OAUTH_DIR = join(dirname(__file__), "..", "oauth")
CERTS = (
    join(OAUTH_DIR, "public_key.pem"),
    join(OAUTH_DIR, "private_key.key"),
)
|
|
||||||
|
|
||||||
|
|
||||||
async def aload_ceska_sporitelna_transactions(user_id: str) -> None:
    """Validate *user_id* and run the CSAS scrape for that single user.

    Invalid IDs are logged and ignored rather than raised, because this is
    invoked from fire-and-forget Celery tasks.
    """
    try:
        uid = UUID(str(user_id))
    except (ValueError, TypeError):
        # Narrowed from a bare `except Exception`: UUID() raises ValueError
        # for malformed strings (TypeError kept defensively); anything else
        # is unexpected and should propagate.
        logger.error("Invalid user_id provided to bank_scraper (async): %r", user_id)
        return

    await _aload_ceska_sporitelna_transactions(uid)
|
|
||||||
|
|
||||||
|
|
||||||
async def aload_all_ceska_sporitelna_transactions() -> None:
    """Scrape CSAS transactions for every user, isolating per-user failures.

    A failure for one user is logged and does not abort the remaining users.
    """
    async with async_session_maker() as session:
        result = await session.execute(select(User))
        users = result.unique().scalars().all()
        logger.info("[BankScraper] Starting CSAS scrape for all users | count=%d", len(users))

        processed = 0
        for user in users:
            try:
                await _aload_ceska_sporitelna_transactions(user.id)
                processed += 1
            except Exception:
                # Keep going: one user's failure must not stop the batch.
                logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
                                 getattr(user, 'email', None))
        logger.info("[BankScraper] Finished CSAS scrape for all users | processed=%d", processed)
|
|
||||||
|
|
||||||
|
|
||||||
async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
    """Fetch CSAS sandbox transactions for one user and store them.

    Reads the user's CSAS access token from User.config["csas"] (a JSON
    string), lists the user's accounts, then downloads the most recent
    transactions of every account and inserts them as Transaction rows.

    NOTE(review): there is no de-duplication — re-running the scrape
    inserts the same transactions again.  Confirm whether that is handled
    elsewhere before relying on repeated runs.
    """
    from datetime import datetime as _datetime

    async with async_session_maker() as session:
        result = await session.execute(select(User).where(User.id == user_id))
        user: User = result.unique().scalar_one_or_none()
        if user is None:
            logger.warning("User not found for id=%s", user_id)
            return

        cfg = user.config or {}
        if "csas" not in cfg:
            return

        # The CSAS link data is stored as a JSON-encoded string.
        cfg = json.loads(cfg["csas"])
        if "access_token" not in cfg:
            return

        # Shared headers for both API calls (previously duplicated inline).
        # NOTE(review): hard-coded sandbox API key — consider moving it to
        # configuration.
        auth_headers = {
            "Authorization": f"Bearer {cfg['access_token']}",
            "WEB-API-key": "09fdc637-3c57-4242-95f2-c2205a2438f3",
            "user-involved": "false",
        }

        # 1) List the user's accounts.
        accounts = []
        try:
            async with httpx.AsyncClient(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
                response = await client.get(
                    "https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts?size=10&page=0&sort=iban&order=desc",
                    headers=auth_headers,
                )
                if response.status_code != httpx.codes.OK:
                    return

                accounts.extend(response.json()["accounts"])

        except httpx.HTTPError:
            logger.exception("[BankScraper] HTTP error during CSAS request | user_id=%s", user_id)
            return

        # 2) Download and store each account's transactions.
        for account in accounts:
            account_id = account["id"]  # renamed from `id` (shadowed builtin)

            url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{account_id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
            async with httpx.AsyncClient(cert=CERTS) as client:
                response = await client.get(
                    url,
                    headers=auth_headers,
                )
                if response.status_code != httpx.codes.OK:
                    continue

                transactions = response.json()["transactions"]

                for transaction in transactions:
                    description = transaction.get("entryDetails", {}).get("transactionDetails", {}).get(
                        "additionalRemittanceInformation")
                    date_str = transaction.get("bookingDate", {}).get("date")
                    # Bug fix: time.strptime returned a struct_time, which is
                    # not a valid value for a SQLAlchemy Date column; parse to
                    # a datetime.date instead.
                    date = _datetime.strptime(date_str, "%Y-%m-%d").date() if date_str else None
                    amount = transaction.get("amount", {}).get("value")
                    # Debits are reported as magnitudes; store them negative.
                    if transaction.get("creditDebitIndicator") == "DBIT":
                        amount = -abs(amount)

                    obj = Transaction(
                        amount=amount,
                        description=description,
                        date=date,
                        user_id=user_id,
                    )
                    session.add(obj)
                # Commit once per account batch.
                await session.commit()
|
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
import logging
|
|
||||||
import asyncio
|
|
||||||
|
|
||||||
from celery import shared_task
|
|
||||||
|
|
||||||
import app.services.bank_scraper
|
|
||||||
|
|
||||||
logger = logging.getLogger("celery_tasks")
# Ensure task logs are visible even when Celery has not configured logging;
# only attach a handler once per process.
if not logger.handlers:
    _h = logging.StreamHandler()
    logger.addHandler(_h)
    logger.setLevel(logging.INFO)
|
|
||||||
|
|
||||||
|
|
||||||
def run_coro(coro) -> None:
    """Run an async coroutine in a fresh event loop without using run_until_complete.

    Primary strategy runs in a new loop in the current thread. If that fails due to
    debugger patches (e.g., Bad file descriptor from pydevd_nest_asyncio), fall back
    to running in a dedicated thread with its own event loop.

    NOTE(review): if the coroutine itself raises, `raise exc` below is caught
    by the broad `except Exception` and the already-awaited coroutine is
    re-submitted to the thread fallback — confirm this is intended.
    """
    import threading

    def _cleanup_loop(loop):
        # Cancel anything still pending so the loop can close cleanly, then
        # detach the loop from the current thread.
        try:
            pending = [t for t in asyncio.all_tasks(loop) if not t.done()]
            for t in pending:
                t.cancel()
            if pending:
                loop.run_until_complete(asyncio.gather(*pending, return_exceptions=True))
        except Exception:
            pass
        finally:
            try:
                loop.close()
            finally:
                asyncio.set_event_loop(None)

    # First attempt: Run in current thread with a fresh event loop
    try:
        loop = asyncio.get_event_loop_policy().new_event_loop()
        try:
            asyncio.set_event_loop(loop)
            task = loop.create_task(coro)
            # Stop the loop as soon as the task finishes (avoids
            # run_until_complete, which debugger patches may break).
            task.add_done_callback(lambda _t: loop.stop())
            loop.run_forever()
            exc = task.exception()
            if exc:
                raise exc
            return
        finally:
            _cleanup_loop(loop)
    except OSError as e:
        logger.warning("run_coro primary strategy failed (%s). Falling back to thread runner.", e)
    except Exception:
        # For any other unexpected errors, try thread fallback as well
        logger.exception("run_coro primary strategy raised; attempting thread fallback")

    # Fallback: Run in a dedicated thread with its own event loop
    error = {"exc": None}

    def _thread_target():
        loop = asyncio.new_event_loop()
        try:
            asyncio.set_event_loop(loop)
            task = loop.create_task(coro)
            task.add_done_callback(lambda _t: loop.stop())
            loop.run_forever()
            exc = task.exception()
            if exc:
                # Surface the failure to the calling thread via `error`.
                error["exc"] = exc
        finally:
            _cleanup_loop(loop)

    th = threading.Thread(target=_thread_target, name="celery-async-runner", daemon=True)
    th.start()
    th.join()
    if error["exc"] is not None:
        raise error["exc"]
|
|
||||||
|
|
||||||
|
|
||||||
@shared_task(name="workers.send_email")
def send_email(to: str, subject: str, body: str) -> None:
    """Celery task that "sends" an email.

    Currently a placeholder: validates the fields and logs the send;
    no real delivery happens yet.
    """
    if not to or not subject or not body:
        body_len = len(body) if body else 0
        logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, body_len)
        return

    # Placeholder for real email sending logic
    logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
|
|
||||||
|
|
||||||
|
|
||||||
@shared_task(name="workers.load_transactions")
def load_transactions(user_id: str) -> None:
    """Celery task: scrape CSAS transactions for a single user.

    The async scraper is driven to completion synchronously via run_coro.
    """
    if not user_id:
        logger.error("Load transactions task missing user_id.")
        return

    run_coro(app.services.bank_scraper.aload_ceska_sporitelna_transactions(user_id))

    # Placeholder for real transaction loading logic
    logger.info("[Celery] Transactions loaded for user_id=%s", user_id)
|
|
||||||
|
|
||||||
|
|
||||||
@shared_task(name="workers.load_all_transactions")
def load_all_transactions() -> None:
    """Celery task: scrape CSAS transactions for every user in the database."""
    logger.info("[Celery] Starting load_all_transactions")
    run_coro(app.services.bank_scraper.aload_all_ceska_sporitelna_transactions())
    logger.info("[Celery] Finished load_all_transactions")
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
[tool.pytest.ini_options]
|
|
||||||
pythonpath = "."
|
|
||||||
asyncio_mode = "auto"
|
|
||||||
asyncio_default_fixture_loop_scope = "session"
|
|
||||||
asyncio_default_test_loop_scope = "session"
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
import sys
|
|
||||||
import uuid
|
|
||||||
import types
|
|
||||||
import pytest
|
|
||||||
from fastapi.testclient import TestClient
|
|
||||||
from httpx import AsyncClient, ASGITransport
|
|
||||||
|
|
||||||
# Stub sentry_sdk to avoid optional dependency issues during import of app
stub = types.ModuleType("sentry_sdk")
stub.init = lambda *args, **kwargs: None
# setdefault keeps a real sentry_sdk if one is already importable/imported.
sys.modules.setdefault("sentry_sdk", stub)

# Import the FastAPI application (after the stub so app.app imports cleanly)
from app.app import fastApi as app  # noqa: E402
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
def fastapi_app():
    """Session-wide handle to the imported FastAPI application."""
    return app


@pytest.fixture(scope="session")
def client(fastapi_app):
    """Synchronous TestClient; server exceptions propagate into tests."""
    return TestClient(fastapi_app, raise_server_exceptions=True)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="function")
async def test_user(fastapi_app):
    """
    Creates a new user asynchronously and returns their credentials.
    Does NOT log them in.
    Using AsyncClient with ASGITransport avoids event loop conflicts with DB connections.
    """
    # Fresh random email per test so registrations never collide.
    unique_email = f"testuser_{uuid.uuid4()}@example.com"
    password = "a_strong_password"
    user_payload = {"email": unique_email, "password": password}

    transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        response = await ac.post("/auth/register", json=user_payload)
        assert response.status_code == 201

    # Keyed as "username"/"password" to match the OAuth2 login form fields.
    return {"username": unique_email, "password": password}
|
|
||||||
|
|
||||||
@@ -1,98 +0,0 @@
|
|||||||
import pytest
|
|
||||||
import uuid
|
|
||||||
from httpx import AsyncClient, ASGITransport
|
|
||||||
from fastapi import status
|
|
||||||
|
|
||||||
|
|
||||||
def test_e2e_minimal_auth_flow(client):
    """Smoke-test the unauthenticated surface: liveness, login validation,
    and protection of /users/me."""
    # 1) Service is alive
    alive = client.get("/")
    assert alive.status_code == status.HTTP_200_OK

    # 2) Attempt to login without payload should fail fast (validation error)
    login = client.post("/auth/jwt/login")
    assert login.status_code in (status.HTTP_400_BAD_REQUEST, status.HTTP_422_UNPROCESSABLE_CONTENT)

    # 3) Protected endpoint should not be accessible without token
    me = client.get("/users/me")
    assert me.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_e2e_full_user_lifecycle(fastapi_app, test_user):
|
|
||||||
# Use an AsyncClient with ASGITransport for async tests
|
|
||||||
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
|
||||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
|
||||||
login_payload = test_user
|
|
||||||
|
|
||||||
# 1. Log in with the new credentials
|
|
||||||
login_resp = await ac.post("/auth/jwt/login", data=login_payload)
|
|
||||||
assert login_resp.status_code == status.HTTP_200_OK
|
|
||||||
token = login_resp.json()["access_token"]
|
|
||||||
headers = {"Authorization": f"Bearer {token}"}
|
|
||||||
|
|
||||||
# 2. Access a protected endpoint
|
|
||||||
me_resp = await ac.get("/users/me", headers=headers)
|
|
||||||
assert me_resp.status_code == status.HTTP_200_OK
|
|
||||||
assert me_resp.json()["email"] == test_user["username"]
|
|
||||||
|
|
||||||
# 3. Update the user's profile
|
|
||||||
update_payload = {"first_name": "Test"}
|
|
||||||
patch_resp = await ac.patch("/users/me", json=update_payload, headers=headers)
|
|
||||||
assert patch_resp.status_code == status.HTTP_200_OK
|
|
||||||
assert patch_resp.json()["first_name"] == "Test"
|
|
||||||
|
|
||||||
# 4. Log out
|
|
||||||
logout_resp = await ac.post("/auth/jwt/logout", headers=headers)
|
|
||||||
assert logout_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
# 5. Verify token is invalid
|
|
||||||
me_again_resp = await ac.get("/users/me", headers=headers)
|
|
||||||
assert me_again_resp.status_code == status.HTTP_401_UNAUTHORIZED
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_e2e_transaction_workflow(fastapi_app, test_user):
|
|
||||||
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
|
||||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
|
||||||
# 1. Log in to get the token
|
|
||||||
login_resp = await ac.post("/auth/jwt/login", data=test_user)
|
|
||||||
token = login_resp.json()["access_token"]
|
|
||||||
headers = {"Authorization": f"Bearer {token}"}
|
|
||||||
|
|
||||||
# NEW STEP: Create a category first to get a valid ID
|
|
||||||
category_payload = {"name": "Test Category for E2E"}
|
|
||||||
create_category_resp = await ac.post("/categories/create", json=category_payload, headers=headers)
|
|
||||||
assert create_category_resp.status_code == status.HTTP_201_CREATED
|
|
||||||
category_id = create_category_resp.json()["id"]
|
|
||||||
|
|
||||||
# 2. Create a new transaction
|
|
||||||
tx_payload = {"amount": -55.40, "description": "Milk and eggs"}
|
|
||||||
tx_resp = await ac.post("/transactions/create", json=tx_payload, headers=headers)
|
|
||||||
assert tx_resp.status_code == status.HTTP_201_CREATED
|
|
||||||
tx_id = tx_resp.json()["id"]
|
|
||||||
|
|
||||||
# 3. Assign the category
|
|
||||||
assign_resp = await ac.post(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
|
|
||||||
assert assign_resp.status_code == status.HTTP_200_OK
|
|
||||||
|
|
||||||
# 4. Verify assignment
|
|
||||||
get_tx_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
|
|
||||||
assert category_id in get_tx_resp.json()["category_ids"]
|
|
||||||
|
|
||||||
# 5. Unassign the category
|
|
||||||
unassign_resp = await ac.delete(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
|
|
||||||
assert unassign_resp.status_code == status.HTTP_200_OK
|
|
||||||
|
|
||||||
# 6. Get the transaction again and verify the category is gone
|
|
||||||
get_tx_again_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
|
|
||||||
final_tx_data = get_tx_again_resp.json()
|
|
||||||
assert category_id not in final_tx_data["category_ids"]
|
|
||||||
|
|
||||||
# 7. Delete the transaction for cleanup
|
|
||||||
delete_resp = await ac.delete(f"/transactions/{tx_id}/delete", headers=headers)
|
|
||||||
assert delete_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
# NEW STEP: Clean up the created category
|
|
||||||
delete_category_resp = await ac.delete(f"/categories/{category_id}", headers=headers)
|
|
||||||
assert delete_category_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
from fastapi import status
|
|
||||||
import pytest
|
|
||||||
from httpx import AsyncClient, ASGITransport
|
|
||||||
|
|
||||||
|
|
||||||
def test_root_ok(client):
|
|
||||||
resp = client.get("/")
|
|
||||||
assert resp.status_code == status.HTTP_200_OK
|
|
||||||
assert resp.json() == {"status": "ok"}
|
|
||||||
|
|
||||||
|
|
||||||
def test_authenticated_route_requires_auth(client):
|
|
||||||
resp = client.get("/authenticated-route")
|
|
||||||
assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_create_and_get_category(fastapi_app, test_user):
|
|
||||||
# Use AsyncClient for async tests
|
|
||||||
transport = ASGITransport(app=fastapi_app)
|
|
||||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
|
||||||
# 1. Log in to get an auth token
|
|
||||||
login_resp = await ac.post("/auth/jwt/login", data=test_user)
|
|
||||||
token = login_resp.json()["access_token"]
|
|
||||||
headers = {"Authorization": f"Bearer {token}"}
|
|
||||||
|
|
||||||
# 2. Define and create the new category
|
|
||||||
category_name = "Async Integration Test"
|
|
||||||
category_payload = {"name": category_name}
|
|
||||||
create_resp = await ac.post("/categories/create", json=category_payload, headers=headers)
|
|
||||||
|
|
||||||
# 3. Assert creation was successful
|
|
||||||
assert create_resp.status_code == status.HTTP_201_CREATED
|
|
||||||
created_data = create_resp.json()
|
|
||||||
category_id = created_data["id"]
|
|
||||||
assert created_data["name"] == category_name
|
|
||||||
|
|
||||||
# 4. GET the list of categories to verify
|
|
||||||
list_resp = await ac.get("/categories/", headers=headers)
|
|
||||||
assert list_resp.status_code == status.HTTP_200_OK
|
|
||||||
|
|
||||||
# 5. Check that our new category is in the list
|
|
||||||
categories_list = list_resp.json()
|
|
||||||
assert any(cat["name"] == category_name for cat in categories_list)
|
|
||||||
|
|
||||||
delete_resp = await ac.delete(f"/categories/{category_id}", headers=headers)
|
|
||||||
assert delete_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_create_transaction_missing_amount_fails(fastapi_app, test_user):
|
|
||||||
transport = ASGITransport(app=fastapi_app)
|
|
||||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
|
||||||
# 1. Log in to get an auth token
|
|
||||||
login_resp = await ac.post("/auth/jwt/login", data=test_user)
|
|
||||||
token = login_resp.json()["access_token"]
|
|
||||||
headers = {"Authorization": f"Bearer {token}"}
|
|
||||||
|
|
||||||
# 2. Define an invalid payload
|
|
||||||
invalid_payload = {"description": "This should fail"}
|
|
||||||
|
|
||||||
# 3. Attempt to create the transaction
|
|
||||||
resp = await ac.post("/transactions/create", json=invalid_payload, headers=headers)
|
|
||||||
|
|
||||||
# 4. Assert the expected validation error
|
|
||||||
assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
import types
|
|
||||||
import asyncio
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from app.services import user_service
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_oauth_provider_known_unknown():
|
|
||||||
# Known providers should return a provider instance
|
|
||||||
bankid = user_service.get_oauth_provider("BankID")
|
|
||||||
mojeid = user_service.get_oauth_provider("MojeID")
|
|
||||||
assert bankid is not None
|
|
||||||
assert mojeid is not None
|
|
||||||
|
|
||||||
# Unknown should return None
|
|
||||||
assert user_service.get_oauth_provider("DoesNotExist") is None
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_jwt_strategy_lifetime():
|
|
||||||
strategy = user_service.get_jwt_strategy()
|
|
||||||
assert strategy is not None
|
|
||||||
# Basic smoke check: strategy has a lifetime set to 604800
|
|
||||||
assert getattr(strategy, "lifetime_seconds", None) in (604800,)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_on_after_request_verify_enqueues_email(monkeypatch):
|
|
||||||
calls = {}
|
|
||||||
|
|
||||||
def fake_enqueue_email(to: str, subject: str, body: str):
|
|
||||||
calls.setdefault("emails", []).append({
|
|
||||||
"to": to,
|
|
||||||
"subject": subject,
|
|
||||||
"body": body,
|
|
||||||
})
|
|
||||||
|
|
||||||
# Patch the enqueue_email used inside user_service
|
|
||||||
monkeypatch.setattr(user_service, "enqueue_email", fake_enqueue_email)
|
|
||||||
|
|
||||||
class DummyUser:
|
|
||||||
def __init__(self, email):
|
|
||||||
self.email = email
|
|
||||||
|
|
||||||
mgr = user_service.UserManager(user_db=None) # user_db not needed for this method
|
|
||||||
user = DummyUser("test@example.com")
|
|
||||||
|
|
||||||
# Call the hook
|
|
||||||
await mgr.on_after_request_verify(user, token="abc123", request=None)
|
|
||||||
|
|
||||||
# Verify one email has been enqueued with expected content
|
|
||||||
assert len(calls.get("emails", [])) == 1
|
|
||||||
email = calls["emails"][0]
|
|
||||||
assert email["to"] == "test@example.com"
|
|
||||||
assert "ověření účtu" in email["subject"].lower()
|
|
||||||
assert "abc123" in email["body"]
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
apiVersion: v2
|
|
||||||
name: myapp-chart
|
|
||||||
version: 0.1.0
|
|
||||||
description: Helm chart for my app with MariaDB Database CR
|
|
||||||
appVersion: "1.0.0"
|
|
||||||
type: application
|
|
||||||
@@ -1,124 +0,0 @@
|
|||||||
apiVersion: apps/v1
|
|
||||||
kind: Deployment
|
|
||||||
metadata:
|
|
||||||
name: {{ .Values.app.name }}
|
|
||||||
spec:
|
|
||||||
replicas: {{ .Values.app.replicas }}
|
|
||||||
revisionHistoryLimit: 3
|
|
||||||
selector:
|
|
||||||
matchLabels:
|
|
||||||
app: {{ .Values.app.name }}
|
|
||||||
endpoint: metrics
|
|
||||||
template:
|
|
||||||
metadata:
|
|
||||||
labels:
|
|
||||||
app: {{ .Values.app.name }}
|
|
||||||
endpoint: metrics
|
|
||||||
spec:
|
|
||||||
containers:
|
|
||||||
- name: {{ .Values.app.name }}
|
|
||||||
image: "{{- if .Values.image.digest -}}{{ .Values.image.repository }}@{{ .Values.image.digest }}{{- else -}}{{ .Values.image.repository }}:{{ default "latest" .Values.image.tag }}{{- end -}}"
|
|
||||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
|
||||||
securityContext:
|
|
||||||
allowPrivilegeEscalation: false
|
|
||||||
capabilities:
|
|
||||||
drop: [ "ALL" ]
|
|
||||||
ports:
|
|
||||||
- containerPort: {{ .Values.app.port }}
|
|
||||||
env:
|
|
||||||
- name: MARIADB_HOST
|
|
||||||
value: "mariadb-repl-maxscale-internal.mariadb-operator.svc.cluster.local"
|
|
||||||
- name: MARIADB_PORT
|
|
||||||
value: '3306'
|
|
||||||
- name: MARIADB_DB
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: MARIADB_DB
|
|
||||||
- name: MARIADB_USER
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: MARIADB_USER
|
|
||||||
- name: MARIADB_PASSWORD
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: MARIADB_PASSWORD
|
|
||||||
- name: RABBITMQ_USERNAME
|
|
||||||
value: {{ .Values.rabbitmq.username | quote }}
|
|
||||||
- name: RABBITMQ_PASSWORD
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: RABBITMQ_PASSWORD
|
|
||||||
- name: RABBITMQ_HOST
|
|
||||||
value: {{ printf "%s.%s.svc.cluster.local" "rabbitmq-cluster" .Release.Namespace | quote }}
|
|
||||||
- name: RABBITMQ_PORT
|
|
||||||
value: {{ .Values.rabbitmq.port | quote }}
|
|
||||||
- name: RABBITMQ_VHOST
|
|
||||||
value: {{ .Values.rabbitmq.vhost | default "/" | quote }}
|
|
||||||
- name: MAIL_QUEUE
|
|
||||||
value: {{ .Values.worker.mailQueueName | default "mail_queue" | quote }}
|
|
||||||
- name: MOJEID_CLIENT_ID
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: MOJEID_CLIENT_ID
|
|
||||||
- name: MOJEID_CLIENT_SECRET
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: MOJEID_CLIENT_SECRET
|
|
||||||
- name: BANKID_CLIENT_ID
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: BANKID_CLIENT_ID
|
|
||||||
- name: BANKID_CLIENT_SECRET
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: BANKID_CLIENT_SECRET
|
|
||||||
- name: CSAS_CLIENT_ID
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: CSAS_CLIENT_ID
|
|
||||||
- name: CSAS_CLIENT_SECRET
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: CSAS_CLIENT_SECRET
|
|
||||||
- name: DOMAIN
|
|
||||||
value: {{ required "Set .Values.domain" .Values.domain | quote }}
|
|
||||||
- name: DOMAIN_SCHEME
|
|
||||||
value: {{ required "Set .Values.domain_scheme" .Values.domain_scheme | quote }}
|
|
||||||
- name: FRONTEND_DOMAIN
|
|
||||||
value: {{ required "Set .Values.frontend_domain" .Values.frontend_domain | quote }}
|
|
||||||
- name: FRONTEND_DOMAIN_SCHEME
|
|
||||||
value: {{ required "Set .Values.frontend_domain_scheme" .Values.frontend_domain_scheme | quote }}
|
|
||||||
- name: SENTRY_DSN
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: SENTRY_DSN
|
|
||||||
- name: DB_ENCRYPTION_KEY
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: DB_ENCRYPTION_KEY
|
|
||||||
livenessProbe:
|
|
||||||
httpGet:
|
|
||||||
path: /
|
|
||||||
port: {{ .Values.app.port }}
|
|
||||||
initialDelaySeconds: 10
|
|
||||||
periodSeconds: 10
|
|
||||||
failureThreshold: 3
|
|
||||||
readinessProbe:
|
|
||||||
httpGet:
|
|
||||||
path: /
|
|
||||||
port: {{ .Values.app.port }}
|
|
||||||
initialDelaySeconds: 10
|
|
||||||
periodSeconds: 10
|
|
||||||
failureThreshold: 3
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
apiVersion: k8s.mariadb.com/v1alpha1
|
|
||||||
kind: Grant
|
|
||||||
metadata:
|
|
||||||
name: grant
|
|
||||||
spec:
|
|
||||||
mariaDbRef:
|
|
||||||
name: {{ .Values.mariadb.mariaDbRef.name }}
|
|
||||||
namespace: {{ .Values.mariadb.mariaDbRef.namespace }}
|
|
||||||
privileges:
|
|
||||||
- "ALL PRIVILEGES"
|
|
||||||
database: {{ required "Set .Values.deployment" .Values.deployment | quote }}
|
|
||||||
table: "*"
|
|
||||||
username: {{ required "Set .Values.deployment" .Values.deployment | quote }}
|
|
||||||
grantOption: true
|
|
||||||
host: "%"
|
|
||||||
cleanupPolicy: {{ .Values.mariadb.cleanupPolicy }}
|
|
||||||
requeueInterval: {{ .Values.mariadb.requeueInterval | quote }}
|
|
||||||
retryInterval: {{ .Values.mariadb.retryInterval | quote }}
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
apiVersion: v1
|
|
||||||
kind: Secret
|
|
||||||
metadata:
|
|
||||||
name: {{ required "Set .Values.database.secretName" .Values.database.secretName }}
|
|
||||||
type: kubernetes.io/basic-auth
|
|
||||||
stringData:
|
|
||||||
password: {{ required "Set .Values.database.password" .Values.database.password | quote }}
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
apiVersion: k8s.mariadb.com/v1alpha1
|
|
||||||
kind: User
|
|
||||||
metadata:
|
|
||||||
name: {{ required "Set .Values.deployment" .Values.deployment }}
|
|
||||||
spec:
|
|
||||||
mariaDbRef:
|
|
||||||
name: {{ .Values.mariadb.mariaDbRef.name }}
|
|
||||||
namespace: {{ .Values.mariadb.mariaDbRef.namespace }}
|
|
||||||
passwordSecretKeyRef:
|
|
||||||
name: {{ required "Set .Values.database.secretName" .Values.database.secretName }}
|
|
||||||
key: password
|
|
||||||
maxUserConnections: 20
|
|
||||||
host: "%"
|
|
||||||
cleanupPolicy: {{ .Values.mariadb.cleanupPolicy }}
|
|
||||||
requeueInterval: {{ .Values.mariadb.requeueInterval | quote }}
|
|
||||||
retryInterval: {{ .Values.mariadb.retryInterval | quote }}
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
apiVersion: k8s.mariadb.com/v1alpha1
|
|
||||||
kind: Database
|
|
||||||
metadata:
|
|
||||||
name: {{ required "Set .Values.deployment" .Values.deployment }}
|
|
||||||
spec:
|
|
||||||
mariaDbRef:
|
|
||||||
name: {{ .Values.mariadb.mariaDbRef.name | required "Values mariadb.mariaDbRef.name is required" }}
|
|
||||||
namespace: {{ .Values.mariadb.mariaDbRef.namespace | default .Release.Namespace }}
|
|
||||||
characterSet: utf8
|
|
||||||
collate: utf8_general_ci
|
|
||||||
cleanupPolicy: {{ .Values.mariadb.cleanupPolicy }}
|
|
||||||
requeueInterval: {{ .Values.mariadb.requeueInterval | quote }}
|
|
||||||
retryInterval: {{ .Values.mariadb.retryInterval | quote }}
|
|
||||||
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
apiVersion: monitoring.coreos.com/v1
|
|
||||||
kind: ServiceMonitor
|
|
||||||
metadata:
|
|
||||||
name: fastapi-servicemonitor
|
|
||||||
labels:
|
|
||||||
release: kube-prometheus-stack
|
|
||||||
spec:
|
|
||||||
selector:
|
|
||||||
matchLabels:
|
|
||||||
app: {{ .Values.app.name }}
|
|
||||||
endpoints:
|
|
||||||
- port: http
|
|
||||||
path: /metrics
|
|
||||||
interval: 15s
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
apiVersion: v1
|
|
||||||
kind: Secret
|
|
||||||
metadata:
|
|
||||||
name: prod
|
|
||||||
type: Opaque
|
|
||||||
stringData:
|
|
||||||
MOJEID_CLIENT_ID: {{ .Values.oauth.mojeid.clientId | quote }}
|
|
||||||
MOJEID_CLIENT_SECRET: {{ .Values.oauth.mojeid.clientSecret | quote }}
|
|
||||||
BANKID_CLIENT_ID: {{ .Values.oauth.bankid.clientId | quote }}
|
|
||||||
BANKID_CLIENT_SECRET: {{ .Values.oauth.bankid.clientSecret | quote }}
|
|
||||||
CSAS_CLIENT_ID: {{ .Values.oauth.csas.clientId | quote }}
|
|
||||||
CSAS_CLIENT_SECRET: {{ .Values.oauth.csas.clientSecret | quote }}
|
|
||||||
# Database credentials
|
|
||||||
MARIADB_DB: {{ required "Set .Values.deployment" .Values.deployment | quote }}
|
|
||||||
MARIADB_USER: {{ required "Set .Values.deployment" .Values.deployment | quote }}
|
|
||||||
MARIADB_PASSWORD: {{ .Values.database.password | default "" | quote }}
|
|
||||||
# RabbitMQ credentials
|
|
||||||
RABBITMQ_PASSWORD: {{ .Values.rabbitmq.password | default "" | quote }}
|
|
||||||
RABBITMQ_USERNAME: {{ .Values.rabbitmq.username | quote }}
|
|
||||||
SENTRY_DSN: {{ .Values.sentry_dsn | quote }}
|
|
||||||
DB_ENCRYPTION_KEY: {{ required "Set .Values.database.encryptionSecret" .Values.database.encryptionSecret | quote }}
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
apiVersion: rabbitmq.com/v1beta1
|
|
||||||
kind: RabbitmqCluster
|
|
||||||
metadata:
|
|
||||||
name: "rabbitmq-cluster"
|
|
||||||
namespace: {{ .Release.Namespace }}
|
|
||||||
spec:
|
|
||||||
replicas: {{ .Values.rabbitmq.replicas | default 1 }}
|
|
||||||
persistence:
|
|
||||||
storage: {{ .Values.rabbitmq.storage | default "1Gi" }}
|
|
||||||
resources: {}
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
apiVersion: rabbitmq.com/v1beta1
|
|
||||||
kind: Permission
|
|
||||||
metadata:
|
|
||||||
name: {{ printf "%s-permission" (.Values.rabbitmq.username | default "demo-app") }}
|
|
||||||
namespace: {{ .Release.Namespace }}
|
|
||||||
spec:
|
|
||||||
rabbitmqClusterReference:
|
|
||||||
name: rabbitmq-cluster
|
|
||||||
namespace: {{ .Release.Namespace }}
|
|
||||||
vhost: {{ .Values.rabbitmq.vhost | default "/" | quote }}
|
|
||||||
user: {{ .Values.rabbitmq.username | default "demo-app" }}
|
|
||||||
permissions:
|
|
||||||
configure: ".*"
|
|
||||||
read: ".*"
|
|
||||||
write: ".*"
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
apiVersion: rabbitmq.com/v1beta1
|
|
||||||
kind: Queue
|
|
||||||
metadata:
|
|
||||||
name: {{ .Values.worker.mailQueueName | replace "_" "-" | lower }}
|
|
||||||
namespace: {{ .Release.Namespace }}
|
|
||||||
spec:
|
|
||||||
rabbitmqClusterReference:
|
|
||||||
name: rabbitmq-cluster
|
|
||||||
namespace: {{ .Release.Namespace }}
|
|
||||||
name: {{ .Values.worker.mailQueueName }}
|
|
||||||
vhost: {{ .Values.rabbitmq.vhost | default "/" | quote }}
|
|
||||||
durable: true
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
{{- if .Values.rabbitmq.password }}
|
|
||||||
apiVersion: v1
|
|
||||||
kind: Secret
|
|
||||||
metadata:
|
|
||||||
name: {{ printf "%s-user-credentials" (.Values.rabbitmq.username | default "app-user") }}
|
|
||||||
namespace: {{ .Release.Namespace }}
|
|
||||||
stringData:
|
|
||||||
password: {{ .Values.rabbitmq.password | quote }}
|
|
||||||
username: {{ .Values.rabbitmq.username | quote }}
|
|
||||||
{{- end }}
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
apiVersion: rabbitmq.com/v1beta1
|
|
||||||
kind: User
|
|
||||||
metadata:
|
|
||||||
name: {{ .Values.rabbitmq.username | default "demo-app" }}
|
|
||||||
namespace: {{ .Release.Namespace }}
|
|
||||||
spec:
|
|
||||||
rabbitmqClusterReference:
|
|
||||||
name: rabbitmq-cluster
|
|
||||||
namespace: {{ .Release.Namespace }}
|
|
||||||
tags:
|
|
||||||
- management
|
|
||||||
importCredentialsSecret:
|
|
||||||
name: {{ printf "%s-user-credentials" (.Values.rabbitmq.username | default "app-user") }}
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
apiVersion: v1
|
|
||||||
kind: Service
|
|
||||||
metadata:
|
|
||||||
name: {{ .Values.app.name }}
|
|
||||||
labels:
|
|
||||||
app: {{ .Values.app.name }}
|
|
||||||
spec:
|
|
||||||
ports:
|
|
||||||
- name: http
|
|
||||||
port: {{ .Values.service.port }}
|
|
||||||
targetPort: {{ .Values.app.port }}
|
|
||||||
selector:
|
|
||||||
app: {{ .Values.app.name }}
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
apiVersion: networking.cfargotunnel.com/v1alpha1
|
|
||||||
kind: TunnelBinding
|
|
||||||
metadata:
|
|
||||||
name: guestbook-tunnel-binding
|
|
||||||
namespace: {{ .Release.Namespace }}
|
|
||||||
subjects:
|
|
||||||
- name: app-server
|
|
||||||
spec:
|
|
||||||
target: {{ printf "http://%s.%s.svc.cluster.local" .Values.app.name .Release.Namespace | quote }}
|
|
||||||
fqdn: {{ required "Set .Values.domain via --set domain=example.com" .Values.domain | quote }}
|
|
||||||
noTlsVerify: true
|
|
||||||
tunnelRef:
|
|
||||||
kind: ClusterTunnel
|
|
||||||
name: cluster-tunnel
|
|
||||||
@@ -1,87 +0,0 @@
|
|||||||
apiVersion: apps/v1
|
|
||||||
kind: Deployment
|
|
||||||
metadata:
|
|
||||||
name: {{ printf "%s-worker" .Values.app.name }}
|
|
||||||
spec:
|
|
||||||
replicas: {{ .Values.worker.replicas }}
|
|
||||||
revisionHistoryLimit: 3
|
|
||||||
selector:
|
|
||||||
matchLabels:
|
|
||||||
app: {{ printf "%s-worker" .Values.app.name }}
|
|
||||||
template:
|
|
||||||
metadata:
|
|
||||||
labels:
|
|
||||||
app: {{ printf "%s-worker" .Values.app.name }}
|
|
||||||
spec:
|
|
||||||
containers:
|
|
||||||
- name: {{ printf "%s-worker" .Values.app.name }}
|
|
||||||
image: "{{- if .Values.image.digest -}}{{ .Values.image.repository }}@{{ .Values.image.digest }}{{- else -}}{{ .Values.image.repository }}:{{ default "latest" .Values.image.tag }}{{- end -}}"
|
|
||||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
|
||||||
securityContext:
|
|
||||||
allowPrivilegeEscalation: false
|
|
||||||
capabilities:
|
|
||||||
drop: [ "ALL" ]
|
|
||||||
command:
|
|
||||||
- celery
|
|
||||||
- -A
|
|
||||||
- app.celery_app
|
|
||||||
- worker
|
|
||||||
- -Q
|
|
||||||
- $(MAIL_QUEUE)
|
|
||||||
- --loglevel
|
|
||||||
- INFO
|
|
||||||
env:
|
|
||||||
- name: MARIADB_HOST
|
|
||||||
value: "mariadb-repl-maxscale-internal.mariadb-operator.svc.cluster.local"
|
|
||||||
- name: MARIADB_PORT
|
|
||||||
value: '3306'
|
|
||||||
- name: MARIADB_DB
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: MARIADB_DB
|
|
||||||
- name: MARIADB_USER
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: MARIADB_USER
|
|
||||||
- name: MARIADB_PASSWORD
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: MARIADB_PASSWORD
|
|
||||||
- name: RABBITMQ_USERNAME
|
|
||||||
value: {{ .Values.rabbitmq.username | quote }}
|
|
||||||
- name: RABBITMQ_PASSWORD
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: RABBITMQ_PASSWORD
|
|
||||||
- name: RABBITMQ_HOST
|
|
||||||
value: {{ printf "%s.%s.svc.cluster.local" "rabbitmq-cluster" .Release.Namespace | quote }}
|
|
||||||
- name: RABBITMQ_PORT
|
|
||||||
value: {{ .Values.rabbitmq.port | quote }}
|
|
||||||
- name: RABBITMQ_VHOST
|
|
||||||
value: {{ .Values.rabbitmq.vhost | default "/" | quote }}
|
|
||||||
- name: MAIL_QUEUE
|
|
||||||
value: {{ .Values.worker.mailQueueName | default "mail_queue" | quote }}
|
|
||||||
- name: SENTRY_DSN
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: SENTRY_DSN
|
|
||||||
- name: CSAS_CLIENT_ID
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: CSAS_CLIENT_ID
|
|
||||||
- name: CSAS_CLIENT_SECRET
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: CSAS_CLIENT_SECRET
|
|
||||||
- name: DB_ENCRYPTION_KEY
|
|
||||||
valueFrom:
|
|
||||||
secretKeyRef:
|
|
||||||
name: prod
|
|
||||||
key: DB_ENCRYPTION_KEY
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
env: dev
|
|
||||||
|
|
||||||
mariadb:
|
|
||||||
cleanupPolicy: Delete
|
|
||||||
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
env: prod
|
|
||||||
|
|
||||||
app:
|
|
||||||
replicas: 3
|
|
||||||
|
|
||||||
worker:
|
|
||||||
replicas: 3
|
|
||||||
@@ -1,78 +0,0 @@
|
|||||||
# Base values shared across environments
|
|
||||||
env: dev
|
|
||||||
|
|
||||||
# Optional PR number used to suffix DB name, set via --set prNumber=123 in CI
|
|
||||||
prNumber: ""
|
|
||||||
|
|
||||||
# Optional deployment identifier used to suffix resource names (db, user, secret)
|
|
||||||
# Example: --set deployment=alice or --set deployment=feature123
|
|
||||||
deployment: ""
|
|
||||||
|
|
||||||
# Public domain to expose the app under (used by TunnelBinding fqdn)
|
|
||||||
# Set at install time: --set domain=example.com
|
|
||||||
domain: ""
|
|
||||||
domain_scheme: ""
|
|
||||||
|
|
||||||
frontend_domain: ""
|
|
||||||
frontend_domain_scheme: ""
|
|
||||||
|
|
||||||
sentry_dsn: ""
|
|
||||||
|
|
||||||
image:
|
|
||||||
repository: lukastrkan/cc-app-demo
|
|
||||||
# You can use a tag or digest. If digest is provided, it takes precedence.
|
|
||||||
digest: ""
|
|
||||||
pullPolicy: IfNotPresent
|
|
||||||
|
|
||||||
app:
|
|
||||||
name: "finance-tracker"
|
|
||||||
replicas: 1
|
|
||||||
port: 8000
|
|
||||||
|
|
||||||
worker:
|
|
||||||
name: app-demo-worker
|
|
||||||
replicas: 1
|
|
||||||
# Queue name for Celery worker and for CRD Queue
|
|
||||||
mailQueueName: "mail_queue"
|
|
||||||
|
|
||||||
|
|
||||||
service:
|
|
||||||
port: 80
|
|
||||||
|
|
||||||
oauth:
|
|
||||||
bankid:
|
|
||||||
clientId: ""
|
|
||||||
clientSecret: ""
|
|
||||||
mojeid:
|
|
||||||
clientId: ""
|
|
||||||
clientSecret: ""
|
|
||||||
csas:
|
|
||||||
clientId: ""
|
|
||||||
clientSecret: ""
|
|
||||||
|
|
||||||
rabbitmq:
|
|
||||||
create: true
|
|
||||||
replicas: 1
|
|
||||||
storage: 5Gi
|
|
||||||
# Optional: override the generated cluster name; default is "<app.name>-rabbit[-<deployment>]"
|
|
||||||
clusterName: ""
|
|
||||||
port: "5672"
|
|
||||||
username: demo-app
|
|
||||||
password: ""
|
|
||||||
vhost: "/"
|
|
||||||
|
|
||||||
mariadb:
|
|
||||||
name: app-demo-database
|
|
||||||
cleanupPolicy: Skip
|
|
||||||
requeueInterval: 10h
|
|
||||||
retryInterval: 30s
|
|
||||||
mariaDbRef:
|
|
||||||
name: mariadb-repl
|
|
||||||
namespace: mariadb-operator
|
|
||||||
|
|
||||||
# Database access resources
|
|
||||||
database:
|
|
||||||
userName: app-demo-user
|
|
||||||
secretName: app-demo-database-secret
|
|
||||||
password: ""
|
|
||||||
encryptionSecret: ""
|
|
||||||
@@ -1,81 +0,0 @@
|
|||||||
# Project Evaluation Checklist
|
|
||||||
|
|
||||||
The group earn points by completing items from the categories below.
|
|
||||||
You are not expected to complete all items.
|
|
||||||
Focus on areas that align with your project goals and interests.
|
|
||||||
|
|
||||||
The core deliverables are required.
|
|
||||||
This means that you must get at least 2 points for each item in this category.
|
|
||||||
|
|
||||||
| **Category** | **Item** | **Max Points** | **Points** |
|
|
||||||
|----------------------------------| --------------------------------------- | -------------- |-------------------------------------------------|
|
|
||||||
| **Core Deliverables (Required)** | | | |
|
|
||||||
| Codebase & Organization | Well-organized project structure | 5 | 5 |
|
|
||||||
| | Clean, readable code | 5 | 4 |
|
|
||||||
| | Use planning tool (e.g., GitHub issues) | 5 | 4 |
|
|
||||||
| | Proper version control usage | 5 | 5 |
|
|
||||||
| 23 | Complete source code | 5 | 5 |
|
|
||||||
| Documentation | Comprehensive reproducibility report | 10 | 4-5 |
|
|
||||||
| | Updated design document | 5 | 2 |
|
|
||||||
| | Clear build/deployment instructions | 5 | 2 |
|
|
||||||
| | Troubleshooting guide | 5 | 1 |
|
|
||||||
| | Completed self-assessment table | 5 | 2 |
|
|
||||||
| 14 | Hour sheets for all members | 5 | 3 |
|
|
||||||
| Presentation Video | Project demonstration | 5 | 0 |
|
|
||||||
| | Code walk-through | 5 | 0 |
|
|
||||||
| 0 | Deployment showcase | 5 | 0 |
|
|
||||||
| **Technical Implementation** | | | |
|
|
||||||
| Application Functionality | Basic functionality works | 10 | 8 |
|
|
||||||
| | Advanced features implemented | 10 | 0 |
|
|
||||||
| | Error handling & robustness | 10 | 4 |
|
|
||||||
| 16 | User-friendly interface | 5 | 4 |
|
|
||||||
| Backend & Architecture | Stateless web server | 5 | 5 |
|
|
||||||
| | Stateful application | 10 | ? WHAT DOES THIS MEAN |
|
|
||||||
| | Database integration | 10 | 10 |
|
|
||||||
| | API design | 5 | 5 |
|
|
||||||
| 20 | Microservices architecture | 10 | 0 |
|
|
||||||
| Cloud Integration | Basic cloud deployment | 10 | 10 |
|
|
||||||
| | Cloud APIs usage | 10 | ? WHAT DOES THIS MEAN |
|
|
||||||
| | Serverless components | 10 | 0 |
|
|
||||||
| 10 | Advanced cloud services | 5 | 0 |
|
|
||||||
| **DevOps & Deployment** | | | |
|
|
||||||
| Containerization | Basic Dockerfile | 5 | 5 |
|
|
||||||
| | Optimized Dockerfile | 5 | 0 |
|
|
||||||
| | Docker Compose | 5 | 5 - dev only |
|
|
||||||
| 15 | Persistent storage | 5 | 5 |
|
|
||||||
| Deployment & Scaling | Manual deployment | 5 | 5 |
|
|
||||||
| | Automated deployment | 5 | 5 |
|
|
||||||
| | Multiple replicas | 5 | 5 |
|
|
||||||
| 20 | Kubernetes deployment | 10 | 10 |
|
|
||||||
| **Quality Assurance** | | | |
|
|
||||||
| Testing | Unit tests | 5 | 2 |
|
|
||||||
| | Integration tests | 5 | 2 |
|
|
||||||
| | End-to-end tests | 5 | 5 |
|
|
||||||
| 9 | Performance testing | 5 | 0 |
|
|
||||||
| Monitoring & Operations | Health checks | 5 | 5 |
|
|
||||||
| | Logging | 5 | 2 - only to terminal add logstash |
|
|
||||||
| 9 | Metrics/Monitoring | 5 | 2 - only DB, need to create Prometheus endpoint |
|
|
||||||
| Security | HTTPS/TLS | 5 | 5 |
|
|
||||||
| | Authentication | 5 | 5 |
|
|
||||||
| 15 | Authorization | 5 | 5 |
|
|
||||||
| **Innovation & Excellence** | | | |
|
|
||||||
| Advanced Features and | AI/ML Integration | 10 | 0 |
|
|
||||||
| Technical Excellence | Real-time features | 10 | 0 |
|
|
||||||
| | Creative problem solving | 10 | ? |
|
|
||||||
| | Performance optimization | 5 | 2 |
|
|
||||||
| 2 | Exceptional user experience | 5 | 0 |
|
|
||||||
| **Total** | | **255** | **153** |
|
|
||||||
|
|
||||||
## Grading Scale
|
|
||||||
|
|
||||||
- **Minimum Required: 100 points**
|
|
||||||
- **Maximum: 200+ points**
|
|
||||||
|
|
||||||
| Grade | Points |
|
|
||||||
| ----- | -------- |
|
|
||||||
| A | 180-200+ |
|
|
||||||
| B | 160-179 |
|
|
||||||
| C | 140-159 |
|
|
||||||
| D | 120-139 |
|
|
||||||
| E | 100-119 |
|
|
||||||
| F | 0-99 |
|
|
||||||
@@ -1,89 +0,0 @@
|
|||||||
import { useEffect, useState } from 'react';
|
|
||||||
import LoginRegisterPage from './pages/LoginRegisterPage';
|
|
||||||
import Dashboard from './pages/Dashboard';
|
|
||||||
import { logout } from './api';
|
|
||||||
import { BACKEND_URL } from './config';
|
|
||||||
|
|
||||||
// Root component. Routes between the login page and the dashboard based on
// whether a JWT is present in localStorage, and completes OAuth provider
// callbacks (/auth/:provider/callback?code=...&state=...) by forwarding the
// query string to the backend.
function App() {
  const [hasToken, setHasToken] = useState<boolean>(!!localStorage.getItem('token'));
  const [processingCallback, setProcessingCallback] = useState<boolean>(false);

  useEffect(() => {
    const path = window.location.pathname;

    // Minimal handling for provider callbacks: /auth/:provider/callback?code=...&state=...
    const parts = path.split('/').filter(Boolean);
    const isCallback = parts.length === 3 && (parts[0] === 'auth') && parts[2] === 'callback';

    if (isCallback) {
      // Guard against double invocation in React 18 StrictMode/dev
      const w = window as any;
      if (w.__oauthCallbackHandled) {
        return;
      }
      w.__oauthCallbackHandled = true;

      setProcessingCallback(true);

      const provider = parts[1];
      const qs = window.location.search || '';
      const base = BACKEND_URL.replace(/\/$/, '');
      const url = `${base}/auth/${encodeURIComponent(provider)}/callback${qs}`;
      (async () => {
        try {
          // Include the existing JWT (if any) so the backend can associate
          // the provider callback with the signed-in user.
          const token = localStorage.getItem('token');
          const res = await fetch(url, {
            method: 'GET',
            credentials: 'include',
            headers: token ? { Authorization: `Bearer ${token}` } : undefined,
          });
          let data: any = null;
          try {
            data = await res.json();
          } catch {}
          // 'csas' is excluded from token storage — presumably an
          // account-linking flow that returns no login token; confirm with backend.
          if (provider !== 'csas' && res.ok && data?.access_token) {
            localStorage.setItem('token', data?.access_token);
            setHasToken(true);
          }
        } catch {}
        // Clean URL and go home regardless of result
        setProcessingCallback(false);
        window.history.replaceState({}, '', '/');
      })();
    }

    // Keep auth state in sync across browser tabs (storage events fire in
    // other tabs when 'token' changes).
    const onStorage = (e: StorageEvent) => {
      if (e.key === 'token') setHasToken(!!e.newValue);
    };
    window.addEventListener('storage', onStorage);
    return () => window.removeEventListener('storage', onStorage);
  }, []);

  // Spinner shown while the OAuth code is being exchanged with the backend.
  if (processingCallback) {
    return (
      <div style={{ display: 'grid', placeItems: 'center', height: '100vh' }}>
        <div className="card" style={{ width: 360, textAlign: 'center', padding: 24 }}>
          <div style={{ display: 'flex', flexDirection: 'column', alignItems: 'center', gap: 12 }}>
            <svg width="48" height="48" viewBox="0 0 50 50" aria-label="Loading">
              <circle cx="25" cy="25" r="20" fill="none" stroke="#3b82f6" strokeWidth="5" strokeLinecap="round" strokeDasharray="31.4 31.4">
                <animateTransform attributeName="transform" type="rotate" from="0 25 25" to="360 25 25" dur="0.9s" repeatCount="indefinite" />
              </circle>
            </svg>
            <div>Finishing sign-in…</div>
            <div className="muted">Please wait</div>
          </div>
        </div>
      </div>
    );
  }

  if (!hasToken) {
    return <LoginRegisterPage onLoggedIn={() => setHasToken(true)} />;
  }

  return (
    <Dashboard onLogout={() => { logout(); setHasToken(false); }} />
  );
}
|
|
||||||
|
|
||||||
export default App;
|
|
||||||
@@ -1,227 +0,0 @@
|
|||||||
import { BACKEND_URL } from './config';
|
|
||||||
|
|
||||||
export type LoginResponse = {
|
|
||||||
access_token: string;
|
|
||||||
token_type: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type Category = {
|
|
||||||
id: number;
|
|
||||||
name: string;
|
|
||||||
description?: string | null;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type Transaction = {
|
|
||||||
id: number;
|
|
||||||
amount: number;
|
|
||||||
description?: string | null;
|
|
||||||
category_ids: number[];
|
|
||||||
date?: string | null; // ISO date (YYYY-MM-DD)
|
|
||||||
};
|
|
||||||
|
|
||||||
function getBaseUrl() {
|
|
||||||
const base = BACKEND_URL?.replace(/\/$/, '') || '';
|
|
||||||
return base || '';
|
|
||||||
}
|
|
||||||
|
|
||||||
function getHeaders(contentType: 'json' | 'form' | 'none' = 'json'): Record<string, string> {
|
|
||||||
const token = localStorage.getItem('token');
|
|
||||||
const headers: Record<string, string> = {};
|
|
||||||
|
|
||||||
if (contentType === 'json') {
|
|
||||||
headers['Content-Type'] = 'application/json';
|
|
||||||
} else if (contentType === 'form') {
|
|
||||||
headers['Content-Type'] = 'application/x-www-form-urlencoded';
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token) {
|
|
||||||
headers['Authorization'] = `Bearer ${token}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
return headers;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Authenticate against the JWT login endpoint and store the resulting token.
// The endpoint expects OAuth2 password-grant form fields, so the email is
// sent under the 'username' key as x-www-form-urlencoded.
// Throws Error with the response body (or 'Login failed') on non-2xx.
export async function login(email: string, password: string): Promise<void> {
  const body = new URLSearchParams();
  body.set('username', email);
  body.set('password', password);

  const res = await fetch(`${getBaseUrl()}/auth/jwt/login`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
    },
    body: body.toString(),
  });
  if (!res.ok) {
    const text = await res.text();
    throw new Error(text || 'Login failed');
  }
  const data: LoginResponse = await res.json();
  // Persist the JWT; other tabs pick it up via the 'storage' event.
  localStorage.setItem('token', data.access_token);
}
|
|
||||||
|
|
||||||
export async function register(email: string, password: string, first_name?: string, last_name?: string): Promise<void> {
|
|
||||||
const res = await fetch(`${getBaseUrl()}/auth/register`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ email, password, first_name, last_name }),
|
|
||||||
});
|
|
||||||
if (!res.ok) {
|
|
||||||
const text = await res.text();
|
|
||||||
throw new Error(text || 'Registration failed');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getCategories(): Promise<Category[]> {
|
|
||||||
const res = await fetch(`${getBaseUrl()}/categories/`, {
|
|
||||||
headers: getHeaders(),
|
|
||||||
});
|
|
||||||
if (!res.ok) throw new Error('Failed to load categories');
|
|
||||||
return res.json();
|
|
||||||
}
|
|
||||||
|
|
||||||
export type CreateTransactionInput = {
|
|
||||||
amount: number;
|
|
||||||
description?: string;
|
|
||||||
category_ids?: number[];
|
|
||||||
date?: string; // YYYY-MM-DD
|
|
||||||
};
|
|
||||||
|
|
||||||
export async function createTransaction(input: CreateTransactionInput): Promise<Transaction> {
|
|
||||||
const res = await fetch(`${getBaseUrl()}/transactions/create`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: getHeaders(),
|
|
||||||
body: JSON.stringify(input),
|
|
||||||
});
|
|
||||||
if (!res.ok) {
|
|
||||||
const text = await res.text();
|
|
||||||
throw new Error(text || 'Failed to create transaction');
|
|
||||||
}
|
|
||||||
return res.json();
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getTransactions(start_date?: string, end_date?: string): Promise<Transaction[]> {
|
|
||||||
const params = new URLSearchParams();
|
|
||||||
if (start_date) params.set('start_date', start_date);
|
|
||||||
if (end_date) params.set('end_date', end_date);
|
|
||||||
const qs = params.toString();
|
|
||||||
const url = `${getBaseUrl()}/transactions/${qs ? `?${qs}` : ''}`;
|
|
||||||
const res = await fetch(url, {
|
|
||||||
headers: getHeaders(),
|
|
||||||
});
|
|
||||||
if (!res.ok) throw new Error('Failed to load transactions');
|
|
||||||
return res.json();
|
|
||||||
}
|
|
||||||
|
|
||||||
export type User = {
|
|
||||||
id: string;
|
|
||||||
email: string;
|
|
||||||
first_name?: string | null;
|
|
||||||
last_name?: string | null;
|
|
||||||
is_active: boolean;
|
|
||||||
is_superuser: boolean;
|
|
||||||
is_verified: boolean;
|
|
||||||
};
|
|
||||||
|
|
||||||
export async function getMe(): Promise<User> {
|
|
||||||
const res = await fetch(`${getBaseUrl()}/users/me`, {
|
|
||||||
headers: getHeaders(),
|
|
||||||
});
|
|
||||||
if (!res.ok) throw new Error('Failed to load user');
|
|
||||||
return res.json();
|
|
||||||
}
|
|
||||||
|
|
||||||
export type UpdateMeInput = Partial<Pick<User, 'first_name' | 'last_name'>> & { password?: string };
|
|
||||||
export async function updateMe(input: UpdateMeInput): Promise<User> {
|
|
||||||
const res = await fetch(`${getBaseUrl()}/users/me`, {
|
|
||||||
method: 'PATCH',
|
|
||||||
headers: getHeaders(),
|
|
||||||
body: JSON.stringify(input),
|
|
||||||
});
|
|
||||||
if (!res.ok) {
|
|
||||||
const text = await res.text();
|
|
||||||
throw new Error(text || 'Failed to update user');
|
|
||||||
}
|
|
||||||
return res.json();
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function deleteMe(): Promise<void> {
|
|
||||||
const res = await fetch(`${getBaseUrl()}/users/me`, {
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: getHeaders(),
|
|
||||||
});
|
|
||||||
if (!res.ok) {
|
|
||||||
const text = await res.text();
|
|
||||||
throw new Error(text || 'Failed to delete account');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Client-side sign-out: removes the stored JWT so subsequent requests are
// unauthenticated. No server call is made here.
export function logout() {
  localStorage.removeItem('token');
}
|
|
||||||
|
|
||||||
// Categories
|
|
||||||
export type CreateCategoryInput = { name: string; description?: string };
|
|
||||||
export async function createCategory(input: CreateCategoryInput): Promise<Category> {
|
|
||||||
const res = await fetch(`${getBaseUrl()}/categories/create`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: getHeaders(),
|
|
||||||
body: JSON.stringify(input),
|
|
||||||
});
|
|
||||||
if (!res.ok) {
|
|
||||||
const text = await res.text();
|
|
||||||
throw new Error(text || 'Failed to create category');
|
|
||||||
}
|
|
||||||
return res.json();
|
|
||||||
}
|
|
||||||
|
|
||||||
export type UpdateCategoryInput = { name?: string; description?: string };
|
|
||||||
export async function updateCategory(category_id: number, input: UpdateCategoryInput): Promise<Category> {
|
|
||||||
const res = await fetch(`${getBaseUrl()}/categories/${category_id}`, {
|
|
||||||
method: 'PATCH',
|
|
||||||
headers: getHeaders(),
|
|
||||||
body: JSON.stringify(input),
|
|
||||||
});
|
|
||||||
if (!res.ok) {
|
|
||||||
const text = await res.text();
|
|
||||||
throw new Error(text || 'Failed to update category');
|
|
||||||
}
|
|
||||||
return res.json();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Transactions update
|
|
||||||
export type UpdateTransactionInput = {
|
|
||||||
amount?: number;
|
|
||||||
description?: string;
|
|
||||||
date?: string;
|
|
||||||
category_ids?: number[];
|
|
||||||
};
|
|
||||||
export async function updateTransaction(id: number, input: UpdateTransactionInput): Promise<Transaction> {
|
|
||||||
const res = await fetch(`${getBaseUrl()}/transactions/${id}/edit`, {
|
|
||||||
method: 'PATCH',
|
|
||||||
headers: getHeaders(),
|
|
||||||
body: JSON.stringify(input),
|
|
||||||
});
|
|
||||||
if (!res.ok) {
|
|
||||||
const text = await res.text();
|
|
||||||
throw new Error(text || 'Failed to update transaction');
|
|
||||||
}
|
|
||||||
return res.json();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Balance series
|
|
||||||
export type BalancePoint = { date: string; balance: number };
|
|
||||||
export async function getBalanceSeries(start_date?: string, end_date?: string): Promise<BalancePoint[]> {
|
|
||||||
const params = new URLSearchParams();
|
|
||||||
if (start_date) params.set('start_date', start_date);
|
|
||||||
if (end_date) params.set('end_date', end_date);
|
|
||||||
const qs = params.toString();
|
|
||||||
const url = `${getBaseUrl()}/transactions/balance_series${qs ? `?${qs}` : ''}`;
|
|
||||||
const res = await fetch(url, { headers: getHeaders() });
|
|
||||||
if (!res.ok) {
|
|
||||||
const text = await res.text();
|
|
||||||
throw new Error(text || 'Failed to load balance series');
|
|
||||||
}
|
|
||||||
return res.json();
|
|
||||||
}
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
export type Theme = 'system' | 'light' | 'dark';
|
|
||||||
export type FontSize = 'small' | 'medium' | 'large';
|
|
||||||
|
|
||||||
const THEME_KEY = 'app_theme';
|
|
||||||
const FONT_KEY = 'app_font_size';
|
|
||||||
|
|
||||||
export function applyTheme(theme: Theme) {
|
|
||||||
const body = document.body;
|
|
||||||
const effective = theme === 'system' ? (window.matchMedia && window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light') : theme;
|
|
||||||
body.setAttribute('data-theme', effective);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function applyFontSize(size: FontSize) {
|
|
||||||
const root = document.documentElement;
|
|
||||||
const map: Record<FontSize, string> = {
|
|
||||||
small: '14px',
|
|
||||||
medium: '18px',
|
|
||||||
large: '22px',
|
|
||||||
};
|
|
||||||
root.style.fontSize = map[size];
|
|
||||||
}
|
|
||||||
|
|
||||||
export function saveAppearance(theme: Theme, size: FontSize) {
|
|
||||||
localStorage.setItem(THEME_KEY, theme);
|
|
||||||
localStorage.setItem(FONT_KEY, size);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function loadAppearance(): { theme: Theme; size: FontSize } {
|
|
||||||
const theme = (localStorage.getItem(THEME_KEY) as Theme) || 'light';
|
|
||||||
const size = (localStorage.getItem(FONT_KEY) as FontSize) || 'medium';
|
|
||||||
return { theme, size };
|
|
||||||
}
|
|
||||||
|
|
||||||
export function applyAppearanceFromStorage() {
|
|
||||||
const { theme, size } = loadAppearance();
|
|
||||||
applyTheme(theme);
|
|
||||||
applyFontSize(size);
|
|
||||||
}
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
// Base URL of the backend API, injected at build time from the Vite env
// (VITE_BACKEND_URL). Falls back to '' so requests resolve same-origin.
export const BACKEND_URL: string =
  import.meta.env.VITE_BACKEND_URL ?? '';
|
|
||||||
@@ -1,87 +0,0 @@
|
|||||||
import { useEffect, useState } from 'react';
|
|
||||||
import { deleteMe, getMe, type UpdateMeInput, type User, updateMe } from '../api';
|
|
||||||
|
|
||||||
export default function AccountPage({ onDeleted }: { onDeleted: () => void }) {
|
|
||||||
const [user, setUser] = useState<User | null>(null);
|
|
||||||
const [firstName, setFirstName] = useState('');
|
|
||||||
const [lastName, setLastName] = useState('');
|
|
||||||
const [loading, setLoading] = useState(true);
|
|
||||||
const [saving, setSaving] = useState(false);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
(async () => {
|
|
||||||
try {
|
|
||||||
const u = await getMe();
|
|
||||||
setUser(u);
|
|
||||||
setFirstName(u.first_name || '');
|
|
||||||
setLastName(u.last_name || '');
|
|
||||||
} catch (e: any) {
|
|
||||||
setError(e?.message || 'Failed to load account');
|
|
||||||
} finally {
|
|
||||||
setLoading(false);
|
|
||||||
}
|
|
||||||
})();
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
async function handleSave(e: React.FormEvent) {
|
|
||||||
e.preventDefault();
|
|
||||||
setSaving(true);
|
|
||||||
setError(null);
|
|
||||||
try {
|
|
||||||
const payload: UpdateMeInput = { first_name: firstName || null as any, last_name: lastName || null as any };
|
|
||||||
const updated = await updateMe(payload);
|
|
||||||
setUser(updated);
|
|
||||||
} catch (e: any) {
|
|
||||||
setError(e?.message || 'Failed to update');
|
|
||||||
} finally {
|
|
||||||
setSaving(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function handleDelete() {
|
|
||||||
if (!confirm('Are you sure you want to delete your account? This cannot be undone.')) return;
|
|
||||||
try {
|
|
||||||
await deleteMe();
|
|
||||||
onDeleted();
|
|
||||||
} catch (e: any) {
|
|
||||||
alert(e?.message || 'Failed to delete account');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<section className="card">
|
|
||||||
<h3>Account</h3>
|
|
||||||
{loading ? (
|
|
||||||
<div>Loading…</div>
|
|
||||||
) : error ? (
|
|
||||||
<div style={{ color: 'crimson' }}>{error}</div>
|
|
||||||
) : !user ? (
|
|
||||||
<div>Not signed in</div>
|
|
||||||
) : (
|
|
||||||
<div className="space-y">
|
|
||||||
<div className="muted">Email: <strong>{user.email}</strong></div>
|
|
||||||
<form onSubmit={handleSave} className="space-y">
|
|
||||||
<div className="form-row">
|
|
||||||
<div>
|
|
||||||
<label className="muted">First name</label>
|
|
||||||
<input className="input" value={firstName} onChange={(e) => setFirstName(e.target.value)} />
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<label className="muted">Last name</label>
|
|
||||||
<input className="input" value={lastName} onChange={(e) => setLastName(e.target.value)} />
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div className="actions" style={{ justifyContent: 'flex-end' }}>
|
|
||||||
<button className="btn primary" type="submit" disabled={saving}>{saving ? 'Saving…' : 'Save changes'}</button>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
<div className="actions" style={{ justifyContent: 'space-between' }}>
|
|
||||||
<div className="muted"></div>
|
|
||||||
<button className="btn" style={{ borderColor: 'crimson', color: 'crimson' }} onClick={handleDelete}>Delete account</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</section>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,49 +0,0 @@
|
|||||||
import { useEffect, useState } from 'react';
|
|
||||||
import { applyFontSize, applyTheme, loadAppearance, saveAppearance, type FontSize, type Theme } from '../appearance';
|
|
||||||
|
|
||||||
// Appearance settings page: theme (light/dark/system) and base font size.
// Changes are applied to the DOM immediately and persisted to localStorage.
export default function AppearancePage() {
  const [theme, setTheme] = useState<Theme>('light');
  const [size, setSize] = useState<FontSize>('medium');

  // Initialise the controls from persisted preferences on mount.
  useEffect(() => {
    const { theme, size } = loadAppearance();
    setTheme(theme);
    setSize(size);
  }, []);

  // Apply and persist a theme change (current font size is re-saved with it).
  function onThemeChange(next: Theme) {
    setTheme(next);
    applyTheme(next);
    saveAppearance(next, size);
  }

  // Apply and persist a font-size change (current theme is re-saved with it).
  function onSizeChange(next: FontSize) {
    setSize(next);
    applyFontSize(next);
    saveAppearance(theme, next);
  }

  return (
    <section className="card">
      <h3>Appearance</h3>
      <div className="space-y">
        <div>
          <div className="muted" style={{ marginBottom: 6 }}>Theme</div>
          <div className="segmented">
            <button className={theme === 'light' ? 'active' : ''} onClick={() => onThemeChange('light')}>Light</button>
            <button className={theme === 'dark' ? 'active' : ''} onClick={() => onThemeChange('dark')}>Dark</button>
            <button className={theme === 'system' ? 'active' : ''} onClick={() => onThemeChange('system')}>System</button>
          </div>
        </div>
        <div>
          <div className="muted" style={{ marginBottom: 6 }}>Font size</div>
          <div className="segmented">
            <button className={size === 'small' ? 'active' : ''} onClick={() => onSizeChange('small')}>Small</button>
            <button className={size === 'medium' ? 'active' : ''} onClick={() => onSizeChange('medium')}>Medium</button>
            <button className={size === 'large' ? 'active' : ''} onClick={() => onSizeChange('large')}>Large</button>
          </div>
        </div>
      </div>
    </section>
  );
}
|
|
||||||
@@ -1,46 +0,0 @@
|
|||||||
// src/BalanceChart.tsx
|
|
||||||
import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, Legend, ResponsiveContainer } from 'recharts';
|
|
||||||
import { type BalancePoint } from '../api';
|
|
||||||
|
|
||||||
function formatAmount(n: number) {
|
|
||||||
return new Intl.NumberFormat(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 2 }).format(n);
|
|
||||||
}
|
|
||||||
|
|
||||||
function formatDate(dateStr: string) {
|
|
||||||
return new Date(dateStr).toLocaleDateString(undefined, { month: 'short', day: 'numeric' });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Line chart of the running balance over time (recharts).
// Expects `data` sorted by date; renders a placeholder when empty.
export default function BalanceChart({ data }: { data: BalancePoint[] }) {
  if (data.length === 0) {
    return <div>No data to display</div>;
  }

  return (
    <ResponsiveContainer width="100%" height={300}>
      <LineChart
        data={data}
        // Extra left margin makes room for the Y-axis label and tick values.
        margin={{ top: 5, right: 30, left: 50, bottom: 5 }}
      >
        <CartesianGrid strokeDasharray="3 3" />
        <XAxis
          dataKey="date"
          tickFormatter={formatDate}
          label={{ value: 'Date', position: 'insideBottom', offset: -5 }}
        />
        <YAxis
          tickFormatter={(value) => formatAmount(value as number)}
          // Negative offset pushes the rotated label away from the tick values.
          label={{ value: 'Balance', angle: -90, position: 'insideLeft', offset: -30 }}
        />
        <Tooltip
          labelFormatter={formatDate}
          formatter={(value) => [formatAmount(value as number), 'Balance']}
        />
        <Legend />
        <Line type="monotone" dataKey="balance" stroke="#3b82f6" strokeWidth={2} activeDot={{ r: 8 }} />
      </LineChart>
    </ResponsiveContainer>
  );
}
|
|
||||||
@@ -1,100 +0,0 @@
|
|||||||
// src/CategoryPieCharts.tsx (renamed from CategoryPieChart.tsx)
|
|
||||||
import { useMemo } from 'react';
|
|
||||||
import { PieChart, Pie, Cell, Tooltip, Legend, ResponsiveContainer } from 'recharts';
|
|
||||||
import { type Transaction, type Category } from '../api';
|
|
||||||
|
|
||||||
const COLORS = ['#0088FE', '#00C49F', '#FFBB28', '#FF8042', '#AF19FF', '#FF4242', '#8884d8', '#82ca9d'];
|
|
||||||
|
|
||||||
// Helper component for a single pie chart
|
|
||||||
// One titled pie chart (recharts) over pre-aggregated { name, value } rows;
// renders a placeholder when the data set is empty.
function SinglePieChart({ data, title }: { data: { name: string; value: number }[]; title: string }) {
  if (data.length === 0) {
    return (
      <div style={{ flex: 1, textAlign: 'center' }}>
        <h4>{title}</h4>
        <div>No data to display.</div>
      </div>
    );
  }

  return (
    <div style={{ flex: 1 }}>
      <h4>{title}</h4>
      <ResponsiveContainer width="100%" height={300}>
        <PieChart>
          <Pie
            data={data}
            cx="50%"
            cy="50%"
            labelLine={false}
            outerRadius={80}
            fill="#8884d8"
            dataKey="value"
            nameKey="name"
            // Slice label: category name plus its share as a whole percent.
            label={(props: any) => `${props.name} ${(props.percent * 100).toFixed(0)}%`}
          >
            {data.map((_entry, index) => (
              <Cell key={`cell-${index}`} fill={COLORS[index % COLORS.length]} />
            ))}
          </Pie>
          {/* NOTE(review): tooltip hardcodes USD formatting while other parts of
              the app reference CZK rates — confirm the intended display currency. */}
          <Tooltip formatter={(value) => new Intl.NumberFormat(undefined, { style: 'currency', currency: 'USD' }).format(value as number)} />
          <Legend />
        </PieChart>
      </ResponsiveContainer>
    </div>
  );
}
|
|
||||||
|
|
||||||
|
|
||||||
export default function CategoryPieCharts({ transactions, categories }: { transactions: Transaction[], categories: Category[] }) {
|
|
||||||
|
|
||||||
// Calculate expenses data
|
|
||||||
const expensesData = useMemo(() => {
|
|
||||||
const spendingMap = new Map<number, number>();
|
|
||||||
|
|
||||||
transactions.forEach(tx => {
|
|
||||||
// Expenses are typically negative amounts in your system
|
|
||||||
if (tx.amount < 0 && tx.category_ids.length > 0) {
|
|
||||||
tx.category_ids.forEach(catId => {
|
|
||||||
// Use absolute value for display on chart
|
|
||||||
spendingMap.set(catId, (spendingMap.get(catId) || 0) + Math.abs(tx.amount));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return Array.from(spendingMap.entries())
|
|
||||||
.map(([categoryId, total]) => ({
|
|
||||||
name: categories.find(c => c.id === categoryId)?.name || `Category #${categoryId}`,
|
|
||||||
value: total,
|
|
||||||
}))
|
|
||||||
.sort((a, b) => b.value - a.value); // Sort descending
|
|
||||||
}, [transactions, categories]);
|
|
||||||
|
|
||||||
// Calculate earnings data
|
|
||||||
const earningsData = useMemo(() => {
|
|
||||||
const incomeMap = new Map<number, number>();
|
|
||||||
|
|
||||||
transactions.forEach(tx => {
|
|
||||||
// Earnings are typically positive amounts in your system
|
|
||||||
if (tx.amount > 0 && tx.category_ids.length > 0) {
|
|
||||||
tx.category_ids.forEach(catId => {
|
|
||||||
incomeMap.set(catId, (incomeMap.get(catId) || 0) + tx.amount);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return Array.from(incomeMap.entries())
|
|
||||||
.map(([categoryId, total]) => ({
|
|
||||||
name: categories.find(c => c.id === categoryId)?.name || `Category #${categoryId}`,
|
|
||||||
value: total,
|
|
||||||
}))
|
|
||||||
.sort((a, b) => b.value - a.value); // Sort descending
|
|
||||||
}, [transactions, categories]);
|
|
||||||
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div style={{ display: 'flex', flexWrap: 'wrap', gap: '20px', justifyContent: 'center' }}>
|
|
||||||
<SinglePieChart data={expensesData} title="Expenses by Category" />
|
|
||||||
<SinglePieChart data={earningsData} title="Earnings by Category" />
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,452 +0,0 @@
|
|||||||
import { useEffect, useMemo, useState } from 'react';
|
|
||||||
import { type Category, type Transaction, type BalancePoint, getCategories, getTransactions, createTransaction, updateTransaction, getBalanceSeries } from '../api';
|
|
||||||
import AccountPage from './AccountPage';
|
|
||||||
import AppearancePage from './AppearancePage';
|
|
||||||
import BalanceChart from './BalanceChart';
|
|
||||||
import ManualManagement from './ManualManagement';
|
|
||||||
import CategoryPieChart from './CategoryPieChart';
|
|
||||||
import MockBankModal, { type MockGenerationOptions } from './MockBankModal';
|
|
||||||
import { BACKEND_URL } from '../config';
|
|
||||||
|
|
||||||
function formatAmount(n: number) {
|
|
||||||
return new Intl.NumberFormat(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 2 }).format(n);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add this new component to your Dashboard.tsx file, above the Dashboard component
|
|
||||||
|
|
||||||
// Define the structure for the rate data we care about
|
|
||||||
type CnbRate = {
|
|
||||||
currencyCode: string;
|
|
||||||
rate: number;
|
|
||||||
};
|
|
||||||
|
|
||||||
// The part of the API response structure we need
|
|
||||||
type CnbApiResponse = {
|
|
||||||
rates: Array<{
|
|
||||||
amount: number;
|
|
||||||
currencyCode: string;
|
|
||||||
rate: number;
|
|
||||||
}>;
|
|
||||||
};
|
|
||||||
|
|
||||||
// The currencies you want to display
|
|
||||||
const TARGET_CURRENCIES = ['EUR', 'USD', 'NOK'];
|
|
||||||
|
|
||||||
// Sidebar widget showing today's CNB exchange rates (vs CZK) for the
// currencies in TARGET_CURRENCIES. Fetched once on mount via the
// '/api-cnb' path — presumably a dev-server proxy to the CNB API;
// TODO confirm this path is also routed in production.
function CurrencyRates() {
  const [rates, setRates] = useState<CnbRate[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);

  useEffect(() => {
    async function fetchRates() {
      setLoading(true);
      setError(null);

      // Get today's date in YYYY-MM-DD format for the API
      const today = new Date().toISOString().split('T')[0];
      const CNB_API_URL = `/api-cnb/cnbapi/exrates/daily?date=${today}&lang=EN`;

      try {
        const res = await fetch(CNB_API_URL);
        if (!res.ok) {
          // This can happen on weekends/holidays or if rates aren't posted yet
          throw new Error(`Rates unavailable (Status: ${res.status})`);
        }
        const data: CnbApiResponse = await res.json();

        if (!data.rates) {
          throw new Error("Invalid API response");
        }

        const filteredRates = data.rates
          .filter(rate => TARGET_CURRENCIES.includes(rate.currencyCode))
          .map(rate => ({
            currencyCode: rate.currencyCode,
            // Normalize by 'amount': some currencies are quoted per 100 units.
            rate: rate.rate / rate.amount
          }));

        setRates(filteredRates);
      } catch (err: any) {
        setError(err.message || 'Could not load rates');
      } finally {
        setLoading(false);
      }
    }

    fetchRates();
  }, []); // Runs once on component mount

  return (
    // This component will push itself to the bottom of the sidebar
    <div
      className="currency-rates"
      style={{
        padding: '0 1.5rem',
        marginTop: 'auto', // Pushes to bottom
        paddingBottom: '1.5rem' // Adds some spacing at the end
      }}
    >
      <h4 style={{
        margin: '1.5rem 0 0.75rem 0',
        color: '#8a91b4', // Muted color to match dark sidebar
        fontWeight: 500,
        fontSize: '0.9em',
        textTransform: 'uppercase',
      }}>
        Rates (vs CZK)
      </h4>
      {loading && <div style={{ fontSize: '0.9em', color: '#ccc' }}>Loading...</div>}
      {error && <div style={{ fontSize: '0.9em', color: 'crimson' }}>{error}</div>}
      {!loading && !error && (
        <ul style={{ listStyle: 'none', padding: 0, margin: 0, fontSize: '0.9em', color: '#fff' }}>
          {rates.length > 0 ? rates.map(rate => (
            <li key={rate.currencyCode} style={{ display: 'flex', justifyContent: 'space-between', marginBottom: '0.5rem' }}>
              <strong>{rate.currencyCode}</strong>
              <span>{rate.rate.toFixed(3)}</span>
            </li>
          )) : <li style={{color: '#8a91b4'}}>No rates found.</li>}
        </ul>
      )}
    </div>
  );
}
|
|
||||||
|
|
||||||
export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
|
||||||
const [current, setCurrent] = useState<'home' | 'manual' | 'account' | 'appearance'>('home');
|
|
||||||
const [transactions, setTransactions] = useState<Transaction[]>([]);
|
|
||||||
const [categories, setCategories] = useState<Category[]>([]);
|
|
||||||
const [loading, setLoading] = useState(true);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
const [isMockModalOpen, setMockModalOpen] = useState(false);
|
|
||||||
const [isGenerating, setIsGenerating] = useState(false);
|
|
||||||
|
|
||||||
// Start CSAS (George) OAuth after login
|
|
||||||
async function startOauthCsas() {
|
|
||||||
const base = BACKEND_URL.replace(/\/$/, '');
|
|
||||||
const url = `${base}/auth/csas/authorize`;
|
|
||||||
try {
|
|
||||||
const token = localStorage.getItem('token');
|
|
||||||
const res = await fetch(url, {
|
|
||||||
credentials: 'include',
|
|
||||||
headers: token ? { Authorization: `Bearer ${token}` } : undefined,
|
|
||||||
});
|
|
||||||
const data = await res.json();
|
|
||||||
if (data && typeof data.authorization_url === 'string') {
|
|
||||||
window.location.assign(data.authorization_url);
|
|
||||||
} else {
|
|
||||||
alert('Cannot start CSAS OAuth.');
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
alert('Cannot start CSAS OAuth.');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Filters
|
|
||||||
const [minAmount, setMinAmount] = useState<string>('');
|
|
||||||
const [maxAmount, setMaxAmount] = useState<string>('');
|
|
||||||
const [filterCategoryId, setFilterCategoryId] = useState<number | ''>('');
|
|
||||||
const [searchText, setSearchText] = useState('');
|
|
||||||
|
|
||||||
// Date-range filter
|
|
||||||
const [startDate, setStartDate] = useState<string>(''); // YYYY-MM-DD
|
|
||||||
const [endDate, setEndDate] = useState<string>('');
|
|
||||||
|
|
||||||
// Pagination over filtered transactions (20 per page), 0 = latest (most recent)
|
|
||||||
const pageSize = 20;
|
|
||||||
const [page, setPage] = useState<number>(0);
|
|
||||||
|
|
||||||
// Balance chart series for current date filter
|
|
||||||
const [balanceSeries, setBalanceSeries] = useState<BalancePoint[]>([]);
|
|
||||||
|
|
||||||
// Manual forms moved to ManualManagement page
|
|
||||||
|
|
||||||
// Inline edit state for transaction categories
|
|
||||||
const [editingTxId, setEditingTxId] = useState<number | null>(null);
|
|
||||||
const [editingCategoryIds, setEditingCategoryIds] = useState<number[]>([]);
|
|
||||||
|
|
||||||
  /**
   * Fetch transactions, categories and the balance series for the current
   * date-range filter in parallel, then reset pagination to the most
   * recent page. Failures surface through the `error` state; `loading`
   * brackets the whole fetch regardless of outcome.
   */
  async function loadAll() {
    setLoading(true);
    setError(null);
    try {
      // All three requests are independent, so issue them concurrently.
      const [txs, cats, series] = await Promise.all([
        getTransactions(startDate || undefined, endDate || undefined),
        getCategories(),
        getBalanceSeries(startDate || undefined, endDate || undefined),
      ]);
      setTransactions(txs);
      setCategories(cats);
      setBalanceSeries(series);
      // reset paging to most recent
      setPage(0);
    } catch (err: any) {
      setError(err?.message || 'Failed to load data');
    } finally {
      setLoading(false);
    }
  }
|
|
||||||
|
|
||||||
async function handleGenerateMockTransactions(options: MockGenerationOptions) {
|
|
||||||
setIsGenerating(true);
|
|
||||||
setMockModalOpen(false);
|
|
||||||
|
|
||||||
const { count, minAmount, maxAmount, startDate, endDate, categoryIds } = options;
|
|
||||||
const newTransactions: Transaction[] = [];
|
|
||||||
|
|
||||||
const startDateTime = new Date(startDate).getTime();
|
|
||||||
const endDateTime = new Date(endDate).getTime();
|
|
||||||
|
|
||||||
for (let i = 0; i < count; i++) {
|
|
||||||
// Generate random data based on user input
|
|
||||||
const amount = parseFloat((Math.random() * (maxAmount - minAmount) + minAmount).toFixed(2));
|
|
||||||
|
|
||||||
const randomTime = Math.random() * (endDateTime - startDateTime) + startDateTime;
|
|
||||||
const date = new Date(randomTime);
|
|
||||||
const dateString = date.toISOString().split('T')[0];
|
|
||||||
|
|
||||||
const randomCategory = categoryIds.length > 0
|
|
||||||
? [categoryIds[Math.floor(Math.random() * categoryIds.length)]]
|
|
||||||
: [];
|
|
||||||
|
|
||||||
const payload = {
|
|
||||||
amount,
|
|
||||||
date: dateString,
|
|
||||||
category_ids: randomCategory,
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const created = await createTransaction(payload);
|
|
||||||
newTransactions.push(created);
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Failed to create mock transaction:", err);
|
|
||||||
alert('An error occurred while generating transactions. Check the console.');
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
setIsGenerating(false);
|
|
||||||
alert(`${newTransactions.length} mock transactions were successfully generated!`);
|
|
||||||
|
|
||||||
await loadAll();
|
|
||||||
}
|
|
||||||
|
|
||||||
useEffect(() => { loadAll(); }, [startDate, endDate]);
|
|
||||||
|
|
||||||
const filtered = useMemo(() => {
|
|
||||||
let arr = [...transactions];
|
|
||||||
const min = minAmount !== '' ? Number(minAmount) : undefined;
|
|
||||||
const max = maxAmount !== '' ? Number(maxAmount) : undefined;
|
|
||||||
if (min !== undefined) arr = arr.filter(t => t.amount >= min);
|
|
||||||
if (max !== undefined) arr = arr.filter(t => t.amount <= max);
|
|
||||||
if (filterCategoryId !== '') arr = arr.filter(t => t.category_ids.includes(filterCategoryId as number));
|
|
||||||
if (searchText.trim()) arr = arr.filter(t => (t.description || '').toLowerCase().includes(searchText.toLowerCase()));
|
|
||||||
return arr;
|
|
||||||
}, [transactions, minAmount, maxAmount, filterCategoryId, searchText]);
|
|
||||||
|
|
||||||
const sortedDesc = useMemo(() => {
|
|
||||||
return [...filtered].sort((a, b) => {
|
|
||||||
const ad = (a.date || '') > (b.date || '') ? 1 : (a.date || '') < (b.date || '') ? -1 : 0;
|
|
||||||
if (ad !== 0) return -ad; // date desc
|
|
||||||
return b.id - a.id; // fallback id desc
|
|
||||||
});
|
|
||||||
}, [filtered]);
|
|
||||||
|
|
||||||
const totalPages = Math.ceil(sortedDesc.length / pageSize);
|
|
||||||
const pageStart = page * pageSize;
|
|
||||||
const pageEnd = pageStart + pageSize;
|
|
||||||
const visible = sortedDesc.slice(pageStart, pageEnd);
|
|
||||||
|
|
||||||
function categoryNameById(id: number) { return categories.find(c => c.id === id)?.name || `#${id}`; }
|
|
||||||
|
|
||||||
|
|
||||||
  // Enter inline-edit mode for a transaction's categories, seeding the
  // editor with a copy of the transaction's current category ids (copied
  // so edits don't mutate the transaction held in state).
  function beginEditCategories(t: Transaction) {
    setEditingTxId(t.id);
    setEditingCategoryIds([...(t.category_ids || [])]);
  }
  // Leave inline-edit mode without saving.
  function cancelEditCategories() {
    setEditingTxId(null);
    setEditingCategoryIds([]);
  }
  // Persist the edited category ids for the transaction being edited,
  // splice the updated transaction into local state on success, and
  // close the editor. On failure the editor stays open with an alert.
  async function saveEditCategories() {
    if (editingTxId == null) return;
    try {
      const updated = await updateTransaction(editingTxId, { category_ids: editingCategoryIds });
      setTransactions(prev => prev.map(p => (p.id === updated.id ? updated : p)));
      cancelEditCategories();
    } catch (err: any) {
      alert(err?.message || 'Failed to update transaction categories');
    }
  }
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="app-layout">
|
|
||||||
<aside className="sidebar" style={{ display: 'flex', flexDirection: 'column' }}>
|
|
||||||
<div>
|
|
||||||
<div className="logo">7Project</div>
|
|
||||||
<nav className="nav">
|
|
||||||
<button className={current === 'home' ? 'active' : ''} onClick={() => setCurrent('home')}>Home</button>
|
|
||||||
<button className={current === 'manual' ? 'active' : ''} onClick={() => setCurrent('manual')}>Manual management</button>
|
|
||||||
<button className={current === 'account' ? 'active' : ''} onClick={() => setCurrent('account')}>Account</button>
|
|
||||||
<button className={current === 'appearance' ? 'active' : ''} onClick={() => setCurrent('appearance')}>Appearance</button>
|
|
||||||
</nav>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<CurrencyRates />
|
|
||||||
|
|
||||||
</aside>
|
|
||||||
<div className="content">
|
|
||||||
<div className="topbar">
|
|
||||||
<h2 style={{ margin: 0 }}>{current === 'home' ? 'Dashboard' : current === 'manual' ? 'Manual management' : current === 'account' ? 'Account' : 'Appearance'}</h2>
|
|
||||||
<div className="actions">
|
|
||||||
<span className="user muted">Signed in</span>
|
|
||||||
<button className="btn" onClick={onLogout}>Logout</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<main className="page space-y">
|
|
||||||
{current === 'home' && (
|
|
||||||
<>
|
|
||||||
<section className="card space-y">
|
|
||||||
<h3>Bank connections</h3>
|
|
||||||
<div className="connection-row">
|
|
||||||
<p className="muted" style={{ margin: 0 }}>Connect your CSAS (George) account.</p>
|
|
||||||
<button className="btn primary" onClick={startOauthCsas}>Connect CSAS (George)</button>
|
|
||||||
</div>
|
|
||||||
<div className="connection-row">
|
|
||||||
<p className="muted" style={{ margin: 0 }}>Generate data from a mock bank.</p>
|
|
||||||
<button className="btn primary" onClick={() => setMockModalOpen(true)}>Connect Mock Bank</button>
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<section className="card">
|
|
||||||
<h3>Filters</h3>
|
|
||||||
<div className="form-row" style={{ gap: 8, flexWrap: 'wrap' }}>
|
|
||||||
<input className="input" type="date" placeholder="Start date" value={startDate} onChange={(e) => setStartDate(e.target.value)} />
|
|
||||||
<input className="input" type="date" placeholder="End date" value={endDate} onChange={(e) => setEndDate(e.target.value)} />
|
|
||||||
<input className="input" type="number" step="0.01" placeholder="Min amount" value={minAmount} onChange={(e) => setMinAmount(e.target.value)} />
|
|
||||||
<input className="input" type="number" step="0.01" placeholder="Max amount" value={maxAmount} onChange={(e) => setMaxAmount(e.target.value)} />
|
|
||||||
<select className="input" value={filterCategoryId} onChange={(e) => setFilterCategoryId(e.target.value ? Number(e.target.value) : '')}>
|
|
||||||
<option value="">All categories</option>
|
|
||||||
{categories.map(c => (<option key={c.id} value={c.id}>{c.name}</option>))}
|
|
||||||
</select>
|
|
||||||
<input className="input" type="text" placeholder="Search in description" value={searchText} onChange={(e) => setSearchText(e.target.value)} />
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section className="card">
|
|
||||||
<h3>Balance over time</h3>
|
|
||||||
{loading ? (
|
|
||||||
<div>Loading…</div>
|
|
||||||
) : error ? (
|
|
||||||
<div style={{ color: 'crimson' }}>{error}</div>
|
|
||||||
) : (
|
|
||||||
<BalanceChart data={balanceSeries} />
|
|
||||||
)}
|
|
||||||
</section>
|
|
||||||
|
|
||||||
{/* 3. Add the new section for the Category Pie Chart */}
|
|
||||||
<section className="card">
|
|
||||||
{loading ? (
|
|
||||||
<div>Loading…</div>
|
|
||||||
) : error ? (
|
|
||||||
<div style={{ color: 'crimson' }}>{error}</div>
|
|
||||||
) : (
|
|
||||||
// Pass the filtered transactions to see the breakdown for the current view
|
|
||||||
<CategoryPieChart transactions={filtered} categories={categories} />
|
|
||||||
)}
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section className="card">
|
|
||||||
<h3>Transactions</h3>
|
|
||||||
{loading ? (
|
|
||||||
<div>Loading…</div>
|
|
||||||
) : error ? (
|
|
||||||
<div style={{ color: 'crimson' }}>{error}</div>
|
|
||||||
) : filtered.length === 0 ? (
|
|
||||||
<div>No transactions</div>
|
|
||||||
) : (
|
|
||||||
<>
|
|
||||||
<div className="table-controls">
|
|
||||||
<div className="muted">
|
|
||||||
Showing {visible.length} of {filtered.length} (page {Math.min(page + 1, Math.max(1, totalPages))}/{Math.max(1, totalPages)})
|
|
||||||
</div>
|
|
||||||
<div className="actions">
|
|
||||||
<button className="btn primary" disabled={page <= 0} onClick={() => setPage(p => Math.max(0, p - 1))}>Previous</button>
|
|
||||||
<button className="btn primary" disabled={page >= totalPages - 1} onClick={() => setPage(p => Math.min(totalPages - 1, p + 1))}>Next</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<table className="table">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th>Date</th>
|
|
||||||
<th style={{ textAlign: 'right' }}>Amount</th>
|
|
||||||
<th>Description</th>
|
|
||||||
<th>Categories</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody>
|
|
||||||
{visible.map(t => (
|
|
||||||
<tr key={t.id}>
|
|
||||||
<td>{t.date || ''}</td>
|
|
||||||
<td className="amount">{formatAmount(t.amount)}</td>
|
|
||||||
<td>{t.description || ''}</td>
|
|
||||||
<td>
|
|
||||||
{editingTxId === t.id ? (
|
|
||||||
<div className="space-y" style={{ display: 'flex', alignItems: 'center', gap: 8 }}>
|
|
||||||
<select multiple className="input" value={editingCategoryIds.map(String)} onChange={(e) => {
|
|
||||||
const opts = Array.from(e.currentTarget.selectedOptions).map(o => Number(o.value));
|
|
||||||
setEditingCategoryIds(opts);
|
|
||||||
}}>
|
|
||||||
{categories.map(c => (
|
|
||||||
<option key={c.id} value={c.id}>{c.name}</option>
|
|
||||||
))}
|
|
||||||
</select>
|
|
||||||
<button className="btn small" onClick={saveEditCategories}>Save</button>
|
|
||||||
<button className="btn small" onClick={cancelEditCategories}>Cancel</button>
|
|
||||||
</div>
|
|
||||||
) : (
|
|
||||||
<div className="space-x" style={{ display: 'flex', alignItems: 'center', gap: 8, justifyContent: 'space-between' }}>
|
|
||||||
<span>{t.category_ids.map(id => categoryNameById(id)).join(', ') || '—'}</span>
|
|
||||||
<button className="btn small" onClick={() => beginEditCategories(t)}>Change</button>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
))}
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
</section>
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{current === 'account' && (
|
|
||||||
// lazy import avoided for simplicity
|
|
||||||
<AccountPage onDeleted={onLogout} />
|
|
||||||
)}
|
|
||||||
|
|
||||||
{current === 'manual' && (
|
|
||||||
<ManualManagement
|
|
||||||
categories={categories}
|
|
||||||
onTransactionAdded={(t) => setTransactions(prev => [t, ...prev])}
|
|
||||||
onCategoryCreated={(c) => setCategories(prev => [...prev, c])}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{current === 'appearance' && (
|
|
||||||
<AppearancePage />
|
|
||||||
)}
|
|
||||||
</main>
|
|
||||||
</div>
|
|
||||||
<MockBankModal
|
|
||||||
isOpen={isMockModalOpen}
|
|
||||||
isGenerating={isGenerating}
|
|
||||||
categories={categories}
|
|
||||||
onClose={() => setMockModalOpen(false)}
|
|
||||||
onGenerate={handleGenerateMockTransactions}
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
import { useState, useEffect } from 'react';
|
|
||||||
import { login, register } from '../api';
|
|
||||||
import { BACKEND_URL } from '../config';
|
|
||||||
|
|
||||||
// Minimal helper to start OAuth: fetch authorization_url and redirect
|
|
||||||
async function startOauth(provider: 'mojeid' | 'bankid') {
|
|
||||||
const base = BACKEND_URL.replace(/\/$/, '');
|
|
||||||
const url = `${base}/auth/${provider}/authorize`;
|
|
||||||
try {
|
|
||||||
const res = await fetch(url, { credentials: 'include' });
|
|
||||||
const data = await res.json();
|
|
||||||
if (data && typeof data.authorization_url === 'string') {
|
|
||||||
window.location.assign(data.authorization_url);
|
|
||||||
} else {
|
|
||||||
alert('Cannot start OAuth.');
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
alert('Cannot start OAuth.');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export default function LoginRegisterPage({ onLoggedIn }: { onLoggedIn: () => void }) {
|
|
||||||
const [mode, setMode] = useState<'login' | 'register'>('login');
|
|
||||||
const [email, setEmail] = useState('');
|
|
||||||
const [password, setPassword] = useState('');
|
|
||||||
const [firstName, setFirstName] = useState('');
|
|
||||||
const [lastName, setLastName] = useState('');
|
|
||||||
const [loading, setLoading] = useState(false);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
|
|
||||||
async function handleSubmit(e: React.FormEvent) {
|
|
||||||
e.preventDefault();
|
|
||||||
setLoading(true);
|
|
||||||
setError(null);
|
|
||||||
try {
|
|
||||||
if (mode === 'login') {
|
|
||||||
await login(email, password);
|
|
||||||
onLoggedIn();
|
|
||||||
} else {
|
|
||||||
await register(email, password, firstName || undefined, lastName || undefined);
|
|
||||||
// After register, prompt login automatically
|
|
||||||
await login(email, password);
|
|
||||||
onLoggedIn();
|
|
||||||
}
|
|
||||||
} catch (err: any) {
|
|
||||||
setError(err?.message || 'Operation failed');
|
|
||||||
} finally {
|
|
||||||
setLoading(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add this useEffect hook
|
|
||||||
useEffect(() => {
|
|
||||||
// When the component mounts, add a class to the body
|
|
||||||
document.body.classList.add('auth-page');
|
|
||||||
|
|
||||||
// When the component unmounts, remove the class
|
|
||||||
return () => {
|
|
||||||
document.body.classList.remove('auth-page');
|
|
||||||
};
|
|
||||||
}, []); // The empty array ensures this runs only once
|
|
||||||
|
|
||||||
// The JSX no longer needs the wrapper div
|
|
||||||
return (
|
|
||||||
<div className="card" style={{ width: 420 }}>
|
|
||||||
<div style={{ display: 'flex', alignItems: 'center', justifyContent: 'space-between', marginBottom: 12 }}>
|
|
||||||
<h2 style={{ margin: 0 }}>{mode === 'login' ? 'Welcome back' : 'Create your account'}</h2>
|
|
||||||
<div className="segmented">
|
|
||||||
<button className={mode === 'login' ? 'active' : ''} type="button" onClick={() => setMode('login')}>Login</button>
|
|
||||||
<button className={mode === 'register' ? 'active' : ''} type="button" onClick={() => setMode('register')}>Register</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<form onSubmit={handleSubmit} className="space-y">
|
|
||||||
<div>
|
|
||||||
<label className="muted">Email</label>
|
|
||||||
<input className="input" type="email" required value={email} onChange={(e) => setEmail(e.target.value)} />
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<label className="muted">Password</label>
|
|
||||||
<input className="input" type="password" required value={password} onChange={(e) => setPassword(e.target.value)} />
|
|
||||||
</div>
|
|
||||||
{mode === 'register' && (
|
|
||||||
<div className="form-row">
|
|
||||||
<div>
|
|
||||||
<label className="muted">First name (optional)</label>
|
|
||||||
<input className="input" type="text" value={firstName} onChange={(e) => setFirstName(e.target.value)} />
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<label className="muted">Last name (optional)</label>
|
|
||||||
<input className="input" type="text" value={lastName} onChange={(e) => setLastName(e.target.value)} />
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{error && <div style={{ color: 'crimson' }}>{error}</div>}
|
|
||||||
<div className="actions" style={{ justifyContent: 'space-between' }}>
|
|
||||||
<div className="muted">Or continue with</div>
|
|
||||||
<div className="actions">
|
|
||||||
<button type="button" className="btn" onClick={() => startOauth('mojeid')}>MojeID</button>
|
|
||||||
<button type="button" className="btn" onClick={() => startOauth('bankid')}>BankID</button>
|
|
||||||
<button className="btn primary" type="submit" disabled={loading}>{loading ? 'Please wait…' : (mode === 'login' ? 'Login' : 'Register')}</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</form>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
@@ -1,79 +0,0 @@
|
|||||||
import { useState } from 'react';
|
|
||||||
import { type Category, type Transaction, createTransaction, createCategory } from '../api';
|
|
||||||
|
|
||||||
export default function ManualManagement({
|
|
||||||
categories,
|
|
||||||
onTransactionAdded,
|
|
||||||
onCategoryCreated,
|
|
||||||
}: {
|
|
||||||
categories: Category[];
|
|
||||||
onTransactionAdded: (t: Transaction) => void;
|
|
||||||
onCategoryCreated: (c: Category) => void;
|
|
||||||
}) {
|
|
||||||
// New transaction form state
|
|
||||||
const [amount, setAmount] = useState<string>('');
|
|
||||||
const [description, setDescription] = useState('');
|
|
||||||
const [selectedCategoryId, setSelectedCategoryId] = useState<number | ''>('');
|
|
||||||
const [txDate, setTxDate] = useState<string>('');
|
|
||||||
|
|
||||||
// Category creation form
|
|
||||||
const [newCatName, setNewCatName] = useState('');
|
|
||||||
const [newCatDesc, setNewCatDesc] = useState('');
|
|
||||||
|
|
||||||
async function handleCreate(e: React.FormEvent) {
|
|
||||||
e.preventDefault();
|
|
||||||
if (!amount) return;
|
|
||||||
const payload = {
|
|
||||||
amount: Number(amount),
|
|
||||||
description: description || undefined,
|
|
||||||
category_ids: selectedCategoryId !== '' ? [Number(selectedCategoryId)] : undefined,
|
|
||||||
date: txDate || undefined,
|
|
||||||
};
|
|
||||||
try {
|
|
||||||
const created = await createTransaction(payload);
|
|
||||||
onTransactionAdded(created);
|
|
||||||
setAmount(''); setDescription(''); setSelectedCategoryId(''); setTxDate('');
|
|
||||||
} catch (err: any) {
|
|
||||||
alert(err?.message || 'Failed to create transaction');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function handleCreateCategory(e: React.FormEvent) {
|
|
||||||
e.preventDefault();
|
|
||||||
if (!newCatName.trim()) return;
|
|
||||||
try {
|
|
||||||
const cat = await createCategory({ name: newCatName.trim(), description: newCatDesc || undefined });
|
|
||||||
onCategoryCreated(cat);
|
|
||||||
setNewCatName(''); setNewCatDesc('');
|
|
||||||
} catch (err: any) {
|
|
||||||
alert(err?.message || 'Failed to create category');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<>
|
|
||||||
<section className="card">
|
|
||||||
<h3>Add Transaction</h3>
|
|
||||||
<form onSubmit={handleCreate} className="form-row">
|
|
||||||
<input className="input" type="number" step="0.01" placeholder="Amount" value={amount} onChange={(e) => setAmount(e.target.value)} required />
|
|
||||||
<input className="input" type="date" placeholder="Date (optional)" value={txDate} onChange={(e) => setTxDate(e.target.value)} />
|
|
||||||
<input className="input" type="text" placeholder="Description (optional)" value={description} onChange={(e) => setDescription(e.target.value)} />
|
|
||||||
<select className="input" value={selectedCategoryId} onChange={(e) => setSelectedCategoryId(e.target.value ? Number(e.target.value) : '')}>
|
|
||||||
<option value="">No category</option>
|
|
||||||
{categories.map(c => (<option key={c.id} value={c.id}>{c.name}</option>))}
|
|
||||||
</select>
|
|
||||||
<button className="btn primary" type="submit">Add</button>
|
|
||||||
</form>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section className="card">
|
|
||||||
<h3>Categories</h3>
|
|
||||||
<form className="form-row" onSubmit={handleCreateCategory}>
|
|
||||||
<input className="input" type="text" placeholder="New category name" value={newCatName} onChange={(e) => setNewCatName(e.target.value)} />
|
|
||||||
<input className="input" type="text" placeholder="Description (optional)" value={newCatDesc} onChange={(e) => setNewCatDesc(e.target.value)} />
|
|
||||||
<button className="btn primary" type="submit">Create category</button>
|
|
||||||
</form>
|
|
||||||
</section>
|
|
||||||
</>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,100 +0,0 @@
|
|||||||
// src/MockBankModal.tsx
|
|
||||||
import { useState } from 'react';
|
|
||||||
import { type Category } from '../api';
|
|
||||||
|
|
||||||
// Define the shape of the generation options
|
|
||||||
// Define the shape of the generation options passed from the modal to
// the generator in Dashboard.
export interface MockGenerationOptions {
  count: number;         // how many transactions to create (positive integer)
  minAmount: number;     // lower bound for random amounts
  maxAmount: number;     // upper bound for random amounts (>= minAmount)
  startDate: string;     // earliest transaction date, YYYY-MM-DD
  endDate: string;       // latest transaction date, YYYY-MM-DD (>= startDate)
  categoryIds: number[]; // candidate category ids; empty means no category
}
|
|
||||||
|
|
||||||
interface MockBankModalProps {
|
|
||||||
isOpen: boolean;
|
|
||||||
isGenerating: boolean;
|
|
||||||
categories: Category[]; // Pass in available categories
|
|
||||||
onClose: () => void;
|
|
||||||
onGenerate: (options: MockGenerationOptions) => void;
|
|
||||||
}
|
|
||||||
|
|
||||||
export default function MockBankModal({ isOpen, isGenerating, categories, onClose, onGenerate }: MockBankModalProps) {
|
|
||||||
// State for all the new form fields
|
|
||||||
const [count, setCount] = useState('10');
|
|
||||||
const [minAmount, setMinAmount] = useState('-200');
|
|
||||||
const [maxAmount, setMaxAmount] = useState('200');
|
|
||||||
const [startDate, setStartDate] = useState(() => new Date(Date.now() - 365 * 24 * 60 * 60 * 1000).toISOString().split('T')[0]); // Default to one year ago
|
|
||||||
const [endDate, setEndDate] = useState(() => new Date().toISOString().split('T')[0]); // Default to today
|
|
||||||
const [selectedCategoryIds, setSelectedCategoryIds] = useState<string[]>([]);
|
|
||||||
|
|
||||||
if (!isOpen) return null;
|
|
||||||
|
|
||||||
  // Parse and validate the form fields; on success hand a typed
  // MockGenerationOptions object to the parent via onGenerate, otherwise
  // show an alert describing the constraints and bail out.
  function handleGenerateClick() {
    const parsedCount = parseInt(count, 10);
    const parsedMinAmount = parseFloat(minAmount);
    const parsedMaxAmount = parseFloat(maxAmount);
    const parsedStartDate = new Date(startDate);
    const parsedEndDate = new Date(endDate);

    // Validation
    if (
      isNaN(parsedCount) || parsedCount <= 0 ||
      isNaN(parsedMinAmount) || isNaN(parsedMaxAmount) ||
      parsedMaxAmount < parsedMinAmount ||
      isNaN(parsedStartDate.getTime()) || isNaN(parsedEndDate.getTime()) ||
      parsedEndDate < parsedStartDate
    ) {
      alert(
        "Please ensure:\n" +
        "- Count is a positive number\n" +
        "- Min and Max Amount are valid numbers, and Max >= Min\n" +
        "- Start and End Date are valid, and End Date >= Start Date"
      );
      return;
    }

    const options: MockGenerationOptions = {
      count: parsedCount,
      minAmount: parsedMinAmount,
      maxAmount: parsedMaxAmount,
      // Dates are passed through as the raw YYYY-MM-DD strings.
      startDate,
      endDate,
      categoryIds: selectedCategoryIds.map(Number),
    };

    onGenerate(options);
  }
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="modal-overlay" onClick={onClose}>
|
|
||||||
<div className="modal-content" onClick={(e) => e.stopPropagation()}>
|
|
||||||
<h3>Generate Mock Transactions</h3>
|
|
||||||
<p className="muted">
|
|
||||||
Customize the random transactions you'd like to import.
|
|
||||||
</p>
|
|
||||||
<div className="space-y">
|
|
||||||
<input className="input" type="number" value={count} onChange={(e) => setCount(e.target.value)} placeholder="Number of transactions" />
|
|
||||||
<div className="form-row" style={{ gridTemplateColumns: '1fr 1fr' }}>
|
|
||||||
<input className="input" type="number" value={minAmount} onChange={(e) => setMinAmount(e.target.value)} placeholder="Min amount" />
|
|
||||||
<input className="input" type="number" value={maxAmount} onChange={(e) => setMaxAmount(e.target.value)} placeholder="Max amount" />
|
|
||||||
</div>
|
|
||||||
<div className="form-row" style={{ gridTemplateColumns: '1fr 1fr' }}>
|
|
||||||
<input className="input" type="date" value={startDate} onChange={(e) => setStartDate(e.target.value)} placeholder="Earliest date" />
|
|
||||||
<input className="input" type="date" value={endDate} onChange={(e) => setEndDate(e.target.value)} placeholder="Latest date" />
|
|
||||||
</div>
|
|
||||||
<select multiple className="input" style={{ height: '120px' }} value={selectedCategoryIds} onChange={(e) => setSelectedCategoryIds(Array.from(e.target.selectedOptions, option => option.value))}>
|
|
||||||
{categories.map(c => (<option key={c.id} value={c.id}>{c.name}</option>))}
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
<div className="actions" style={{ justifyContent: 'flex-end', marginTop: '16px' }}>
|
|
||||||
<button className="btn" onClick={onClose} disabled={isGenerating}>Cancel</button>
|
|
||||||
<button className="btn primary" onClick={handleGenerateClick} disabled={isGenerating}>
|
|
||||||
{isGenerating ? 'Generating...' : `Generate Transactions`}
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,153 +0,0 @@
|
|||||||
:root {
|
|
||||||
--bg: #f7f7fb;
|
|
||||||
--panel: #ffffff;
|
|
||||||
--text: #9aa3b2;
|
|
||||||
--muted: #6b7280;
|
|
||||||
--primary: #6f49fe;
|
|
||||||
--primary-600: #5a37fb;
|
|
||||||
--border: #e5e7eb;
|
|
||||||
--radius: 12px;
|
|
||||||
--shadow: 0 1px 2px rgba(0,0,0,0.04), 0 8px 24px rgba(0,0,0,0.08);
|
|
||||||
|
|
||||||
font-family: Inter, ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial, "Apple Color Emoji", "Segoe UI Emoji";
|
|
||||||
color: var(--text);
|
|
||||||
}
|
|
||||||
|
|
||||||
* { box-sizing: border-box; }
|
|
||||||
|
|
||||||
html, body, #root { height: 100%; }
|
|
||||||
|
|
||||||
body { background: var(--bg); margin: 0; display: block; }
|
|
||||||
|
|
||||||
/* Dark theme variables */
|
|
||||||
/* Dark theme variables */
body[data-theme="dark"] {
  --bg: #161a2b;
  --panel: #283046;
  /* BUGFIX: --text previously equalled --panel and --border (#283046),
     rendering text invisible against panel backgrounds in dark mode.
     Use a light foreground instead. */
  --text: #e5e7eb;
  --muted: #cbd5e1;
  --primary: #8b7bff;
  --primary-600: #7b69ff;
  --border: #283046;
}
|
|
||||||
|
|
||||||
/* Layout */
|
|
||||||
.app-layout { display: grid; grid-template-columns: 260px minmax(0,1fr); height: 100vh; }
|
|
||||||
.sidebar { background: #15172a; color: #e5e7eb; display: flex; flex-direction: column; padding: 20px 12px; }
|
|
||||||
.sidebar .logo { color: #fff; font-weight: 700; font-size: 18px; padding: 12px 14px; display: flex; align-items: center; gap: 10px; }
|
|
||||||
.nav { margin-top: 12px; display: grid; gap: 4px; }
|
|
||||||
.nav a, .nav button { color: #cbd5e1; text-align: left; background: transparent; border: 0; padding: 10px 12px; border-radius: 8px; cursor: pointer; }
|
|
||||||
.nav a.active, .nav a:hover, .nav button:hover { background: rgba(255,255,255,0.08); color: #fff; }
|
|
||||||
|
|
||||||
.content { display: flex; flex-direction: column; overflow-y: auto; min-width: 0; width: 100%; }
|
|
||||||
.topbar { height: 64px; display: flex; flex-shrink: 0; align-items: center; justify-content: space-between; padding: 0 24px; background: var(--panel); border-bottom: 1px solid var(--border); }
|
|
||||||
.topbar .user { color: var(--muted); }
|
|
||||||
.page { padding: 24px; }
|
|
||||||
|
|
||||||
/* Cards */
|
|
||||||
.card { background: var(--panel); border: 1px solid var(--border); border-radius: var(--radius); box-shadow: var(--shadow); padding: 16px; }
|
|
||||||
.card h3 { margin: 0 0 12px; }
|
|
||||||
|
|
||||||
/* Forms */
|
|
||||||
.input, select, textarea {
|
|
||||||
width: 100%;
|
|
||||||
padding: 10px 12px;
|
|
||||||
border-radius: 10px;
|
|
||||||
border: 1px solid var(--border);
|
|
||||||
background-color: var(--panel);
|
|
||||||
color: var(--muted);
|
|
||||||
|
|
||||||
/* Add these properties specifically for the select element */
|
|
||||||
-webkit-appearance: none;
|
|
||||||
-moz-appearance: none;
|
|
||||||
appearance: none;
|
|
||||||
|
|
||||||
padding-right: 32px; /* Add space for the custom arrow */
|
|
||||||
background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3e%3cpath stroke='%236b7280' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='M6 8l4 4 4-4'/%3e%3c/svg%3e");
|
|
||||||
background-position: right 0.5rem center;
|
|
||||||
background-repeat: no-repeat;
|
|
||||||
background-size: 1.5em 1.5em;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
.input:focus, select:focus, textarea:focus {
|
|
||||||
outline: 2px solid var(--primary);
|
|
||||||
outline-offset: 2px;
|
|
||||||
border-color: var(--primary);
|
|
||||||
}
|
|
||||||
.form-row { display: grid; gap: 8px; grid-template-columns: repeat(4, minmax(0,1fr)); }
|
|
||||||
.form-row > * { min-width: 140px; }
|
|
||||||
.form-row > .btn {
|
|
||||||
justify-self: start;
|
|
||||||
}
|
|
||||||
.actions { display: flex; align-items: center; gap: 8px; }
|
|
||||||
|
|
||||||
/* Buttons */
|
|
||||||
.btn { border: 1px solid var(--border); background: #fff; color: var(--text); padding: 10px 14px; border-radius: 10px; cursor: pointer; }
|
|
||||||
.btn.primary { background: var(--primary); border-color: var(--primary); color: #fff; }
|
|
||||||
.btn.primary:hover { background: var(--primary-600); }
|
|
||||||
.btn.ghost { background: transparent; color: var(--muted); }
|
|
||||||
.btn, .input, select, textarea, .nav a, .nav button, .segmented button {
|
|
||||||
transition: all 0.2s ease-in-out;
|
|
||||||
}
|
|
||||||
.btn.small {
|
|
||||||
padding: 4px 10px;
|
|
||||||
font-size: 0.875rem; /* 14px */
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Tables */
|
|
||||||
.table { width: 100%; border-collapse: collapse; }
|
|
||||||
.table th, .table td { padding: 10px; border-bottom: 1px solid var(--border); }
|
|
||||||
.table th { text-align: left; color: var(--muted); font-weight: 600; }
|
|
||||||
.table td.amount { text-align: right; font-variant-numeric: tabular-nums; }
|
|
||||||
.table-controls {
|
|
||||||
display: flex;
|
|
||||||
justify-content: space-between;
|
|
||||||
align-items: center;
|
|
||||||
margin-bottom: 12px; /* Adds some space above the table */
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Segmented control */
|
|
||||||
.segmented { display: inline-flex; background: #f1f5f9; border-radius: 10px; padding: 4px; border: 1px solid var(--border); }
|
|
||||||
.segmented button { border: 0; background: transparent; padding: 8px 12px; border-radius: 8px; color: var(--muted); cursor: pointer; }
|
|
||||||
.segmented button.active { background: #fff; color: var(--text); box-shadow: var(--shadow); }
|
|
||||||
|
|
||||||
/* Auth layout */
|
|
||||||
body.auth-page #root {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
min-height: 100vh;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Utility */
|
|
||||||
.muted { color: var(--muted); }
|
|
||||||
.space-y > * + * { margin-top: 12px; }
|
|
||||||
|
|
||||||
/* Modal mock bank */
|
|
||||||
.modal-overlay {
|
|
||||||
position: fixed;
|
|
||||||
top: 0;
|
|
||||||
left: 0;
|
|
||||||
right: 0;
|
|
||||||
bottom: 0;
|
|
||||||
background: rgba(0, 0, 0, 0.5);
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
z-index: 1000;
|
|
||||||
}
|
|
||||||
|
|
||||||
.modal-content {
|
|
||||||
background: var(--panel);
|
|
||||||
padding: 24px;
|
|
||||||
border-radius: var(--radius);
|
|
||||||
box-shadow: var(--shadow);
|
|
||||||
width: 100%;
|
|
||||||
max-width: 400px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.connection-row {
|
|
||||||
display: flex;
|
|
||||||
justify-content: space-between;
|
|
||||||
align-items: center;
|
|
||||||
}
|
|
||||||
@@ -1,53 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group 8 - Personal finance tracker
|
|
||||||
- Mentor: Jaychander
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-10-16
|
|
||||||
- Attendees: Dejan Ribarovski, Lukas Trkan
|
|
||||||
- Notetaker: Dejan Ribarovski
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
## Action Items from Last Week (During Meeting)
|
|
||||||
|
|
||||||
- [x] start coding the app logic
|
|
||||||
- [x] start writing the report so it matches the actual progress
|
|
||||||
- [x] redo the system diagram so it includes a response flow
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
Implemented initial functioning version of the app, added OAuth with BankId and MojeID,
|
|
||||||
added database snapshots.
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
report.md is up to date
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
Prepare 3-5 questions and topics you want to discuss with your mentor.
|
|
||||||
|
|
||||||
1. What other functionality should be added to the app
|
|
||||||
2. Priority for the next week (Testing maybe?)
|
|
||||||
3. Question 3
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [x] OAuth
|
|
||||||
- [x] CI/CD fix
|
|
||||||
- [ ] Database local (multiple bank accounts)
|
|
||||||
- [ ] Add tests and set up github pipeline
|
|
||||||
- [ ] Frontend imporvment - user experience
|
|
||||||
- [ ] make the report more clear
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group 8 - Personal finance tracker
|
|
||||||
- Mentor: Jaychander
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-10-23
|
|
||||||
- Attendees: Dejan
|
|
||||||
- Notetaker: Dejan
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [x] OAuth (BankID)
|
|
||||||
- [x] CI/CD fix
|
|
||||||
- [X] Database local (multiple bank accounts)
|
|
||||||
- [X] Add tests and set up github pipeline
|
|
||||||
- [X] Frontend imporvment - user experience
|
|
||||||
- [ ] make the report more clear - partly
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
Improved Frontend, added Mock Bank, fixed deployment, fixed OAuth(BankID) on production, added basic tests
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
Not much - just updated the work done
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
This was not prepared, I planned to do it right before meeting, but Jaychander needed to go somewhere earlier.
|
|
||||||
|
|
||||||
1. Question 1
|
|
||||||
2. Question 2
|
|
||||||
3. Question 3
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
The tracker should not store the transactions in the database - security vulnerability.
|
|
||||||
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] Dont store data in database (security) - Load it on login (from CSAS API and local database), load automatically with email
|
|
||||||
- [ ] Go through the checklist
|
|
||||||
- [ ] Look for possible APIs (like stocks or financial details whatever)
|
|
||||||
- [ ] Report
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group 8 - Personal finance tracker
|
|
||||||
- Mentor: Jaychander
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-10-30
|
|
||||||
- Attendees: Dejan, Lukas
|
|
||||||
- Notetaker: Dejan
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] Dont store data in database (security) - Load it on login (from CSAS API and local database), load automatically with email
|
|
||||||
- [X] Go through the checklist
|
|
||||||
- [X] Look for possible APIs (like stocks or financial details whatever)
|
|
||||||
- [ ] Report - partly
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
Implemented CSAS API transactions fetch, Added tests with testing database on github actions, redone UI,
|
|
||||||
added currency exchange rate with CNB API
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
Not much - just updated the work done
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
1. Security regarding storing transactions - possibility of encryption
|
|
||||||
2. Realisticaly what needs to be done for us to be done
|
|
||||||
3. Question 3
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
The tracker should not store the transactions in the database - security vulnerability.
|
|
||||||
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] Change the name on frontend from 7project
|
|
||||||
- [ ] Finalize the funcionality and everyting in the code part
|
|
||||||
- [ ] Try to finalize report with focus on reproducibility
|
|
||||||
- [ ] More high level explanation of the workflow in the report
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group 8 - Personal finance tracker
|
|
||||||
- Mentor: Jaychander
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-10-08
|
|
||||||
- Attendees: Dejan Ribarovski, Lukas Trkan
|
|
||||||
- Notetaker: Dejan Ribarovski
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
|
|
||||||
Lukas has implemented the template source directories, source files and config files necessary for deployment
|
|
||||||
- docker compose for database, redis cache and rabbit MQ
|
|
||||||
- tofu
|
|
||||||
- backend template
|
|
||||||
- frontend template
|
|
||||||
- charts templates
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
- Created GitHub issues for the next steps
|
|
||||||
- Added this document + checklist and report
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
Prepare 3-5 questions and topics you want to discuss with your mentor.
|
|
||||||
|
|
||||||
1. Anything we should add structure-wise?
|
|
||||||
2. Anything you would like us to prioritize until next week?
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
|
|
||||||
- start working on the report
|
|
||||||
- start coding the actual code
|
|
||||||
- write problems solved
|
|
||||||
- redo the system diagram - see the response as well
|
|
||||||
- create a meetings folder wih seperate meetings files
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] start coding the app logic
|
|
||||||
- [ ] start writing the report so it matches the actual progress
|
|
||||||
- [ ] redo the system diagram so it includes a response flow
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group X - Project Title
|
|
||||||
- Mentor: Mentor Name
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-09-19
|
|
||||||
- Attendees: Name1, Name2, Name3
|
|
||||||
- Notetaker: Name1
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
Prepare 3-5 questions and topics you want to discuss with your mentor.
|
|
||||||
|
|
||||||
1. Question 1
|
|
||||||
2. Question 2
|
|
||||||
3. Question 3
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] Action Item 1
|
|
||||||
- [ ] Action Item 2
|
|
||||||
- [ ] Action Item 3
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,397 +0,0 @@
|
|||||||
# Personal finance tracker
|
|
||||||
|
|
||||||
> **Instructions**:
|
|
||||||
> This template provides the structure for your project report.
|
|
||||||
> Replace the placeholder text with your actual content.
|
|
||||||
> Remove instructions that are not relevant for your project, but leave the headings along with a (NA) label.
|
|
||||||
|
|
||||||
## Project Overview
|
|
||||||
|
|
||||||
**Project Name**: Personal Finance Tracker
|
|
||||||
|
|
||||||
**Group Members**:
|
|
||||||
|
|
||||||
- 289229, Lukáš Trkan, lukastrkan
|
|
||||||
- 289258, Dejan Ribarovski, derib2613, ribardej
|
|
||||||
|
|
||||||
**Brief Description**: (něco spíš jako abstract, introuction, story behind)
|
|
||||||
Our application is a finance tracker, so a person can easily track his cash flow
|
|
||||||
through multiple bank accounts. Person can label transactions with custom categories
|
|
||||||
and later filter by them.
|
|
||||||
|
|
||||||
## Architecture Overview
|
|
||||||
Our system is a full‑stack web application composed of a React frontend, a FastAPI backend, a PostgreSQL database, and asynchronous background workers powered by Celery with RabbitMQ. Redis is available for caching/kv and may be used by Celery as a result backend. The backend exposes REST endpoints for authentication (email/password and OAuth), users, categories, and transactions. A thin controller layer (FastAPI routers) lives under app/api. Infrastructure for Kubernetes is provided via OpenTofu (Terraform‑compatible) modules and the application is packaged via a Helm chart.
|
|
||||||
|
|
||||||
### High-Level Architecture
|
|
||||||
|
|
||||||
```mermaid
|
|
||||||
flowchart LR
|
|
||||||
proc_queue[Message Queue] --> proc_queue_worker[Worker Service]
|
|
||||||
proc_queue_worker --> ext_mail[(Email Service)]
|
|
||||||
proc_cron[Task planner] --> proc_queue
|
|
||||||
proc_queue_worker --> ext_bank[(Bank API)]
|
|
||||||
proc_queue_worker --> db
|
|
||||||
client[Client/Frontend] <--> svc[Backend API]
|
|
||||||
svc --> proc_queue
|
|
||||||
svc <--> db[(Database)]
|
|
||||||
svc <--> cache[(Cache)]
|
|
||||||
```
|
|
||||||
|
|
||||||
### Components
|
|
||||||
|
|
||||||
- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles login/registration, shows latest transactions, filtering, and allows adding transactions.
|
|
||||||
- Backend API (backend/app): FastAPI app with routers under app/api for auth, categories, and transactions. Uses FastAPI Users for auth (JWT + OAuth), SQLAlchemy ORM, and Pydantic v2 schemas.
|
|
||||||
- Worker service (backend/app/workers): Celery worker handling asynchronous tasks (e.g., sending verification emails, future background processing).
|
|
||||||
- Database (PostgreSQL): Persists users, categories, transactions; schema managed by Alembic migrations.
|
|
||||||
- Message Queue (RabbitMQ): Transports background jobs from the API to the worker.
|
|
||||||
- Cache/Result Store (Redis): Available for caching or Celery result backend.
|
|
||||||
- Infrastructure as Code (tofu/): OpenTofu modules provisioning cluster services (RabbitMQ, Redis, Argo CD, cert-manager, Cloudflare tunnel, etc.).
|
|
||||||
- Deployment Chart (charts/myapp-chart/): Helm chart to deploy the application to Kubernetes.
|
|
||||||
|
|
||||||
### Technologies Used
|
|
||||||
|
|
||||||
- Backend: Python, FastAPI, FastAPI Users, SQLAlchemy, Pydantic, Alembic, Celery
|
|
||||||
- Frontend: React, TypeScript, Vite
|
|
||||||
- Database: PostgreSQL
|
|
||||||
- Messaging: RabbitMQ
|
|
||||||
- Cache: Redis
|
|
||||||
- Containerization/Orchestration: Docker, Docker Compose (dev), Kubernetes, Helm
|
|
||||||
- IaC/Platform: OpenTofu (Terraform), Argo CD, cert-manager, MetalLB, Cloudflare Tunnel, Prometheus
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
### System Requirements
|
|
||||||
|
|
||||||
- Operating System: Linux, macOS, or Windows
|
|
||||||
- Minimum RAM: 4 GB (8 GB recommended for running backend, frontend, and database together)
|
|
||||||
- Storage: 2 GB free (Docker images may require additional space)
|
|
||||||
|
|
||||||
### Required Software
|
|
||||||
|
|
||||||
- Docker Desktop or Docker Engine 24+
|
|
||||||
- Docker Compose v2+
|
|
||||||
- Node.js 20+ and npm 10+ (for local frontend dev/build)
|
|
||||||
- Python 3.12+ (for local backend dev outside Docker)
|
|
||||||
- PostgreSQL 15+ (optional if running DB outside Docker)
|
|
||||||
- Helm 3.12+ and kubectl 1.29+ (for Kubernetes deployment)
|
|
||||||
- OpenTofu 1.7+ (for infrastructure provisioning)
|
|
||||||
|
|
||||||
### Environment Variables (common)
|
|
||||||
|
|
||||||
- Backend: SECRET, FRONTEND_URL, BACKEND_URL, DATABASE_URL, RABBITMQ_URL, REDIS_URL
|
|
||||||
- OAuth vars (Backend): MOJEID_CLIENT_ID/SECRET, BANKID_CLIENT_ID/SECRET (optional)
|
|
||||||
- Frontend: VITE_BACKEND_URL
|
|
||||||
|
|
||||||
### Dependencies (key libraries)
|
|
||||||
I am not sure what is meant by "key libraries"
|
|
||||||
|
|
||||||
Backend: FastAPI, fastapi-users, SQLAlchemy, pydantic v2, Alembic, Celery
|
|
||||||
Frontend: React, TypeScript, Vite
|
|
||||||
Services: PostgreSQL, RabbitMQ, Redis
|
|
||||||
|
|
||||||
## Build Instructions
|
|
||||||
|
|
||||||
You can run the project with Docker Compose (recommended for local development) or run services manually.
|
|
||||||
|
|
||||||
### 1) Clone the Repository
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git clone https://github.com/dat515-2025/Group-8.git
|
|
||||||
cd 7project
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2) Install dependencies
|
|
||||||
Backend
|
|
||||||
```bash
|
|
||||||
# In 7project/backend
|
|
||||||
python3.12 -m venv .venv
|
|
||||||
source .venv/bin/activate # Windows: .venv\Scripts\activate
|
|
||||||
pip install -r requirements.txt
|
|
||||||
```
|
|
||||||
Frontend
|
|
||||||
```bash
|
|
||||||
# In 7project/frontend
|
|
||||||
npm install
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3) Manual Local Run
|
|
||||||
|
|
||||||
Backend
|
|
||||||
```bash
|
|
||||||
# From the 7project/ directory
|
|
||||||
docker compose up --build
|
|
||||||
# This starts: PostgreSQL, RabbitMQ/Redis (if defined)
|
|
||||||
|
|
||||||
# Set environment variables (or create .env file)
|
|
||||||
export SECRET=CHANGE_ME_SECRET
|
|
||||||
export BACKEND_URL=http://127.0.0.1:8000
|
|
||||||
export FRONTEND_URL=http://localhost:5173
|
|
||||||
export DATABASE_URL=postgresql+asyncpg://user:password@127.0.0.1:5432/app
|
|
||||||
export RABBITMQ_URL=amqp://guest:guest@127.0.0.1:5672/
|
|
||||||
export REDIS_URL=redis://127.0.0.1:6379/0
|
|
||||||
|
|
||||||
# Apply DB migrations (Alembic)
|
|
||||||
# From 7project/backend
|
|
||||||
alembic upgrade head
|
|
||||||
|
|
||||||
# Run API
|
|
||||||
uvicorn app.app:fastApi --reload --host 0.0.0.0 --port 8000
|
|
||||||
|
|
||||||
# Run Celery worker (optional, for emails/background tasks)
|
|
||||||
celery -A app.celery_app.celery_app worker -l info
|
|
||||||
```
|
|
||||||
|
|
||||||
Frontend
|
|
||||||
```bash
|
|
||||||
# Configure backend URL for dev
|
|
||||||
echo 'VITE_BACKEND_URL=http://127.0.0.1:8000' > .env
|
|
||||||
npm run dev
|
|
||||||
# Open http://localhost:5173
|
|
||||||
```
|
|
||||||
|
|
||||||
- Backend default: http://127.0.0.1:8000 (OpenAPI at /docs)
|
|
||||||
- Frontend default: http://localhost:5173
|
|
||||||
|
|
||||||
If needed, adjust compose services/ports in compose.yml.
|
|
||||||
|
|
||||||
|
|
||||||
## Deployment Instructions
|
|
||||||
|
|
||||||
### Local (Docker Compose)
|
|
||||||
|
|
||||||
Described in the previous section (Manual Local Run)
|
|
||||||
|
|
||||||
### Kubernetes (via OpenTofu + Helm)
|
|
||||||
|
|
||||||
1) Provision platform services (RabbitMQ/Redis/ingress/tunnel/etc.) with OpenTofu
|
|
||||||
```bash
|
|
||||||
cd tofu
|
|
||||||
# copy and edit variables
|
|
||||||
cp terraform.tfvars.example terraform.tfvars
|
|
||||||
# authenticate to your cluster/cloud as needed, then:
|
|
||||||
tofu init
|
|
||||||
tofu plan
|
|
||||||
tofu apply
|
|
||||||
```
|
|
||||||
|
|
||||||
2) Deploy the app using Helm
|
|
||||||
```bash
|
|
||||||
# Set the namespace
|
|
||||||
kubectl create namespace myapp || true
|
|
||||||
|
|
||||||
# Install/upgrade the chart with required values
|
|
||||||
helm upgrade --install myapp charts/myapp-chart \
|
|
||||||
-n myapp \
|
|
||||||
-f charts/myapp-chart/values.yaml \
|
|
||||||
--set image.backend.repository=myorg/myapp-backend \
|
|
||||||
--set image.backend.tag=latest \
|
|
||||||
--set env.BACKEND_URL="https://myapp.example.com" \
|
|
||||||
--set env.FRONTEND_URL="https://myapp.example.com" \
|
|
||||||
--set env.SECRET="CHANGE_ME_SECRET"
|
|
||||||
```
|
|
||||||
Adjust values to your registry and domain. The chart’s NOTES.txt includes additional examples.
|
|
||||||
|
|
||||||
3) Expose and access
|
|
||||||
- If using Cloudflare Tunnel or an ingress, configure DNS accordingly (see tofu/modules/cloudflare and deployment/tunnel.yaml).
|
|
||||||
- For quick testing without ingress:
|
|
||||||
```bash
|
|
||||||
kubectl -n myapp port-forward deploy/myapp-backend 8000:8000
|
|
||||||
kubectl -n myapp port-forward deploy/myapp-frontend 5173:80
|
|
||||||
```
|
|
||||||
|
|
||||||
### Verification
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check pods
|
|
||||||
kubectl -n myapp get pods
|
|
||||||
|
|
||||||
# Backend health
|
|
||||||
curl -i http://127.0.0.1:8000/
|
|
||||||
# OpenAPI
|
|
||||||
open http://127.0.0.1:8000/docs
|
|
||||||
|
|
||||||
# Frontend (if port-forwarded)
|
|
||||||
open http://localhost:5173
|
|
||||||
```
|
|
||||||
|
|
||||||
## Testing Instructions
|
|
||||||
|
|
||||||
### Unit Tests
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Commands to run unit tests
|
|
||||||
# For example:
|
|
||||||
# go test ./...
|
|
||||||
# npm test
|
|
||||||
```
|
|
||||||
|
|
||||||
### Integration Tests
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Commands to run integration tests
|
|
||||||
# Any setup required for integration tests
|
|
||||||
```
|
|
||||||
|
|
||||||
### End-to-End Tests
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Commands to run e2e tests
|
|
||||||
# How to set up test environment
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage Examples
|
|
||||||
|
|
||||||
All endpoints are documented at OpenAPI: http://127.0.0.1:8000/docs
|
|
||||||
|
|
||||||
### Auth: Register and Login (JWT)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Register
|
|
||||||
curl -X POST http://127.0.0.1:8000/auth/register \
|
|
||||||
-H 'Content-Type: application/json' \
|
|
||||||
-d '{
|
|
||||||
"email": "user@example.com",
|
|
||||||
"password": "StrongPassw0rd",
|
|
||||||
"first_name": "Jane",
|
|
||||||
"last_name": "Doe"
|
|
||||||
}'
|
|
||||||
|
|
||||||
# Login (JWT)
|
|
||||||
TOKEN=$(curl -s -X POST http://127.0.0.1:8000/auth/jwt/login \
|
|
||||||
-H 'Content-Type: application/x-www-form-urlencoded' \
|
|
||||||
-d 'username=user@example.com&password=StrongPassw0rd' | jq -r .access_token)
|
|
||||||
|
|
||||||
echo $TOKEN
|
|
||||||
|
|
||||||
# Call a protected route
|
|
||||||
curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
|
||||||
```
|
|
||||||
|
|
||||||
### Frontend
|
|
||||||
|
|
||||||
- Start with: npm run dev in 7project/frontend
|
|
||||||
- Ensure VITE_BACKEND_URL is set to the backend URL (e.g., http://127.0.0.1:8000)
|
|
||||||
- Open http://localhost:5173
|
|
||||||
- Login, view latest transactions, filter, and add new transactions from the UI.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Presentation Video
|
|
||||||
|
|
||||||
**YouTube Link**: [Insert your YouTube link here]
|
|
||||||
|
|
||||||
**Duration**: [X minutes Y seconds]
|
|
||||||
|
|
||||||
**Video Includes**:
|
|
||||||
|
|
||||||
- [ ] Project overview and architecture
|
|
||||||
- [ ] Live demonstration of key features
|
|
||||||
- [ ] Code walkthrough
|
|
||||||
- [ ] Build and deployment showcase
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Common Issues
|
|
||||||
|
|
||||||
#### Issue 1: [Common problem]
|
|
||||||
|
|
||||||
**Symptoms**: [What the user sees]
|
|
||||||
**Solution**: [Step-by-step fix]
|
|
||||||
|
|
||||||
#### Issue 2: [Another common problem]
|
|
||||||
|
|
||||||
**Symptoms**: [What the user sees]
|
|
||||||
**Solution**: [Step-by-step fix]
|
|
||||||
|
|
||||||
### Debug Commands
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Useful commands for debugging
|
|
||||||
# Log viewing commands
|
|
||||||
# Service status checks
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Self-Assessment Table
|
|
||||||
|
|
||||||
> Be honest and detailed in your assessments.
|
|
||||||
> This information is used for individual grading.
|
|
||||||
> Link to the specific commit on GitHub for each contribution.
|
|
||||||
|
|
||||||
| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|
|
||||||
|-----------------------------------------------------------------------|-------------| ------------- |----------------|------------| ----------- |
|
|
||||||
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8#) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
|
||||||
| [Design Document](https://github.com/dat515-2025/Group-8/blob/main/6design/design.md) | Both | ✅ Complete | 2 Hours | Easy | [Any notes] |
|
|
||||||
| [Backend API Development](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/api) | Dejan | 🔄 In Progress | 10 hours | Medium | [Any notes] |
|
|
||||||
| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | 🔄 In Progress | [X hours] | Medium | [Any notes] |
|
|
||||||
| [Frontend Development](https://github.com/dat515-2025/Group-8/tree/main/7project/frontend) | Dejan | 🔄 In Progress | 7 hours so far | Medium | [Any notes] |
|
|
||||||
| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | [X hours] | Easy | [Any notes] |
|
|
||||||
| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | [Any notes] |
|
|
||||||
| [Testing Implementation](https://github.com/dat515-2025/group-name) | Dejan | 🔄 In Progress | [X hours] | Medium | [Any notes] |
|
|
||||||
| [Documentation](https://github.com/dat515-2025/group-name) | Both | 🔄 In Progress | [X hours] | Easy | [Any notes] |
|
|
||||||
| [Presentation Video](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Medium | [Any notes] |
|
|
||||||
|
|
||||||
**Legend**: ✅ Complete | 🔄 In Progress | ⏳ Pending | ❌ Not Started
|
|
||||||
|
|
||||||
## Hour Sheet
|
|
||||||
|
|
||||||
> Link to the specific commit on GitHub for each contribution.
|
|
||||||
|
|
||||||
### [Lukáš]
|
|
||||||
|
|
||||||
| Date | Activity | Hours | Description |
|
|
||||||
|----------------|---------------------|------------|----------------------------------------------------|
|
|
||||||
| 4.10 to 10.10 | Initial Setup | 40 | Repository setup, project structure, cluster setup |
|
|
||||||
| 14.10 to 16.10 | Backend Development | 12 | Implemented user authentication - oauth |
|
|
||||||
| 8.10 to 12.10 | CI/CD | 10 | Created database schema and models |
|
|
||||||
| [Date] | Testing | [X.X] | Unit tests for API endpoints |
|
|
||||||
| [Date] | Documentation | [X.X] | Updated README and design doc |
|
|
||||||
| **Total** | | **[XX.X]** | |
|
|
||||||
|
|
||||||
### Dejan
|
|
||||||
|
|
||||||
| Date | Activity | Hours | Description |
|
|
||||||
|-----------------|----------------------|--------|----------------------------------------------------------------------------------|
|
|
||||||
| 25.9. | Design | 2 | 6design |
|
|
||||||
| 9.10 to 11.10. | Backend APIs | 10 | Implemented Backend APIs |
|
|
||||||
| 13.10 to 15.10. | Frontend Development | 7 | Created user interface mockups |
|
|
||||||
| Continually | Documantation | 5 | Documenting the dev process |
|
|
||||||
| 21.10 to 23.10 | Tests, forntend | 10 | Test basics, balance charts, and frontend improvement |
|
|
||||||
| 28.10 to 30.10 | Tests, forntend | 7 | Tests improvement with test database setup, UI fix and exchange rate integration |
|
|
||||||
| **Total** | | **41** | |
|
|
||||||
|
|
||||||
|
|
||||||
### Group Total: [XXX.X] hours
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Final Reflection
|
|
||||||
|
|
||||||
### What We Learned
|
|
||||||
|
|
||||||
[Reflect on the key technical and collaboration skills learned during this project]
|
|
||||||
|
|
||||||
### Challenges Faced
|
|
||||||
|
|
||||||
[Describe the main challenges and how you overcame them]
|
|
||||||
|
|
||||||
### If We Did This Again
|
|
||||||
|
|
||||||
[What would you do differently? What worked well that you'd keep?]
|
|
||||||
|
|
||||||
### Individual Growth
|
|
||||||
|
|
||||||
#### [Team Member 1 Name]
|
|
||||||
|
|
||||||
[Personal reflection on growth, challenges, and learning]
|
|
||||||
|
|
||||||
#### [Team Member 2 Name]
|
|
||||||
|
|
||||||
[Personal reflection on growth, challenges, and learning]
|
|
||||||
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Report Completion Date**: [Date]
|
|
||||||
**Last Updated**: 15.10.2025
|
|
||||||
@@ -1,42 +0,0 @@
|
|||||||
{{- if .Values.s3.enabled }}
|
|
||||||
apiVersion: k8s.mariadb.com/v1alpha1
|
|
||||||
kind: Backup
|
|
||||||
metadata:
|
|
||||||
name: backup
|
|
||||||
namespace: mariadb-operator
|
|
||||||
spec:
|
|
||||||
mariaDbRef:
|
|
||||||
name: mariadb-repl
|
|
||||||
namespace: mariadb-operator
|
|
||||||
schedule:
|
|
||||||
cron: "0 */3 * * *"
|
|
||||||
suspend: false
|
|
||||||
timeZone: "Europe/Prague"
|
|
||||||
maxRetention: 720h # 30 days
|
|
||||||
compression: bzip2
|
|
||||||
storage:
|
|
||||||
s3:
|
|
||||||
bucket: {{ .Values.s3.bucket | quote }}
|
|
||||||
endpoint: {{ .Values.s3.endpoint | quote }}
|
|
||||||
accessKeyIdSecretKeyRef:
|
|
||||||
name: s3-credentials
|
|
||||||
key: key_id
|
|
||||||
secretAccessKeySecretKeyRef:
|
|
||||||
name: s3-credentials
|
|
||||||
key: secret_key
|
|
||||||
region: {{ .Values.s3.region | quote }}
|
|
||||||
tls:
|
|
||||||
enabled: true
|
|
||||||
# Define a PVC to use as staging area for keeping the backups while they are being processed.
|
|
||||||
stagingStorage:
|
|
||||||
persistentVolumeClaim:
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
storage: 10Gi
|
|
||||||
accessModes:
|
|
||||||
- ReadWriteOnce
|
|
||||||
args:
|
|
||||||
- --single-transaction
|
|
||||||
- --all-databases
|
|
||||||
logLevel: info
|
|
||||||
{{- end }}
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
{{- if .Values.s3.enabled }}
|
|
||||||
apiVersion: v1
|
|
||||||
kind: Secret
|
|
||||||
metadata:
|
|
||||||
name: s3-credentials
|
|
||||||
namespace: mariadb-operator
|
|
||||||
type: Opaque
|
|
||||||
stringData:
|
|
||||||
key_id: "{{ .Values.s3.key_id }}"
|
|
||||||
secret_key: "{{ .Values.s3.key_secret }}"
|
|
||||||
{{- end }}
|
|
||||||
@@ -1,82 +0,0 @@
|
|||||||
terraform {
|
|
||||||
required_providers {
|
|
||||||
kubectl = {
|
|
||||||
source = "gavinbunney/kubectl"
|
|
||||||
version = "1.19.0"
|
|
||||||
}
|
|
||||||
helm = {
|
|
||||||
source = "hashicorp/helm"
|
|
||||||
version = "3.0.2"
|
|
||||||
}
|
|
||||||
kubernetes = {
|
|
||||||
source = "hashicorp/kubernetes"
|
|
||||||
version = "2.38.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
resource "kubernetes_namespace" "mariadb-operator" {
|
|
||||||
metadata {
|
|
||||||
name = "mariadb-operator"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
locals {
|
|
||||||
mariadb_secret_yaml = templatefile("${path.module}/mariadb-secret.yaml", {
|
|
||||||
password = var.mariadb_password
|
|
||||||
user_password = var.mariadb_user_password
|
|
||||||
root_password = var.mariadb_root_password
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
resource "kubectl_manifest" "secrets" {
|
|
||||||
yaml_body = local.mariadb_secret_yaml
|
|
||||||
depends_on = [kubernetes_namespace.mariadb-operator]
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
resource "helm_release" "mariadb-operator-crds" {
|
|
||||||
name = "mariadb-operator-crds"
|
|
||||||
repository = "https://helm.mariadb.com/mariadb-operator"
|
|
||||||
chart = "mariadb-operator-crds"
|
|
||||||
namespace = "mariadb-operator"
|
|
||||||
version = "25.8.4"
|
|
||||||
depends_on = [kubectl_manifest.secrets]
|
|
||||||
timeout = 3600
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
resource "helm_release" "mariadb-operator" {
|
|
||||||
name = "mariadb-operator"
|
|
||||||
repository = "https://helm.mariadb.com/mariadb-operator"
|
|
||||||
chart = "mariadb-operator"
|
|
||||||
depends_on = [helm_release.mariadb-operator-crds, kubectl_manifest.secrets]
|
|
||||||
namespace = "mariadb-operator"
|
|
||||||
version = "25.8.3"
|
|
||||||
timeout = 3600
|
|
||||||
}
|
|
||||||
|
|
||||||
resource "helm_release" "maxscale_helm" {
|
|
||||||
name = "maxscale-helm"
|
|
||||||
chart = "${path.module}/charts/maxscale-helm"
|
|
||||||
version = "1.0.14"
|
|
||||||
depends_on = [helm_release.mariadb-operator-crds, kubectl_manifest.secrets]
|
|
||||||
timeout = 3600
|
|
||||||
|
|
||||||
set = [
|
|
||||||
{ name = "user.name", value = var.mariadb_user_name },
|
|
||||||
{ name = "user.host", value = var.mariadb_user_host },
|
|
||||||
{ name = "metallb.maxscale_ip", value = var.maxscale_ip },
|
|
||||||
{ name = "metallb.service_ip", value = var.service_ip },
|
|
||||||
{ name = "metallb.primary_ip", value = var.primary_ip },
|
|
||||||
{ name = "metallb.secondary_ip", value = var.secondary_ip },
|
|
||||||
{ name = "phpmyadmin.enabled", value = tostring(var.phpmyadmin_enabled) },
|
|
||||||
{ name = "base_domain", value = var.cloudflare_domain },
|
|
||||||
{ name = "s3.key_id", value = var.s3_key_id },
|
|
||||||
{ name = "s3.key_secret", value = var.s3_key_secret },
|
|
||||||
{ name = "s3.enabled", value = var.s3_enabled },
|
|
||||||
{ name = "s3.endpoint", value = var.s3_endpoint },
|
|
||||||
{ name = "s3.region", value = var.s3_region },
|
|
||||||
{ name = "s3.bucket", value = var.s3_bucket },
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
terraform {
|
|
||||||
required_providers {
|
|
||||||
kubectl = {
|
|
||||||
source = "gavinbunney/kubectl"
|
|
||||||
version = "1.19.0"
|
|
||||||
}
|
|
||||||
helm = {
|
|
||||||
source = "hashicorp/helm"
|
|
||||||
version = "3.0.2"
|
|
||||||
}
|
|
||||||
kubernetes = {
|
|
||||||
source = "hashicorp/kubernetes"
|
|
||||||
version = "2.38.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Deploy metrics-server via Helm into kube-system namespace
|
|
||||||
resource "helm_release" "metrics_server" {
|
|
||||||
name = "metrics-server"
|
|
||||||
repository = "https://kubernetes-sigs.github.io/metrics-server/"
|
|
||||||
chart = "metrics-server"
|
|
||||||
namespace = "kube-system"
|
|
||||||
|
|
||||||
wait = true
|
|
||||||
timeout = 600
|
|
||||||
recreate_pods = false
|
|
||||||
force_update = false
|
|
||||||
|
|
||||||
values = [
|
|
||||||
file("${path.module}/values.yaml")
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
# Values overriding defaults for metrics-server Helm chart
|
|
||||||
# Fix TLS and address selection issues when scraping kubelets (common on Talos)
|
|
||||||
args:
|
|
||||||
- --kubelet-insecure-tls
|
|
||||||
- --kubelet-preferred-address-types=InternalIP,Hostname,InternalDNS,ExternalDNS,ExternalIP
|
|
||||||
- --kubelet-use-node-status-port=true
|
|
||||||
|
|
||||||
# Using hostNetwork often helps in restricted CNI/DNS environments
|
|
||||||
#hostNetwork: true
|
|
||||||
# Required when hostNetwork is true so DNS works as expected
|
|
||||||
#dnsPolicy: ClusterFirstWithHostNet
|
|
||||||
|
|
||||||
# Enable metrics API service monitor if Prometheus Operator is present (optional)
|
|
||||||
# serviceMonitor:
|
|
||||||
# enabled: true
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
apiVersion: networking.cfargotunnel.com/v1alpha1
|
|
||||||
kind: TunnelBinding
|
|
||||||
metadata:
|
|
||||||
name: grafana-tunnel-binding
|
|
||||||
namespace: monitoring
|
|
||||||
subjects:
|
|
||||||
- name: grafana
|
|
||||||
spec:
|
|
||||||
target: http://kube-prometheus-stack-grafana.monitoring.svc.cluster.local
|
|
||||||
fqdn: grafana.${base_domain}
|
|
||||||
noTlsVerify: true
|
|
||||||
tunnelRef:
|
|
||||||
kind: ClusterTunnel
|
|
||||||
name: cluster-tunnel
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
terraform {
|
|
||||||
required_providers {
|
|
||||||
kubectl = {
|
|
||||||
source = "gavinbunney/kubectl"
|
|
||||||
version = "1.19.0"
|
|
||||||
}
|
|
||||||
helm = {
|
|
||||||
source = "hashicorp/helm"
|
|
||||||
version = "3.0.2"
|
|
||||||
}
|
|
||||||
kubernetes = {
|
|
||||||
source = "hashicorp/kubernetes"
|
|
||||||
version = "2.38.0"
|
|
||||||
}
|
|
||||||
kustomization = {
|
|
||||||
source = "kbst/kustomization"
|
|
||||||
version = "0.9.6"
|
|
||||||
}
|
|
||||||
time = {
|
|
||||||
source = "hashicorp/time"
|
|
||||||
version = "0.13.1"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Create namespace for monitoring
|
|
||||||
resource "kubernetes_namespace" "monitoring" {
|
|
||||||
metadata {
|
|
||||||
name = "monitoring"
|
|
||||||
labels = {
|
|
||||||
"pod-security.kubernetes.io/enforce" = "privileged"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Deploy kube-prometheus-stack
|
|
||||||
resource "helm_release" "kube_prometheus_stack" {
|
|
||||||
name = "kube-prometheus-stack"
|
|
||||||
repository = "https://prometheus-community.github.io/helm-charts"
|
|
||||||
chart = "kube-prometheus-stack"
|
|
||||||
namespace = kubernetes_namespace.monitoring.metadata[0].name
|
|
||||||
version = "67.2.1" # Check for latest version
|
|
||||||
|
|
||||||
# Wait for CRDs to be created
|
|
||||||
wait = true
|
|
||||||
timeout = 600
|
|
||||||
force_update = false
|
|
||||||
recreate_pods = false
|
|
||||||
|
|
||||||
# Reference the values file
|
|
||||||
values = [
|
|
||||||
file("${path.module}/values.yaml")
|
|
||||||
]
|
|
||||||
|
|
||||||
depends_on = [
|
|
||||||
kubernetes_namespace.monitoring
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
resource "kubectl_manifest" "argocd-tunnel-bind" {
|
|
||||||
depends_on = [helm_release.kube_prometheus_stack]
|
|
||||||
|
|
||||||
yaml_body = templatefile("${path.module}/grafana-ui.yaml", {
|
|
||||||
base_domain = var.cloudflare_domain
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@@ -1,189 +0,0 @@
|
|||||||
# Prometheus configuration
|
|
||||||
prometheus:
|
|
||||||
prometheusSpec:
|
|
||||||
retention: 30d
|
|
||||||
retentionSize: "45GB"
|
|
||||||
|
|
||||||
# Storage configuration
|
|
||||||
storageSpec:
|
|
||||||
volumeClaimTemplate:
|
|
||||||
spec:
|
|
||||||
accessModes:
|
|
||||||
- ReadWriteOnce
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
storage: 50Gi
|
|
||||||
# storageClassName: "your-storage-class" # Uncomment and specify if needed
|
|
||||||
|
|
||||||
# Resource limits
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
cpu: 500m
|
|
||||||
memory: 2Gi
|
|
||||||
limits:
|
|
||||||
cpu: 2000m
|
|
||||||
memory: 4Gi
|
|
||||||
|
|
||||||
# Scrape interval
|
|
||||||
scrapeInterval: 30s
|
|
||||||
evaluationInterval: 30s
|
|
||||||
|
|
||||||
# Service configuration
|
|
||||||
service:
|
|
||||||
type: ClusterIP
|
|
||||||
port: 9090
|
|
||||||
|
|
||||||
# Ingress (disabled by default)
|
|
||||||
ingress:
|
|
||||||
enabled: false
|
|
||||||
# ingressClassName: nginx
|
|
||||||
# hosts:
|
|
||||||
# - prometheus.example.com
|
|
||||||
# tls:
|
|
||||||
# - secretName: prometheus-tls
|
|
||||||
# hosts:
|
|
||||||
# - prometheus.example.com
|
|
||||||
|
|
||||||
# Grafana configuration
|
|
||||||
grafana:
|
|
||||||
enabled: true
|
|
||||||
|
|
||||||
# Admin credentials
|
|
||||||
adminPassword: "admin" # CHANGE THIS IN PRODUCTION!
|
|
||||||
|
|
||||||
# Persistence
|
|
||||||
persistence:
|
|
||||||
enabled: true
|
|
||||||
size: 10Gi
|
|
||||||
# storageClassName: "your-storage-class" # Uncomment and specify if needed
|
|
||||||
|
|
||||||
# Resource limits
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
cpu: 100m
|
|
||||||
memory: 256Mi
|
|
||||||
limits:
|
|
||||||
cpu: 500m
|
|
||||||
memory: 512Mi
|
|
||||||
|
|
||||||
# Service configuration
|
|
||||||
service:
|
|
||||||
type: ClusterIP
|
|
||||||
port: 80
|
|
||||||
|
|
||||||
# Ingress (disabled by default)
|
|
||||||
ingress:
|
|
||||||
enabled: false
|
|
||||||
# ingressClassName: nginx
|
|
||||||
# hosts:
|
|
||||||
# - grafana.example.com
|
|
||||||
# tls:
|
|
||||||
# - secretName: grafana-tls
|
|
||||||
# hosts:
|
|
||||||
# - grafana.example.com
|
|
||||||
|
|
||||||
# Default dashboards
|
|
||||||
defaultDashboardsEnabled: true
|
|
||||||
defaultDashboardsTimezone: Europe/Prague
|
|
||||||
|
|
||||||
# Alertmanager configuration
|
|
||||||
alertmanager:
|
|
||||||
enabled: true
|
|
||||||
|
|
||||||
alertmanagerSpec:
|
|
||||||
# Storage configuration
|
|
||||||
storage:
|
|
||||||
volumeClaimTemplate:
|
|
||||||
spec:
|
|
||||||
accessModes:
|
|
||||||
- ReadWriteOnce
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
storage: 10Gi
|
|
||||||
# storageClassName: "your-storage-class" # Uncomment and specify if needed
|
|
||||||
|
|
||||||
# Resource limits
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
cpu: 100m
|
|
||||||
memory: 128Mi
|
|
||||||
limits:
|
|
||||||
cpu: 500m
|
|
||||||
memory: 256Mi
|
|
||||||
|
|
||||||
# Service configuration
|
|
||||||
service:
|
|
||||||
type: ClusterIP
|
|
||||||
port: 9093
|
|
||||||
|
|
||||||
# Ingress (disabled by default)
|
|
||||||
ingress:
|
|
||||||
enabled: false
|
|
||||||
# ingressClassName: nginx
|
|
||||||
# hosts:
|
|
||||||
# - alertmanager.example.com
|
|
||||||
# tls:
|
|
||||||
# - secretName: alertmanager-tls
|
|
||||||
# hosts:
|
|
||||||
# - alertmanager.example.com
|
|
||||||
|
|
||||||
# Alertmanager configuration
|
|
||||||
config:
|
|
||||||
global:
|
|
||||||
resolve_timeout: 5m
|
|
||||||
|
|
||||||
route:
|
|
||||||
group_by: [ 'alertname', 'cluster', 'service' ]
|
|
||||||
group_wait: 10s
|
|
||||||
group_interval: 10s
|
|
||||||
repeat_interval: 12h
|
|
||||||
receiver: 'null'
|
|
||||||
routes:
|
|
||||||
- match:
|
|
||||||
alertname: Watchdog
|
|
||||||
receiver: 'null'
|
|
||||||
|
|
||||||
receivers:
|
|
||||||
- name: 'null'
|
|
||||||
# Add your receivers here (email, slack, pagerduty, etc.)
|
|
||||||
# - name: 'slack'
|
|
||||||
# slack_configs:
|
|
||||||
# - api_url: 'YOUR_SLACK_WEBHOOK_URL'
|
|
||||||
# channel: '#alerts'
|
|
||||||
# title: '{{ range .Alerts }}{{ .Annotations.summary }}\n{{ end }}'
|
|
||||||
# text: '{{ range .Alerts }}{{ .Annotations.description }}\n{{ end }}'
|
|
||||||
|
|
||||||
# Node Exporter
|
|
||||||
nodeExporter:
|
|
||||||
enabled: true
|
|
||||||
|
|
||||||
# Kube State Metrics
|
|
||||||
kubeStateMetrics:
|
|
||||||
enabled: true
|
|
||||||
|
|
||||||
# Prometheus Operator
|
|
||||||
prometheusOperator:
|
|
||||||
enabled: true
|
|
||||||
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
cpu: 100m
|
|
||||||
memory: 128Mi
|
|
||||||
limits:
|
|
||||||
cpu: 500m
|
|
||||||
memory: 256Mi
|
|
||||||
|
|
||||||
# Service Monitors
|
|
||||||
# Automatically discover and monitor services with appropriate labels
|
|
||||||
prometheus-node-exporter:
|
|
||||||
prometheus:
|
|
||||||
monitor:
|
|
||||||
enabled: true
|
|
||||||
|
|
||||||
# Additional ServiceMonitors can be defined here
|
|
||||||
# additionalServiceMonitors: []
|
|
||||||
|
|
||||||
# Global settings
|
|
||||||
global:
|
|
||||||
rbac:
|
|
||||||
create: true
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
variable "cloudflare_domain" {
|
|
||||||
type = string
|
|
||||||
default = "Base cloudflare domain, e.g. example.com"
|
|
||||||
nullable = false
|
|
||||||
}
|
|
||||||
@@ -1,91 +0,0 @@
|
|||||||
terraform {
|
|
||||||
required_providers {
|
|
||||||
kubectl = {
|
|
||||||
source = "gavinbunney/kubectl"
|
|
||||||
version = "1.19.0"
|
|
||||||
}
|
|
||||||
helm = {
|
|
||||||
source = "hashicorp/helm"
|
|
||||||
version = "3.0.2" # Doporučuji použít novější verzi providera
|
|
||||||
}
|
|
||||||
kubernetes = {
|
|
||||||
source = "hashicorp/kubernetes"
|
|
||||||
version = "2.38.0" # Doporučuji použít novější verzi providera
|
|
||||||
}
|
|
||||||
# Ostatní provideři mohou zůstat
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
resource "kubernetes_namespace" "rabbitmq_namespace" {
|
|
||||||
metadata {
|
|
||||||
name = "rabbitmq-system"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
resource "helm_release" "rabbitmq_operator" {
|
|
||||||
name = "rabbitmq-cluster-operator"
|
|
||||||
repository = "oci://registry-1.docker.io/bitnamicharts"
|
|
||||||
chart = "rabbitmq-cluster-operator"
|
|
||||||
|
|
||||||
version = "4.4.34"
|
|
||||||
|
|
||||||
namespace = "rabbitmq-system"
|
|
||||||
|
|
||||||
# Zde můžete přepsat výchozí hodnoty chartu, pokud by bylo potřeba
|
|
||||||
# Například sledovat jen určité namespace, nastavit tolerations atd.
|
|
||||||
# Pro základní instalaci není potřeba nic měnit.
|
|
||||||
# values = [
|
|
||||||
# templatefile("${path.module}/values/operator-values.yaml", {})
|
|
||||||
# ]
|
|
||||||
set = [
|
|
||||||
{
|
|
||||||
name = "rabbitmqImage.repository"
|
|
||||||
value = "bitnamilegacy/rabbitmq"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name = "clusterOperator.image.repository"
|
|
||||||
value = "bitnamilegacy/rabbitmq-cluster-operator"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name = "msgTopologyOperator.image.repository"
|
|
||||||
value = "bitnamilegacy/rmq-messaging-topology-operator"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name = "credentialUpdaterImage.repository"
|
|
||||||
value = "bitnamilegacy/rmq-default-credential-updater"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name = "clusterOperator.metrics.service.enabled"
|
|
||||||
value = "true"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name = "clusterOperator.metrics.service.enabled"
|
|
||||||
value = "true"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
depends_on = [kubernetes_namespace.rabbitmq_namespace]
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
resource "kubectl_manifest" "rabbitmq_cluster" {
|
|
||||||
yaml_body = templatefile("${path.module}/rabbit-cluster.yaml", {
|
|
||||||
replicas = var.rabbitmq_replicas
|
|
||||||
password = var.rabbitmq-password
|
|
||||||
})
|
|
||||||
|
|
||||||
depends_on = [
|
|
||||||
helm_release.rabbitmq_operator
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
resource "kubectl_manifest" "rabbit_ui" {
|
|
||||||
yaml_body = templatefile("${path.module}/rabbit-ui.yaml", {
|
|
||||||
base_domain = var.base_domain
|
|
||||||
})
|
|
||||||
|
|
||||||
depends_on = [
|
|
||||||
kubectl_manifest.rabbitmq_cluster
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
apiVersion: rabbitmq.com/v1beta1
|
|
||||||
kind: RabbitmqCluster
|
|
||||||
metadata:
|
|
||||||
name: 'rabbitmq-cluster'
|
|
||||||
namespace: "rabbitmq-system"
|
|
||||||
@@ -1,8 +1,7 @@
|
|||||||
FROM python:3.11-slim
|
FROM python:3.11-slim
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
COPY . .
|
COPY . .
|
||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
CMD alembic upgrade head && uvicorn app.app:fastApi --host 0.0.0.0 --port 8000
|
CMD alembic upgrade head && uvicorn app.app:app --host 0.0.0.0 --port 8000
|
||||||
@@ -11,7 +11,7 @@ script_location = %(here)s/alembic
|
|||||||
# Uncomment the line below if you want the files to be prepended with date and time
|
# Uncomment the line below if you want the files to be prepended with date and time
|
||||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||||
# for all available tokens
|
# for all available tokens
|
||||||
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||||
|
|
||||||
# sys.path path, will be prepended to sys.path if present.
|
# sys.path path, will be prepended to sys.path if present.
|
||||||
# defaults to the current working directory. for multiple paths, the path separator
|
# defaults to the current working directory. for multiple paths, the path separator
|
||||||
@@ -25,8 +25,7 @@ if not DATABASE_URL:
|
|||||||
|
|
||||||
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
||||||
|
|
||||||
host_env = os.getenv("MARIADB_HOST", "localhost")
|
ssl_enabled = os.getenv("MARIADB_HOST", "localhost") != "localhost"
|
||||||
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
|
|
||||||
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
||||||
|
|
||||||
def run_migrations_offline() -> None:
|
def run_migrations_offline() -> None:
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
"""add categories
|
"""Init migration
|
||||||
|
|
||||||
Revision ID: 63e072f09836
|
Revision ID: 81f275275556
|
||||||
Revises:
|
Revises:
|
||||||
Create Date: 2025-10-09 14:56:14.653249
|
Create Date: 2025-09-24 17:39:25.346690
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from typing import Sequence, Union
|
from typing import Sequence, Union
|
||||||
@@ -13,7 +13,7 @@ import sqlalchemy as sa
|
|||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
revision: str = '63e072f09836'
|
revision: str = '81f275275556'
|
||||||
down_revision: Union[str, Sequence[str], None] = None
|
down_revision: Union[str, Sequence[str], None] = None
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
@@ -22,6 +22,12 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||||||
def upgrade() -> None:
|
def upgrade() -> None:
|
||||||
"""Upgrade schema."""
|
"""Upgrade schema."""
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table('transaction',
|
||||||
|
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||||
|
sa.Column('amount', sa.Float(), nullable=False),
|
||||||
|
sa.Column('description', sa.String(length=255), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
op.create_table('user',
|
op.create_table('user',
|
||||||
sa.Column('first_name', sa.String(length=100), nullable=True),
|
sa.Column('first_name', sa.String(length=100), nullable=True),
|
||||||
sa.Column('last_name', sa.String(length=100), nullable=True),
|
sa.Column('last_name', sa.String(length=100), nullable=True),
|
||||||
@@ -34,38 +40,13 @@ def upgrade() -> None:
|
|||||||
sa.PrimaryKeyConstraint('id')
|
sa.PrimaryKeyConstraint('id')
|
||||||
)
|
)
|
||||||
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
|
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
|
||||||
op.create_table('categories',
|
|
||||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
|
||||||
sa.Column('name', sa.String(length=100), nullable=False),
|
|
||||||
sa.Column('description', sa.String(length=255), nullable=True),
|
|
||||||
sa.Column('user_id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
|
||||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
|
||||||
sa.PrimaryKeyConstraint('id'),
|
|
||||||
sa.UniqueConstraint('name')
|
|
||||||
)
|
|
||||||
op.create_table('transaction',
|
|
||||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
|
||||||
sa.Column('amount', sa.Float(), nullable=False),
|
|
||||||
sa.Column('description', sa.String(length=255), nullable=True),
|
|
||||||
sa.Column('user_id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
|
||||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
|
||||||
sa.PrimaryKeyConstraint('id')
|
|
||||||
)
|
|
||||||
op.create_table('category_transaction',
|
|
||||||
sa.Column('id_category', sa.Integer(), nullable=True),
|
|
||||||
sa.Column('id_transaction', sa.Integer(), nullable=True),
|
|
||||||
sa.ForeignKeyConstraint(['id_category'], ['categories.id'], ),
|
|
||||||
sa.ForeignKeyConstraint(['id_transaction'], ['transaction.id'], )
|
|
||||||
)
|
|
||||||
# ### end Alembic commands ###
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
def downgrade() -> None:
|
||||||
"""Downgrade schema."""
|
"""Downgrade schema."""
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
op.drop_table('category_transaction')
|
|
||||||
op.drop_table('transaction')
|
|
||||||
op.drop_table('categories')
|
|
||||||
op.drop_index(op.f('ix_user_email'), table_name='user')
|
op.drop_index(op.f('ix_user_email'), table_name='user')
|
||||||
op.drop_table('user')
|
op.drop_table('user')
|
||||||
|
op.drop_table('transaction')
|
||||||
# ### end Alembic commands ###
|
# ### end Alembic commands ###
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user