Mirror of https://github.com/dat515-2025/Group-8.git (synced 2026-03-22 15:12:08 +01:00)

Compare commits: 3a6ee3dace ... merge/core (274 commits)
.github/workflows/build-image.yaml (vendored, new file, 105 lines)
@@ -0,0 +1,105 @@
name: Build and Push Image

on:
  workflow_call:
    inputs:
      mode:
        description: "Build mode: 'prod' or 'pr'"
        required: true
        type: string
      image_repo:
        description: "Docker image repository (e.g., user/app)"
        required: false
        default: "lukastrkan/cc-app-demo"
        type: string
      context:
        description: "Docker build context path"
        required: false
        default: "7project/src/backend"
        type: string
      pr_number:
        description: "PR number (required when mode=pr)"
        required: false
        type: string
    secrets:
      DOCKER_USER:
        required: true
      DOCKER_PASSWORD:
        required: true
    outputs:
      digest:
        description: "Built image digest"
        value: ${{ jobs.build.outputs.digest }}
      image_repo:
        description: "Image repository used"
        value: ${{ jobs.build.outputs.image_repo }}

jobs:
  build:
    runs-on: ubuntu-latest
    outputs:
      digest: ${{ steps.set.outputs.digest }}
      image_repo: ${{ steps.set.outputs.image_repo }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Compute image repo and tags
        id: meta
        env:
          MODE: ${{ inputs.mode }}
          IMAGE_REPO: ${{ inputs.image_repo }}
          PR: ${{ inputs.pr_number }}
        run: |
          set -euo pipefail
          if [ -z "${IMAGE_REPO:-}" ]; then IMAGE_REPO="lukastrkan/cc-app-demo"; fi
          echo "IMAGE_REPO=$IMAGE_REPO" >> $GITHUB_ENV
          SHA_SHORT="${GITHUB_SHA::12}"
          case "$MODE" in
            prod)
              TAG1="prod-$SHA_SHORT"
              TAG2="latest"
              ;;
            pr)
              if [ -z "${PR:-}" ]; then echo "pr_number input is required for mode=pr"; exit 1; fi
              TAG1="pr-$PR"
              TAG2="pr-$PR-$SHA_SHORT"
              ;;
            *)
              echo "Unknown mode '$MODE' (expected 'prod' or 'pr')"; exit 1;
              ;;
          esac
          echo "TAG1=$TAG1" >> $GITHUB_ENV
          echo "TAG2=$TAG2" >> $GITHUB_ENV

      - name: Build and push image
        id: build
        uses: docker/build-push-action@v5
        with:
          context: ${{ inputs.context }}
          push: true
          tags: |
            ${{ env.IMAGE_REPO }}:${{ env.TAG1 }}
            ${{ env.IMAGE_REPO }}:${{ env.TAG2 }}
          platforms: linux/arm64,linux/amd64

      - name: Set outputs
        id: set
        env:
          IMAGE_REPO: ${{ env.IMAGE_REPO }}
        run: |
          echo "digest=${{ steps.build.outputs.digest }}" >> $GITHUB_OUTPUT
          echo "image_repo=$IMAGE_REPO" >> $GITHUB_OUTPUT
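The reusable workflow exposes the built image digest and repository as outputs. As a rough illustration (not part of the repository), a caller could verify the multi-arch push locally by pulling the image by digest; the digest below is a placeholder standing in for the `digest` output of the build job.

```bash
# Illustrative only: IMAGE_REPO matches the workflow default, DIGEST is a placeholder.
IMAGE_REPO="lukastrkan/cc-app-demo"
DIGEST="sha256:0000000000000000000000000000000000000000000000000000000000000000"

# Pull the exact image that was pushed (content-addressed, independent of tags).
docker pull "${IMAGE_REPO}@${DIGEST}"

# Inspect which platforms are present in the pushed manifest list.
docker buildx imagetools inspect "${IMAGE_REPO}@${DIGEST}"
```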
.github/workflows/deploy-pr.yaml (vendored, new file, 160 lines)
@@ -0,0 +1,160 @@
name: Deploy Preview (PR)

on:
  pull_request:
    types: [opened, reopened, synchronize, closed]

permissions:
  contents: read
  pull-requests: write

jobs:
  test:
    name: Run Python Tests
    if: github.event.action != 'closed'
    uses: ./.github/workflows/run-tests.yml

  build:
    if: github.event.action != 'closed'
    name: Build and push image (reusable)
    uses: ./.github/workflows/build-image.yaml
    with:
      mode: pr
      image_repo: lukastrkan/cc-app-demo
      context: 7project/src/backend
      pr_number: ${{ github.event.pull_request.number }}
    secrets: inherit

  get_urls:
    if: github.event.action != 'closed'
    name: Generate Preview URLs
    uses: ./.github/workflows/url_generator.yml
    with:
      runner: vhs
      mode: pr
      pr_number: ${{ github.event.pull_request.number }}
      base_domain: ${{ vars.PROD_DOMAIN }}
    secrets: inherit

  frontend:
    if: github.event.action != 'closed'
    name: Frontend - Build and Deploy to Cloudflare Pages (PR)
    needs: [get_urls]
    uses: ./.github/workflows/frontend-pages.yml
    with:
      mode: pr
      pr_number: ${{ github.event.pull_request.number }}
      backend_url_scheme: ${{ needs.get_urls.outputs.backend_url_scheme }}
    secrets: inherit

  deploy:
    if: github.event.action != 'closed'
    name: Helm upgrade/install (PR preview)
    runs-on: vhs
    concurrency:
      group: pr-${{ github.event.pull_request.number }}
      cancel-in-progress: false
    needs: [build, frontend, get_urls]
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Helm
        uses: azure/setup-helm@v4

      - name: Setup kubectl
        uses: azure/setup-kubectl@v4

      - name: Configure kubeconfig
        env:
          KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
        run: |
          mkdir -p ~/.kube
          if [ -z "$KUBE_CONFIG" ]; then
            echo "Secret KUBE_CONFIG is required (kubeconfig content)"; exit 1; fi
          echo "$KUBE_CONFIG" > ~/.kube/config
          chmod 600 ~/.kube/config

      - name: Helm upgrade/install PR preview
        env:
          DEV_BASE_DOMAIN: ${{ vars.BASE_DOMAIN }}
          RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
          DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
          DIGEST: ${{ needs.build.outputs.digest }}
          DOMAIN: "${{ needs.get_urls.outputs.backend_url }}"
          DOMAIN_SCHEME: "${{ needs.get_urls.outputs.backend_url_scheme }}"
          FRONTEND_DOMAIN: "${{ needs.get_urls.outputs.frontend_url }}"
          FRONTEND_DOMAIN_SCHEME: "${{ needs.get_urls.outputs.frontend_url_scheme }}"
          UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
        run: |
          PR=${{ github.event.pull_request.number }}
          RELEASE=myapp-pr-$PR
          NAMESPACE=pr-$PR
          helm upgrade --install "$RELEASE" ./7project/src/charts/myapp-chart \
            -n "$NAMESPACE" --create-namespace \
            -f 7project/src/charts/myapp-chart/values-dev.yaml \
            --set prNumber="$PR" \
            --set deployment="pr-$PR" \
            --set domain="$DOMAIN" \
            --set domain_scheme="$DOMAIN_SCHEME" \
            --set frontend_domain="$FRONTEND_DOMAIN" \
            --set frontend_domain_scheme="$FRONTEND_DOMAIN_SCHEME" \
            --set image.digest="$DIGEST" \
            --set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
            --set-string database.password="$DB_PASSWORD" \
            --set-string database.encryptionSecret="$PR" \
            --set-string app.name="finance-tracker-pr-$PR" \
            --set-string unirate.key="$UNIRATE_API_KEY"

      - name: Post preview URLs as PR comment
        uses: actions/github-script@v7
        env:
          BACKEND_URL: ${{ needs.get_urls.outputs.backend_url_scheme }}
          FRONTEND_URL: ${{ needs.get_urls.outputs.frontend_url_scheme }}
        with:
          script: |
            const pr = context.payload.pull_request;
            if (!pr) { core.setFailed('No pull_request context'); return; }
            const prNumber = pr.number;
            const backendUrl = process.env.BACKEND_URL || '(not available)';
            const frontendUrl = process.env.FRONTEND_URL || '(not available)';
            const marker = '<!-- preview-comment-marker -->';
            const body = `${marker}\nPreview environment is running\n- Frontend: ${frontendUrl}\n- Backend: ${backendUrl}\n`;
            const { owner, repo } = context.repo;
            const { data: comments } = await github.rest.issues.listComments({ owner, repo, issue_number: prNumber, per_page: 100 });
            const existing = comments.find(c => c.body && c.body.includes(marker));
            if (existing) {
              await github.rest.issues.updateComment({ owner, repo, comment_id: existing.id, body });
            } else {
              await github.rest.issues.createComment({ owner, repo, issue_number: prNumber, body });
            }

  uninstall:
    if: github.event.action == 'closed'
    name: Helm uninstall (PR preview)
    runs-on: vhs
    steps:
      - name: Setup Helm
        uses: azure/setup-helm@v4

      - name: Setup kubectl
        uses: azure/setup-kubectl@v4

      - name: Configure kubeconfig
        env:
          KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
        run: |
          mkdir -p ~/.kube
          if [ -z "$KUBE_CONFIG" ]; then
            echo "Secret KUBE_CONFIG is required (kubeconfig content)"; exit 1; fi
          echo "$KUBE_CONFIG" > ~/.kube/config
          chmod 600 ~/.kube/config

      - name: Helm uninstall release and cleanup namespace
        run: |
          PR=${{ github.event.pull_request.number }}
          RELEASE=myapp-pr-$PR
          NAMESPACE=pr-$PR
          helm uninstall "$RELEASE" -n "$NAMESPACE" || true
          # Optionally delete the namespace if empty
          kubectl delete namespace "$NAMESPACE" --ignore-not-found=true || true
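Each preview follows a fixed naming pattern: release `myapp-pr-<PR>` in namespace `pr-<PR>`. A minimal sketch for inspecting or tearing down a preview by hand, assuming PR number 42 and a kubeconfig with access to the same cluster:

```bash
# Hypothetical PR number; substitute the pull request being previewed.
PR=42
RELEASE="myapp-pr-$PR"
NAMESPACE="pr-$PR"

# Show the Helm release status and the workloads of the preview environment.
helm status "$RELEASE" -n "$NAMESPACE"
kubectl get pods,svc -n "$NAMESPACE"

# Tear the preview down manually, mirroring the `uninstall` job.
helm uninstall "$RELEASE" -n "$NAMESPACE" || true
kubectl delete namespace "$NAMESPACE" --ignore-not-found=true
```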
.github/workflows/deploy-prod.yaml (vendored, new file, 132 lines)
@@ -0,0 +1,132 @@
name: Deploy Prod

on:
  push:
    branches: [ "main" ]
    paths:
      - ../../7project/src/backend/**
      - ../../7project/src/frontend/**
      - ../../7project/src/charts/myapp-chart/**
      - .github/workflows/deploy-prod.yaml
      - .github/workflows/build-image.yaml
      - .github/workflows/frontend-pages.yml
  workflow_dispatch:

permissions:
  contents: read

concurrency:
  group: deploy-prod
  cancel-in-progress: false

jobs:
  test:
    name: Run Python Tests
    uses: ./.github/workflows/run-tests.yml

  build:
    name: Build and push image (reusable)
    needs: [test]
    uses: ./.github/workflows/build-image.yaml
    with:
      mode: prod
      image_repo: lukastrkan/cc-app-demo
      context: 7project/src/backend
    secrets: inherit

  get_urls:
    name: Generate Production URLs
    needs: [test]
    uses: ./.github/workflows/url_generator.yml
    with:
      mode: prod
      runner: vhs
      base_domain: ${{ vars.PROD_DOMAIN }}
    secrets: inherit

  frontend:
    name: Frontend - Build and Deploy to Cloudflare Pages (prod)
    needs: [get_urls]
    uses: ./.github/workflows/frontend-pages.yml
    with:
      mode: prod
      backend_url_scheme: ${{ needs.get_urls.outputs.backend_url_scheme }}
    secrets: inherit

  deploy:
    name: Helm upgrade/install (prod)
    runs-on: vhs
    needs: [build, frontend, get_urls]
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Helm
        uses: azure/setup-helm@v4

      - name: Setup kubectl
        uses: azure/setup-kubectl@v4

      - name: Configure kubeconfig
        env:
          KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
        run: |
          mkdir -p ~/.kube
          if [ -z "$KUBE_CONFIG" ]; then
            echo "Secret KUBE_CONFIG is required (kubeconfig content)"; exit 1; fi
          echo "$KUBE_CONFIG" > ~/.kube/config
          chmod 600 ~/.kube/config

      - name: Helm upgrade/install prod
        env:
          DOMAIN: ${{ needs.get_urls.outputs.backend_url }}
          DOMAIN_SCHEME: ${{ needs.get_urls.outputs.backend_url_scheme }}
          FRONTEND_DOMAIN: ${{ needs.get_urls.outputs.frontend_url }}
          FRONTEND_DOMAIN_SCHEME: ${{ needs.get_urls.outputs.frontend_url_scheme }}
          RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
          DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
          DIGEST: ${{ needs.build.outputs.digest }}
          BANKID_CLIENT_ID: ${{ secrets.BANKID_CLIENT_ID }}
          BANKID_CLIENT_SECRET: ${{ secrets.BANKID_CLIENT_SECRET }}
          MOJEID_CLIENT_ID: ${{ secrets.MOJEID_CLIENT_ID }}
          MOJEID_CLIENT_SECRET: ${{ secrets.MOJEID_CLIENT_SECRET }}
          CSAS_CLIENT_ID: ${{ secrets.CSAS_CLIENT_ID }}
          CSAS_CLIENT_SECRET: ${{ secrets.CSAS_CLIENT_SECRET }}
          SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
          SMTP_HOST: ${{ secrets.SMTP_HOST }}
          SMTP_PORT: ${{ secrets.SMTP_PORT }}
          SMTP_USERNAME: ${{ secrets.SMTP_USERNAME }}
          SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
          SMTP_USE_TLS: ${{ secrets.SMTP_USE_TLS }}
          SMTP_USE_SSL: ${{ secrets.SMTP_USE_SSL }}
          SMTP_FROM: ${{ secrets.SMTP_FROM }}
          UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
        run: |
          helm upgrade --install myapp ./7project/src/charts/myapp-chart \
            -n prod --create-namespace \
            -f 7project/src/charts/myapp-chart/values-prod.yaml \
            --set deployment="prod" \
            --set domain="$DOMAIN" \
            --set domain_scheme="$DOMAIN_SCHEME" \
            --set frontend_domain="$FRONTEND_DOMAIN" \
            --set frontend_domain_scheme="$FRONTEND_DOMAIN_SCHEME" \
            --set image.digest="$DIGEST" \
            --set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
            --set-string database.password="$DB_PASSWORD" \
            --set-string oauth.bankid.clientId="$BANKID_CLIENT_ID" \
            --set-string oauth.bankid.clientSecret="$BANKID_CLIENT_SECRET" \
            --set-string oauth.mojeid.clientId="$MOJEID_CLIENT_ID" \
            --set-string oauth.mojeid.clientSecret="$MOJEID_CLIENT_SECRET" \
            --set-string oauth.csas.clientId="$CSAS_CLIENT_ID" \
            --set-string oauth.csas.clientSecret="$CSAS_CLIENT_SECRET" \
            --set-string sentry_dsn="$SENTRY_DSN" \
            --set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}" \
            --set-string smtp.host="$SMTP_HOST" \
            --set smtp.port="$SMTP_PORT" \
            --set-string smtp.username="$SMTP_USERNAME" \
            --set-string smtp.password="$SMTP_PASSWORD" \
            --set-string smtp.tls="$SMTP_USE_TLS" \
            --set-string smtp.ssl="$SMTP_USE_SSL" \
            --set-string smtp.from="$SMTP_FROM" \
            --set-string unirate.key="$UNIRATE_API_KEY"
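Because the production job passes many `--set` overrides, it can help to preview the rendered manifests before a real rollout. A minimal sketch using Helm's dry-run mode with placeholder values; in CI the real values come from repository secrets and the `get_urls` and `build` outputs:

```bash
# Placeholder values standing in for the job's secrets and outputs.
DOMAIN="api.example.com"
FRONTEND_DOMAIN="finance.example.com"
DIGEST="sha256:0000000000000000000000000000000000000000000000000000000000000000"

# Render the chart without touching the cluster, to review what would be applied.
helm upgrade --install myapp ./7project/src/charts/myapp-chart \
  -n prod --create-namespace \
  -f 7project/src/charts/myapp-chart/values-prod.yaml \
  --set deployment="prod" \
  --set domain="$DOMAIN" \
  --set frontend_domain="$FRONTEND_DOMAIN" \
  --set image.digest="$DIGEST" \
  --dry-run --debug
```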
.github/workflows/frontend-pages.yml (vendored, new file, 135 lines)
@@ -0,0 +1,135 @@
name: Frontend - Build and Deploy to Cloudflare Pages

on:
  workflow_call:
    inputs:
      mode:
        description: "Build mode: 'prod' or 'pr'"
        required: true
        type: string
      pr_number:
        description: 'PR number (required when mode=pr)'
        required: false
        type: string
      project_name:
        description: 'Cloudflare Pages project name (overrides default)'
        required: false
        type: string
      backend_url_scheme:
        description: 'The full scheme URL for the backend (e.g., https://api.example.com)'
        required: true
        type: string
    secrets:
      CLOUDFLARE_API_TOKEN:
        required: true
      CLOUDFLARE_ACCOUNT_ID:
        required: true
    outputs:
      deployed_url:
        description: 'URL of deployed frontend'
        value: ${{ jobs.deploy.outputs.deployed_url }}

jobs:
  build:
    name: Build frontend
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: 7project/src/frontend
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Use Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'
          cache-dependency-path: 7project/src/frontend/package-lock.json

      - name: Install dependencies
        run: npm ci

      - name: Set backend URL from workflow input
        run: |
          echo "VITE_BACKEND_URL=${{ inputs.backend_url_scheme }}" >> $GITHUB_ENV

      - name: Build
        run: npm run build

      - name: Upload build artifact
        uses: actions/upload-artifact@v4
        with:
          name: frontend-dist
          path: 7project/src/frontend/dist

  deploy:
    name: Deploy to Cloudflare Pages
    needs: build
    runs-on: ubuntu-latest
    outputs:
      deployed_url: ${{ steps.out.outputs.deployed_url }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Download build artifact
        uses: actions/download-artifact@v4
        with:
          name: frontend-dist
          path: dist

      - name: Determine project name and branch
        id: pname
        env:
          INPUT_MODE: ${{ inputs.mode }}
          INPUT_PR: ${{ inputs.pr_number }}
        run: |
          set -euo pipefail
          # Prefer manual input, then repo variable, fallback to repo-name
          INPUT_NAME='${{ inputs.project_name }}'
          VAR_NAME='${{ vars.CF_PAGES_PROJECT_NAME }}'
          if [ -n "$INPUT_NAME" ]; then PNAME_RAW="$INPUT_NAME";
          elif [ -n "$VAR_NAME" ]; then PNAME_RAW="$VAR_NAME";
          else PNAME_RAW="${GITHUB_REPOSITORY##*/}-frontend"; fi
          # Normalize project name to lowercase to satisfy Cloudflare Pages naming
          PNAME="${PNAME_RAW,,}"
          # Determine branch for Pages
          if [ "${INPUT_MODE}" = "pr" ]; then
            if [ -z "${INPUT_PR}" ]; then echo "pr_number is required when mode=pr"; exit 1; fi
            PBRANCH="pr-${INPUT_PR}"
          else
            PBRANCH="main"
          fi
          echo "project_name=$PNAME" >> $GITHUB_OUTPUT
          echo "branch=$PBRANCH" >> $GITHUB_OUTPUT

      - name: Ensure Cloudflare Pages project exists
        env:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          PNAME: ${{ steps.pname.outputs.project_name }}
        run: |
          set -euo pipefail
          npx wrangler pages project create "$PNAME" --production-branch=main || true

      - name: Deploy using Cloudflare Wrangler
        uses: cloudflare/wrangler-action@v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: pages deploy dist --project-name=${{ steps.pname.outputs.project_name }} --branch=${{ steps.pname.outputs.branch }}

      - name: Compute deployed URL
        id: out
        env:
          PNAME: ${{ steps.pname.outputs.project_name }}
          PBRANCH: ${{ steps.pname.outputs.branch }}
        run: |
          set -euo pipefail
          if [ "$PBRANCH" = "main" ]; then
            URL="https://${PNAME}.pages.dev"
          else
            URL="https://${PBRANCH}.${PNAME}.pages.dev"
          fi
          echo "deployed_url=$URL" >> $GITHUB_OUTPUT
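The same frontend build can be reproduced outside CI. A sketch, assuming Node 20, `CLOUDFLARE_API_TOKEN` and `CLOUDFLARE_ACCOUNT_ID` set in the environment, and a placeholder backend URL standing in for the `backend_url_scheme` input; the project name mirrors the `group-8-frontend` convention used elsewhere in these workflows:

```bash
cd 7project/src/frontend

# Point the Vite build at a backend; this placeholder mirrors backend_url_scheme.
export VITE_BACKEND_URL="https://api.example.com"

npm ci
npm run build

# Deploy the built assets; project and branch follow the workflow's naming scheme.
npx wrangler pages deploy dist --project-name=group-8-frontend --branch=main
```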
.github/workflows/run-tests.yml (vendored, new file, 66 lines)
@@ -0,0 +1,66 @@
name: Run Python Tests
permissions:
  contents: read

on:
  workflow_call:

jobs:
  build-and-test:
    runs-on: ubuntu-latest

    services:
      mariadb:
        image: mariadb:11.4
        env:
          MARIADB_ROOT_PASSWORD: rootpw
          MARIADB_DATABASE: group_project
          MARIADB_USER: appuser
          MARIADB_PASSWORD: apppass
        ports:
          - 3306:3306
        options: >-
          --health-cmd="mariadb-admin ping -h 127.0.0.1 -u root -prootpw --silent"
          --health-interval=5s
          --health-timeout=2s
          --health-retries=20

    env:
      MARIADB_HOST: 127.0.0.1
      MARIADB_PORT: "3306"
      MARIADB_DB: group_project
      MARIADB_USER: appuser
      MARIADB_PASSWORD: apppass
      # Ensure the application uses MariaDB (async) during tests
      DATABASE_URL: mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project
      DISABLE_METRICS: "1"

    steps:
      - name: Check out repository code
        uses: actions/checkout@v4

      - name: Set up Python 3.11
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Add test dependencies to requirements
        run: |
          echo "pytest==8.4.2" >> ./7project/src/backend/requirements.txt
          echo "pytest-asyncio==1.2.0" >> ./7project/src/backend/requirements.txt

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r ./7project/src/backend/requirements.txt

      - name: Run Alembic migrations
        run: |
          alembic upgrade head
        working-directory: ./7project/src/backend

      - name: Run tests with pytest
        env:
          PYTEST_RUN_CONFIG: "True"
        run: pytest
        working-directory: ./7project/src/backend
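The CI database service can be approximated locally with Docker before running the same migration and test steps. A sketch, assuming Docker and Python 3.11 are installed and run from the repository root:

```bash
# Start a throwaway MariaDB matching the service container in the workflow.
docker run -d --name test-mariadb -p 3306:3306 \
  -e MARIADB_ROOT_PASSWORD=rootpw \
  -e MARIADB_DATABASE=group_project \
  -e MARIADB_USER=appuser \
  -e MARIADB_PASSWORD=apppass \
  mariadb:11.4

# Same connection settings the workflow exports for the application under test.
export DATABASE_URL="mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project"
export DISABLE_METRICS=1
export PYTEST_RUN_CONFIG=True

cd 7project/src/backend
pip install -r requirements.txt pytest pytest-asyncio
alembic upgrade head
pytest

# Clean up the database container afterwards.
docker rm -f test-mariadb
```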
.github/workflows/url_generator.yml (vendored, new file, 74 lines)
@@ -0,0 +1,74 @@
name: Generate Preview or Production URLs

on:
  workflow_call:
    inputs:
      mode:
        description: "Build mode: 'prod' or 'pr'"
        required: true
        type: string
      pr_number:
        description: 'PR number (required when mode=pr)'
        required: false
        type: string
      runner:
        description: 'The runner to use for this job'
        required: false
        type: string
        default: 'ubuntu-latest'
      base_domain:
        description: 'The base domain for production URLs (e.g., example.com)'
        required: true
        type: string

    outputs:
      backend_url:
        description: "The backend URL without scheme (e.g., api.example.com)"
        value: ${{ jobs.generate-urls.outputs.backend_url }}
      frontend_url:
        description: "The frontend URL without scheme (e.g., app.example.com)"
        value: ${{ jobs.generate-urls.outputs.frontend_url }}
      backend_url_scheme:
        description: "The backend URL with scheme (e.g., https://api.example.com)"
        value: ${{ jobs.generate-urls.outputs.backend_url_scheme }}
      frontend_url_scheme:
        description: "The frontend URL with scheme (e.g., https://app.example.com)"
        value: ${{ jobs.generate-urls.outputs.frontend_url_scheme }}

jobs:
  generate-urls:
    permissions:
      contents: none
    runs-on: ${{ inputs.runner }}

    outputs:
      backend_url: ${{ steps.set_urls.outputs.backend_url }}
      frontend_url: ${{ steps.set_urls.outputs.frontend_url }}
      backend_url_scheme: ${{ steps.set_urls.outputs.backend_url_scheme }}
      frontend_url_scheme: ${{ steps.set_urls.outputs.frontend_url_scheme }}

    steps:
      - name: Generate URLs
        id: set_urls
        env:
          BASE_DOMAIN: ${{ inputs.base_domain }}
        run: |
          set -euo pipefail

          if [ "${{ inputs.mode }}" = "prod" ]; then
            BACKEND_URL="api.${BASE_DOMAIN}"
            FRONTEND_URL="finance.${BASE_DOMAIN}"
          else
            # This is your current logic
            FRONTEND_URL="pr-${{ inputs.pr_number }}.group-8-frontend.pages.dev"
            BACKEND_URL="api-pr-${{ inputs.pr_number }}.${BASE_DOMAIN}"
          fi

          FRONTEND_URL_SCHEME="https://$FRONTEND_URL"
          BACKEND_URL_SCHEME="https://$BACKEND_URL"

          # This part correctly writes to GITHUB_OUTPUT for the step
          echo "backend_url_scheme=$BACKEND_URL_SCHEME" >> $GITHUB_OUTPUT
          echo "frontend_url_scheme=$FRONTEND_URL_SCHEME" >> $GITHUB_OUTPUT
          echo "backend_url=$BACKEND_URL" >> $GITHUB_OUTPUT
          echo "frontend_url=$FRONTEND_URL" >> $GITHUB_OUTPUT
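The naming scheme is deterministic, so the URLs for any PR or for production can be computed offline. A small sketch with a placeholder base domain and PR number (in CI these come from the `base_domain` and `pr_number` inputs):

```bash
# Placeholders for the workflow inputs.
BASE_DOMAIN="example.com"
PR=42
MODE="pr"

if [ "$MODE" = "prod" ]; then
  BACKEND_URL="api.${BASE_DOMAIN}"
  FRONTEND_URL="finance.${BASE_DOMAIN}"
else
  FRONTEND_URL="pr-${PR}.group-8-frontend.pages.dev"
  BACKEND_URL="api-pr-${PR}.${BASE_DOMAIN}"
fi

echo "backend:  https://${BACKEND_URL}"   # e.g. https://api-pr-42.example.com
echo "frontend: https://${FRONTEND_URL}"  # e.g. https://pr-42.group-8-frontend.pages.dev
```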
.github/workflows/workflow.yml (vendored, 54 lines deleted)
@@ -1,54 +0,0 @@
name: Build, Push and Update Image in Manifest

on:
  push:
    branches: [ "main" ]
    paths:
      - '7project/backend/**'
  workflow_dispatch:

jobs:
  build-and-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      packages: write

    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Build and push Docker image
        id: build
        uses: docker/build-push-action@v5
        with:
          context: ./7project/backend
          push: true
          tags: ${{ secrets.DOCKER_USER }}/cc-app-demo:latest

      - name: Get image digest
        run: echo "IMAGE_DIGEST=${{ steps.build.outputs.digest }}" >> $GITHUB_ENV

      #- name: Update manifests with new image digest
      #  uses: OpsVerseIO/image-updater-action@0.1.0
      #  with:
      #    branch: main
      #    targetBranch: main
      #    createPR: 'false'
      #    message: "${{ github.event.head_commit.message }}"
      #    token: ${{ secrets.GITHUB_TOKEN }}
      #    changes: |
      #      {
      #        "deployment/app-demo-deployment.yaml": {
      #          "spec.template.spec.containers[0].image": "${{ secrets.DOCKER_USER }}/cc-app-demo@${{ env.IMAGE_DIGEST }}"
      #        },
      #        "deployment/app-demo-worker-deployment.yaml": {
      #          "spec.template.spec.containers[0].image": "${{ secrets.DOCKER_USER }}/cc-app-demo@${{ env.IMAGE_DIGEST }}"
      #        }
      #      }
.idea/.gitignore (generated, vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
@@ -45,11 +45,11 @@ flowchart LR
     proc_cron[Task planner] --> proc_queue
     proc_queue_worker --> ext_bank[(Bank API)]
     proc_queue_worker --> db
-    client[Client/UI] --> api[API Gateway / Web Server]
+    client[Client/UI] <--> api[API Gateway / Web Server]
-    api --> svc[Web API]
+    api <--> svc[Web API]
     svc --> proc_queue
-    svc --> db[(Database)]
+    svc <--> db[(Database)]
-    svc --> cache[(Cache)]
+    svc <--> cache[(Cache)]
 ```

 - Components and responsibilities: What does each box do?
7project/.gitignore (vendored, 16 changed lines)
@@ -1,8 +1,8 @@
-/tofu/controlplane.yaml
+/src/tofu/controlplane.yaml
-/tofu/kubeconfig
+/src/tofu/kubeconfig
-/tofu/talosconfig
+/src/tofu/talosconfig
-/tofu/terraform.tfstate
+/src/tofu/terraform.tfstate
-/tofu/terraform.tfstate.backup
+/src/tofu/terraform.tfstate.backup
-/tofu/worker.yaml
+/src/tofu/worker.yaml
-/tofu/.terraform.lock.hcl
+/src/tofu/.terraform.lock.hcl
-/tofu/.terraform/
+/src/tofu/.terraform/
7project/.idea/.gitignore (generated, vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
@@ -1,43 +1,6 @@
-# Lab 6: Design Document for Course Project
-
-| Lab 6: | Design Document for Course Project |
-| ----------- | ---------------------------------- |
-| Subject: | DAT515 Cloud Computing |
-| Deadline: | **September 19, 2025 23:59** |
-| Grading: | No Grade |
-| Submission: | Group |
-
-## Table of Contents
-
-- [Table of Contents](#table-of-contents)
-- [1. Design Document (design.md)](#1-design-document-designmd)
-
-The design document is the first deliverable for your project.
-We separated this out as a separate deliverable, with its own deadline, to ensure that you have a clear plan before you start coding.
-This part only needs a cursory review by the teaching staff to ensure it is sufficiently comprehensive, while still realistic.
-The teaching staff will assign you to a project mentor who will provide guidance and support throughout the development process.
-
-## 1. Design Document (design.md)
-
-You are required to prepare a design document for your application.
-The design doc should be brief, well-organized and easy to understand.
-The design doc should be prepared in markdown format and named `design.md` and submitted in the project group's repository.
-Remember that you can use [mermaid diagrams](https://github.com/mermaid-js/mermaid#readme) in markdown files.
-
-The design doc **should include** the following sections:
-
-- **Overview**: A brief description of the application and its purpose.
-- **Architecture**: The high-level architecture of the application, including components, interactions, and data flow.
-- **Technologies**: The cloud computing technologies or services used in the application.
-- **Deployment**: The deployment strategy for the application, including any infrastructure requirements.
-
-The design document should be updated throughout the development process and reflect the final implementation of your project.
-
-Optional sections may include:
-
-- Security: The security measures implemented in the application to protect data and resources.
-- Scalability: The scalability considerations for the application, including load balancing and auto-scaling.
-- Monitoring: The monitoring and logging strategy for the application to track performance and detect issues.
-- Disaster Recovery: The disaster recovery plan for the application to ensure business continuity in case of failures.
-- Cost Analysis: The cost analysis of running the application on the cloud, including pricing models and cost-saving strategies.
-- References: Any external sources or references used in the design document.
+# Personal Finance Tracker
+## Folder Structure
+- meetings: Contains note from meetings
+- scr: Source code for the project
+- checklist: Project checklist and self assessment tracking
+- report.md: Detailed report of the project
@@ -1,56 +0,0 @@
from fastapi import Depends, FastAPI
from fastapi.middleware.cors import CORSMiddleware

from app.models.user import User

from app.schemas.user import UserCreate, UserRead, UserUpdate
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users

app = FastAPI()

# CORS for frontend dev server
app.add_middleware(
    CORSMiddleware,
    allow_origins=[
        "http://localhost:5173",
        "http://127.0.0.1:5173",
    ],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

app.include_router(
    fastapi_users.get_auth_router(auth_backend), prefix="/auth/jwt", tags=["auth"]
)
app.include_router(
    fastapi_users.get_register_router(UserRead, UserCreate),
    prefix="/auth",
    tags=["auth"],
)
app.include_router(
    fastapi_users.get_reset_password_router(),
    prefix="/auth",
    tags=["auth"],
)
app.include_router(
    fastapi_users.get_verify_router(UserRead),
    prefix="/auth",
    tags=["auth"],
)
app.include_router(
    fastapi_users.get_users_router(UserRead, UserUpdate),
    prefix="/users",
    tags=["users"],
)


# Liveness/root endpoint
@app.get("/", include_in_schema=False)
async def root():
    return {"status": "ok"}


@app.get("/authenticated-route")
async def authenticated_route(user: User = Depends(current_active_verified_user)):
    return {"message": f"Hello {user.email}!"}
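For reference, the routers this module mounted follow the FastAPI Users defaults, so the removed API could be exercised roughly as below. This is a sketch only; it assumes a server on localhost:8000 and the library's standard register/login payloads, and the bearer token shown is a placeholder.

```bash
# Register a user (JSON body, per the FastAPI Users register router).
curl -X POST http://localhost:8000/auth/register \
  -H "Content-Type: application/json" \
  -d '{"email": "user@example.com", "password": "changeme"}'

# Log in against the JWT auth router (form-encoded username/password).
curl -X POST http://localhost:8000/auth/jwt/login \
  -d "username=user@example.com" -d "password=changeme"

# Call the protected route with the returned token (placeholder shown).
curl http://localhost:8000/authenticated-route \
  -H "Authorization: Bearer <access_token>"

# Liveness endpoint wired at the root path.
curl http://localhost:8000/
```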
@@ -1,6 +0,0 @@
import app.celery_app  # noqa: F401
from app.workers.celery_tasks import send_email


def enqueue_email(to: str, subject: str, body: str) -> None:
    send_email.delay(to, subject, body)
@@ -1,9 +0,0 @@
from sqlalchemy import Column, Integer, String, Float
from app.core.base import Base

class Transaction(Base):
    __tablename__ = "transaction"
    id = Column(Integer, primary_key=True, autoincrement=True)
    amount = Column(Float, nullable=False)
    description = Column(String(length=255), nullable=True)
@@ -1,7 +0,0 @@
from sqlalchemy import Column, String
from fastapi_users.db import SQLAlchemyBaseUserTableUUID
from app.core.base import Base

class User(SQLAlchemyBaseUserTableUUID, Base):
    first_name = Column(String(length=100), nullable=True)
    last_name = Column(String(length=100), nullable=True)
@@ -1,19 +0,0 @@
import logging

from celery import shared_task

logger = logging.getLogger("celery_tasks")
if not logger.handlers:
    _h = logging.StreamHandler()
    logger.addHandler(_h)
logger.setLevel(logging.INFO)


@shared_task(name="workers.send_email")
def send_email(to: str, subject: str, body: str) -> None:
    if not (to and subject and body):
        logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
        return

    # Placeholder for real email sending logic
    logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
@@ -1,4 +0,0 @@
import uvicorn

if __name__ == "__main__":
    uvicorn.run("app.app:app", host="0.0.0.0", log_level="info")
7project/checklist.md (new file, 81 lines)
@@ -0,0 +1,81 @@
# Project Evaluation Checklist

The group earns points by completing items from the categories below.
You are not expected to complete all items.
Focus on areas that align with your project goals and interests.

The core deliverables are required.
This means that you must get at least 2 points for each item in this category.

| **Category** | **Item** | **Max Points** | **Points** | **Comments** |
|----------------------------------|------------------------------------------|----------------|--------------------------------------------------|--------------|
| **Core Deliverables (Required)** | | | | |
| Codebase & Organization | Well-organized project structure | 5 | 5 | |
| | Clean, readable code | 5 | 4 | |
| | Use planning tool (e.g., GitHub issues) | 5 | 4 | |
| | Proper version control usage | 5 | 5 | |
| 23 | Complete source code | 5 | 5 | |
| Documentation | Comprehensive reproducibility report | 10 | 4-5 | |
| | Updated design document | 5 | 2 | |
| | Clear build/deployment instructions | 5 | 2 | |
| | Troubleshooting guide | 5 | 1 | |
| | Completed self-assessment table | 5 | 2 | |
| 14 | Hour sheets for all members | 5 | 3 | |
| Presentation Video | Project demonstration | 5 | 0 | |
| | Code walk-through | 5 | 0 | |
| 0 | Deployment showcase | 5 | 0 | |
| **Technical Implementation** | | | | |
| Application Functionality | Basic functionality works | 10 | 8 | |
| | Advanced features implemented | 10 | 0 | |
| | Error handling & robustness | 10 | 4 | |
| 16 | User-friendly interface | 5 | 4 | |
| Backend & Architecture | Stateless web server | 5 | 5 | |
| | Stateful application | 10 | ? WHAT DOES THIS MEAN | |
| | Database integration | 10 | 10 | |
| | API design | 5 | 5 | |
| 20 | Microservices architecture | 10 | 0 | |
| Cloud Integration | Basic cloud deployment | 10 | 10 | |
| | Cloud APIs usage | 10 | ? WHAT DOES THIS MEAN | |
| | Serverless components | 10 | 0 | |
| 10 | Advanced cloud services | 5 | 0 | |
| **DevOps & Deployment** | | | | |
| Containerization | Basic Dockerfile | 5 | 5 | |
| | Optimized Dockerfile | 5 | 0 | |
| | Docker Compose | 5 | 5 - dev only | |
| 15 | Persistent storage | 5 | 5 | |
| Deployment & Scaling | Manual deployment | 5 | 5 | |
| | Automated deployment | 5 | 5 | |
| | Multiple replicas | 5 | 5 | |
| 20 | Kubernetes deployment | 10 | 10 | |
| **Quality Assurance** | | | | |
| Testing | Unit tests | 5 | 2 | |
| | Integration tests | 5 | 2 | |
| | End-to-end tests | 5 | 5 | |
| 9 | Performance testing | 5 | 0 | |
| Monitoring & Operations | Health checks | 5 | 5 | |
| | Logging | 5 | 2 - only to terminal, add Logstash | |
| 9 | Metrics/Monitoring | 5 | 2 - only DB, need to create Prometheus endpoint | |
| Security | HTTPS/TLS | 5 | 5 | |
| | Authentication | 5 | 5 | |
| 15 | Authorization | 5 | 5 | |
| **Innovation & Excellence** | | | | |
| Advanced Features and | AI/ML Integration | 10 | 0 | |
| Technical Excellence | Real-time features | 10 | 0 | |
| | Creative problem solving | 10 | ? | |
| | Performance optimization | 5 | 2 | |
| 2 | Exceptional user experience | 5 | 0 | |
| **Total** | | **255** | **153** | |

## Grading Scale

- **Minimum Required: 100 points**
- **Maximum: 200+ points**

| Grade | Points |
| ----- | -------- |
| A | 180-200+ |
| B | 160-179 |
| C | 140-159 |
| D | 120-139 |
| E | 100-119 |
| F | 0-99 |
@@ -1,20 +0,0 @@
apiVersion: k8s.mariadb.com/v1alpha1
kind: Grant
metadata:
  name: grant
spec:
  mariaDbRef:
    name: mariadb-repl
    namespace: mariadb-operator
  privileges:
    - "ALL PRIVILEGES"
  database: "app-demo-database"
  table: "*"
  username: "app-demo-user"
  grantOption: true
  host: "%"
  # Delete the resource in the database whenever the CR gets deleted.
  # Alternatively, you can specify Skip in order to omit deletion.
  cleanupPolicy: Skip
  requeueInterval: 10h
  retryInterval: 30s
@@ -1,7 +0,0 @@
apiVersion: v1
kind: Secret
metadata:
  name: app-demo-database-secret
type: kubernetes.io/basic-auth
stringData:
  password: "strongpassword"
@@ -1,20 +0,0 @@
apiVersion: k8s.mariadb.com/v1alpha1
kind: User
metadata:
  name: app-demo-user
spec:
  # If you want the user to be created with a different name than the resource name
  # name: user-custom
  mariaDbRef:
    name: mariadb-repl
    namespace: mariadb-operator
  passwordSecretKeyRef:
    name: app-demo-database-secret
    key: password
  maxUserConnections: 20
  host: "%"
  # Delete the resource in the database whenever the CR gets deleted.
  # Alternatively, you can specify Skip in order to omit deletion.
  cleanupPolicy: Skip
  requeueInterval: 10h
  retryInterval: 30s
@@ -1,15 +0,0 @@
apiVersion: k8s.mariadb.com/v1alpha1
kind: Database
metadata:
  name: app-demo-database
spec:
  mariaDbRef:
    name: mariadb-repl
    namespace: mariadb-operator
  characterSet: utf8
  collate: utf8_general_ci
  # Delete the resource in the database whenever the CR gets deleted.
  # Alternatively, you can specify Skip in order to omit deletion.
  cleanupPolicy: Skip
  requeueInterval: 10h
  retryInterval: 30s
@@ -1,48 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: app-demo
spec:
  replicas: 3
  revisionHistoryLimit: 3
  selector:
    matchLabels:
      app: app-demo
  template:
    metadata:
      labels:
        app: app-demo
    spec:
      containers:
        - image: lukastrkan/cc-app-demo@sha256:75634b4d97282b6b8424fe17767c81adf44af5f7359c1d25883073b5629b3e05
          name: app-demo
          ports:
            - containerPort: 8000
          env:
            - name: MARIADB_HOST
              value: mariadb-repl.mariadb-operator.svc.cluster.local
            - name: MARIADB_PORT
              value: '3306'
            - name: MARIADB_DB
              value: app-demo-database
            - name: MARIADB_USER
              value: app-demo-user
            - name: MARIADB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: app-demo-database-secret
                  key: password
          livenessProbe:
            httpGet:
              path: /
              port: 8000
            initialDelaySeconds: 10
            periodSeconds: 10
            failureThreshold: 3
          readinessProbe:
            httpGet:
              path: /
              port: 8000
            initialDelaySeconds: 10
            periodSeconds: 10
            failureThreshold: 3
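The probes above point at the backend's root liveness endpoint on port 8000. While these static manifests were still in use, they could be checked by hand roughly as follows; the namespace is assumed to be group-project, as in the tunnel binding further down.

```bash
# Forward the deployment's port locally and hit the endpoint the probes use.
kubectl port-forward deployment/app-demo 8000:8000 -n group-project &
PF_PID=$!
sleep 2
curl http://localhost:8000/   # expected: {"status":"ok"}
kill "$PF_PID"
```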
@@ -1,10 +0,0 @@
apiVersion: v1
kind: Service
metadata:
  name: app-demo
spec:
  ports:
    - port: 80
      targetPort: 8000
  selector:
    app: app-demo
@@ -1,36 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: app-demo-worker
spec:
  replicas: 3
  revisionHistoryLimit: 3
  selector:
    matchLabels:
      app: app-demo-worker
  template:
    metadata:
      labels:
        app: app-demo-worker
    spec:
      containers:
        - image: lukastrkan/cc-app-demo@sha256:75634b4d97282b6b8424fe17767c81adf44af5f7359c1d25883073b5629b3e05
          name: app-demo-worker
          command:
            - celery
            - -A
            - app.celery_app
            - worker
            - -Q
            - $(MAIL_QUEUE)
            - --loglevel
            - INFO
          env:
            - name: RABBITMQ_USERNAME
              value: demo-app
            - name: RABBITMQ_PASSWORD
              value: StrongPassword123!
            - name: RABBITMQ_HOST
              value: rabbitmq.rabbitmq.svc.cluster.local
            - name: RABBITMQ_PORT
              value: '5672'
@@ -1,14 +0,0 @@
apiVersion: networking.cfargotunnel.com/v1alpha1
kind: TunnelBinding
metadata:
  name: guestbook-tunnel-binding
  namespace: group-project
subjects:
  - name: app-server
    spec:
      target: http://app-demo.group-project.svc.cluster.local
      fqdn: demo.ltrk.cz
      noTlsVerify: true
tunnelRef:
  kind: ClusterTunnel
  name: cluster-tunnel
7project/meetings/2025-10-16-meeting.md (new file, 53 lines)
@@ -0,0 +1,53 @@
# Weekly Meeting Notes

- Group 8 - Personal finance tracker
- Mentor: Jaychander

Keep all meeting notes in the `meetings.md` file in your project folder.
Just copy the template below for each weekly meeting and fill in the details.

## Administrative Info

- Date: 2025-10-16
- Attendees: Dejan Ribarovski, Lukas Trkan
- Notetaker: Dejan Ribarovski

## Progress Update (Before Meeting)

Summary of what has been accomplished since the last meeting in the following categories.

## Action Items from Last Week (During Meeting)

- [x] Start coding the app logic
- [x] Start writing the report so it matches the actual progress
- [x] Redo the system diagram so it includes a response flow

### Coding
Implemented an initial functioning version of the app, added OAuth with BankID and MojeID,
and added database snapshots.

### Documentation
report.md is up to date.

## Questions and Topics for Discussion (Before Meeting)

Prepare 3-5 questions and topics you want to discuss with your mentor.

1. What other functionality should be added to the app?
2. Priority for the next week (testing, maybe?)
3. Question 3

## Discussion Notes (During Meeting)

## Action Items for Next Week (During Meeting)

Last 3 minutes of the meeting, summarize action items.

- [x] OAuth
- [x] CI/CD fix
- [ ] Database local (multiple bank accounts)
- [ ] Add tests and set up GitHub pipeline
- [ ] Frontend improvement - user experience
- [ ] Make the report clearer

---
54
7project/meetings/2025-10-23-meeting.md
Normal file
54
7project/meetings/2025-10-23-meeting.md
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
# Weekly Meeting Notes
|
||||||
|
|
||||||
|
- Group 8 - Personal finance tracker
|
||||||
|
- Mentor: Jaychander
|
||||||
|
|
||||||
|
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||||
|
Just copy the template below for each weekly meeting and fill in the details.
|
||||||
|
|
||||||
|
## Administrative Info
|
||||||
|
|
||||||
|
- Date: 2025-10-23
|
||||||
|
- Attendees: Dejan
|
||||||
|
- Notetaker: Dejan
|
||||||
|
|
||||||
|
## Progress Update (Before Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [x] OAuth (BankID)
|
||||||
|
- [x] CI/CD fix
|
||||||
|
- [X] Database local (multiple bank accounts)
|
||||||
|
- [X] Add tests and set up github pipeline
|
||||||
|
- [X] Frontend imporvment - user experience
|
||||||
|
- [ ] make the report more clear - partly
|
||||||
|
|
||||||
|
Summary of what has been accomplished since the last meeting in the following categories.
|
||||||
|
|
||||||
|
### Coding
|
||||||
|
Improved Frontend, added Mock Bank, fixed deployment, fixed OAuth(BankID) on production, added basic tests
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
Not much - just updated the work done
|
||||||
|
|
||||||
|
## Questions and Topics for Discussion (Before Meeting)
|
||||||
|
|
||||||
|
This was not prepared, I planned to do it right before meeting, but Jaychander needed to go somewhere earlier.
|
||||||
|
|
||||||
|
1. Question 1
|
||||||
|
2. Question 2
|
||||||
|
3. Question 3
|
||||||
|
|
||||||
|
## Discussion Notes (During Meeting)
|
||||||
|
The tracker should not store the transactions in the database - security vulnerability.
|
||||||
|
|
||||||
|
## Action Items for Next Week (During Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [ ] Don't store transaction data in the database (security) - load it on login (from the CSAS API and the local database), load automatically with the user's email
|
||||||
|
- [ ] Go through the checklist
|
||||||
|
- [ ] Look for possible APIs (e.g. stocks or other financial data)
|
||||||
|
- [ ] Report
|
||||||
|
|
||||||
|
---
|
||||||
7project/meetings/2025-10-30-meeting.md (new file, 51 lines)
@@ -0,0 +1,51 @@
|
|||||||
|
# Weekly Meeting Notes
|
||||||
|
|
||||||
|
- Group 8 - Personal finance tracker
|
||||||
|
- Mentor: Jaychander
|
||||||
|
|
||||||
|
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||||
|
Just copy the template below for each weekly meeting and fill in the details.
|
||||||
|
|
||||||
|
## Administrative Info
|
||||||
|
|
||||||
|
- Date: 2025-10-30
|
||||||
|
- Attendees: Dejan, Lukas
|
||||||
|
- Notetaker: Dejan
|
||||||
|
|
||||||
|
## Progress Update (Before Meeting)
|
||||||
|
|
||||||
|
Status of the action items from last week:
|
||||||
|
|
||||||
|
- [ ] Don't store transaction data in the database (security) - load it on login (from the CSAS API and the local database), load automatically with the user's email
|
||||||
|
- [X] Go through the checklist
|
||||||
|
- [X] Look for possible APIs (e.g. stocks or other financial data)
|
||||||
|
- [ ] Report - partly
|
||||||
|
|
||||||
|
Summary of what has been accomplished since the last meeting in the following categories.
|
||||||
|
|
||||||
|
### Coding
|
||||||
|
Implemented CSAS API transaction fetching, added tests with a testing database on GitHub Actions, redid the UI,
|
||||||
|
and added currency exchange rates via the CNB API.
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
Not much - just updated the work done
|
||||||
|
|
||||||
|
## Questions and Topics for Discussion (Before Meeting)
|
||||||
|
|
||||||
|
1. Security regarding storing transactions - possibility of encryption
|
||||||
|
2. Realistically, what still needs to be done for us to be finished?
|
||||||
|
3. Question 3
|
||||||
|
|
||||||
|
## Discussion Notes (During Meeting)
|
||||||
|
The tracker should not store the transactions in the database - security vulnerability.
|
||||||
|
|
||||||
|
## Action Items for Next Week (During Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [x] Change the name on frontend from 7project
|
||||||
|
- [x] Finalize the functionality and everything in the code part
|
||||||
|
- [ ] Try to finalize report with focus on reproducibility
|
||||||
|
- [ ] More high level explanation of the workflow in the report
|
||||||
|
|
||||||
|
---
|
||||||
7project/meetings/2025-10-9-meeting.md (new file, 54 lines)
@@ -0,0 +1,54 @@
|
|||||||
|
# Weekly Meeting Notes
|
||||||
|
|
||||||
|
- Group 8 - Personal finance tracker
|
||||||
|
- Mentor: Jaychander
|
||||||
|
|
||||||
|
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||||
|
Just copy the template below for each weekly meeting and fill in the details.
|
||||||
|
|
||||||
|
## Administrative Info
|
||||||
|
|
||||||
|
- Date: 2025-10-08
|
||||||
|
- Attendees: Dejan Ribarovski, Lukas Trkan
|
||||||
|
- Notetaker: Dejan Ribarovski
|
||||||
|
|
||||||
|
## Progress Update (Before Meeting)
|
||||||
|
|
||||||
|
Summary of what has been accomplished since the last meeting in the following categories.
|
||||||
|
|
||||||
|
### Coding
|
||||||
|
|
||||||
|
Lukas has implemented the template source directories, source files and config files necessary for deployment
|
||||||
|
- Docker Compose for the database, Redis cache and RabbitMQ
|
||||||
|
- tofu
|
||||||
|
- backend template
|
||||||
|
- frontend template
|
||||||
|
- charts templates
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
- Created GitHub issues for the next steps
|
||||||
|
- Added this document + checklist and report
|
||||||
|
|
||||||
|
## Questions and Topics for Discussion (Before Meeting)
|
||||||
|
|
||||||
|
Prepare 3-5 questions and topics you want to discuss with your mentor.
|
||||||
|
|
||||||
|
1. Anything we should add structure-wise?
|
||||||
|
2. Anything you would like us to prioritize until next week?
|
||||||
|
|
||||||
|
## Discussion Notes (During Meeting)
|
||||||
|
|
||||||
|
- start working on the report
|
||||||
|
- start coding the actual code
|
||||||
|
- write problems solved
|
||||||
|
- redo the system diagram - see the response as well
|
||||||
|
- create a meetings folder with separate meeting files
|
||||||
|
## Action Items for Next Week (During Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [ ] start coding the app logic
|
||||||
|
- [ ] start writing the report so it matches the actual progress
|
||||||
|
- [ ] redo the system diagram so it includes a response flow
|
||||||
|
|
||||||
|
---
|
||||||
7project/meetings/2025-11-6-meeting.md (new file, 47 lines)
@@ -0,0 +1,47 @@
|
|||||||
|
# Weekly Meeting Notes
|
||||||
|
|
||||||
|
- Group 8 - Personal finance tracker
|
||||||
|
- Mentor: Jaychander
|
||||||
|
|
||||||
|
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||||
|
Just copy the template below for each weekly meeting and fill in the details.
|
||||||
|
|
||||||
|
## Administrative Info
|
||||||
|
|
||||||
|
- Date: 2025-11-06
|
||||||
|
- Attendees: Dejan, Lukas
|
||||||
|
- Notetaker: Dejan
|
||||||
|
|
||||||
|
## Progress Update (Before Meeting)
|
||||||
|
|
||||||
|
Status of the action items from last week:
|
||||||
|
|
||||||
|
- [x] Change the name on frontend from 7project
|
||||||
|
- [x] Finalize the functionality and everything in the code part
|
||||||
|
- [x] Try to finalize report with focus on reproducibility
|
||||||
|
- [x] More high level explanation of the workflow in the report
|
||||||
|
|
||||||
|
Summary of what has been accomplished since the last meeting in the following categories.
|
||||||
|
|
||||||
|
### Coding
|
||||||
|
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
|
||||||
|
## Questions and Topics for Discussion (Before Meeting)
|
||||||
|
|
||||||
|
|
||||||
|
## Discussion Notes (During Meeting)
|
||||||
|
The tracker should not store the transactions in the database - security vulnerability.
|
||||||
|
|
||||||
|
## Action Items for Next Week (During Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [ ] Presentation video
|
||||||
|
- [ ] highlight the optional stuff in the report
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
---
|
||||||
7project/meetings/meeting-template.md (new file, 41 lines)
@@ -0,0 +1,41 @@
|
|||||||
|
# Weekly Meeting Notes
|
||||||
|
|
||||||
|
- Group X - Project Title
|
||||||
|
- Mentor: Mentor Name
|
||||||
|
|
||||||
|
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||||
|
Just copy the template below for each weekly meeting and fill in the details.
|
||||||
|
|
||||||
|
## Administrative Info
|
||||||
|
|
||||||
|
- Date: 2025-09-19
|
||||||
|
- Attendees: Name1, Name2, Name3
|
||||||
|
- Notetaker: Name1
|
||||||
|
|
||||||
|
## Progress Update (Before Meeting)
|
||||||
|
|
||||||
|
Summary of what has been accomplished since the last meeting in the following categories.
|
||||||
|
|
||||||
|
### Coding
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
## Questions and Topics for Discussion (Before Meeting)
|
||||||
|
|
||||||
|
Prepare 3-5 questions and topics you want to discuss with your mentor.
|
||||||
|
|
||||||
|
1. Question 1
|
||||||
|
2. Question 2
|
||||||
|
3. Question 3
|
||||||
|
|
||||||
|
## Discussion Notes (During Meeting)
|
||||||
|
|
||||||
|
## Action Items for Next Week (During Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [ ] Action Item 1
|
||||||
|
- [ ] Action Item 2
|
||||||
|
- [ ] Action Item 3
|
||||||
|
|
||||||
|
---
|
||||||
7project/report.md (new file, 689 lines)
@@ -0,0 +1,689 @@
|
|||||||
|
# Personal finance tracker
|
||||||
|
|
||||||
|
<!--- **Instructions**:
|
||||||
|
> This template provides the structure for your project report.
|
||||||
|
> Replace the placeholder text with your actual content.
|
||||||
|
> Remove instructions that are not relevant for your project, but leave the headings along with a (NA) label. -->
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
|
||||||
|
**Project Name**: Personal Finance Tracker
|
||||||
|
|
||||||
|
**Deployment URL**: https://finance.ltrk.cz/
|
||||||
|
|
||||||
|
**Group Members**:
|
||||||
|
|
||||||
|
- 289229, Lukáš Trkan, lukastrkan
|
||||||
|
- 289258, Dejan Ribarovski, ribardej (derib2613)
|
||||||
|
|
||||||
|
**Brief Description**:
|
||||||
|
Our application allows users to easily track their cash flow
|
||||||
|
through multiple bank accounts. Users can label their transactions with custom categories that can be later used for
|
||||||
|
filtering and visualization. New transactions are automatically fetched in the background.
|
||||||
|
|
||||||
|
## Architecture Overview
|
||||||
|
|
||||||
|
Our system is a full‑stack web application composed of a React frontend, a FastAPI backend,
|
||||||
|
an asynchronous MariaDB database with Maxscale, and background workers powered by Celery with RabbitMQ.
|
||||||
|
The backend exposes REST endpoints for authentication (email/password and OAuth), users, categories,
|
||||||
|
transactions, exchange rates and bank APIs. Infrastructure for Kubernetes is managed via Terraform/OpenTofu and
|
||||||
|
the application is packaged via a Helm chart. All of this is deployed on a private TalosOS cluster running on Proxmox VE with
|
||||||
|
CI/CD and public access over Cloudflare tunnels. Static frontend files are served via Cloudflare Pages.
|
||||||
|
Other services deployed in the cluster include Longhorn for persistent storage and Prometheus with Grafana for monitoring.
|
||||||
|
|
||||||
|
### High-Level Architecture
|
||||||
|
|
||||||
|
```mermaid
|
||||||
|
flowchart TB
|
||||||
|
n3(("User")) <--> client["Frontend"]
|
||||||
|
proc_queue["Message Queue"] --> proc_queue_worker["Worker Service"]
|
||||||
|
proc_queue_worker -- SMTP --> ext_mail[("Email Service")]
|
||||||
|
proc_queue_worker <-- HTTP request/response --> ext_bank[("Bank API")]
|
||||||
|
proc_queue_worker <--> db[("Database")]
|
||||||
|
proc_cron["Cron"] <-- HTTP request/response --> svc["Backend API"]
|
||||||
|
svc --> proc_queue
|
||||||
|
n2["Cloudflare tunnel"] <-- HTTP request/response --> svc
|
||||||
|
svc <--> db
|
||||||
|
svc <-- HTTP request/response --> api[("UniRate API")]
|
||||||
|
client <-- HTTP request/response --> n2
|
||||||
|
```
|
||||||
|
|
||||||
|
The workflow works in the following way:
|
||||||
|
|
||||||
|
- Client connects to the frontend. After login, frontend automatically fetches the stored transactions from
|
||||||
|
the database via the backend API and currency rates from UniRate API.
|
||||||
|
- When the client opts in to fetching new transactions via the Bank API, cron triggers periodic fetching
|
||||||
|
using a background worker.
|
||||||
|
- After a successful load, these transactions are stored in the database and displayed to the client.
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- The stored transactions are encrypted in the DB for security reasons.
|
||||||
|
- For every pull request, the full app is deployed at a separate URL and the tests are run by GitHub CI/CD
|
||||||
|
- On every push to main, the production app is automatically updated
|
||||||
|
- UI is responsive for mobile devices
|
||||||
|
- Slow operations (emails, transactions fetching) are handled
|
||||||
|
in the background by Celery workers (see the sketch after this list).
|
||||||
|
- The app is monitored via a Prometheus metrics endpoint, and the metrics are shown in a Grafana dashboard.
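
To make the background-job feature concrete, here is a minimal sketch of a Celery task. The Celery instance path matches the worker command shown under "Local development" (`app.celery_app.celery_app`), but the task name and body are illustrative only; the real tasks live in `backend/app/workers`.

```python
# Minimal sketch of a background task; celery_app is imported from the module
# used by the worker command in "Local development" (app.celery_app.celery_app).
# The task name and body are illustrative, not the project's actual code.
from app.celery_app import celery_app


@celery_app.task
def send_email(recipient: str, subject: str, body: str) -> None:
    """Placeholder body; the real worker uses the SMTP_* settings."""
    print(f"Sending '{subject}' to {recipient}")


# The API can enqueue the task without blocking the HTTP request:
# send_email.delay("user@example.com", "Welcome", "Hi there!")
```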
|
||||||
|
|
||||||
|
### Components
|
||||||
|
|
||||||
|
- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles
|
||||||
|
login/registration, shows latest transactions, filtering, and allows adding transactions.
|
||||||
|
- Backend API (backend/app): FastAPI app with routers under app/api for auth, users, categories, transactions, exchange
|
||||||
|
rates and bankAPI. Uses FastAPI Users for auth (JWT + OAuth), SQLAlchemy ORM, and Pydantic v2 schemas.
|
||||||
|
- Worker service (backend/app/workers): Celery worker handling background tasks (emails, transactions fetching).
|
||||||
|
- Database (MariaDB with Maxscale): Persists users, categories, transactions; schema managed by Alembic migrations.
|
||||||
|
- Message Queue (RabbitMQ): Queues background tasks for Celery workers.
|
||||||
|
- Infrastructure as Code (tofu/): OpenTofu modules provisioning cluster services (RabbitMQ, Redis, Cloudflare tunnel,
|
||||||
|
etc.).
|
||||||
|
- Deployment Chart (charts/myapp-chart/): Helm chart to deploy the application to Kubernetes.
|
||||||
|
|
||||||
|
### Technologies Used
|
||||||
|
|
||||||
|
- Backend: Python, FastAPI, FastAPI Users, SQLAlchemy, Pydantic, Alembic, Celery
|
||||||
|
- Frontend: React, TypeScript, Vite
|
||||||
|
- Database: MariaDB with Maxscale
|
||||||
|
- Background jobs: RabbitMQ, Celery
|
||||||
|
- Containerization/Orchestration: Docker, Docker Compose (dev), Kubernetes, Helm
|
||||||
|
- IaC/Platform: Proxmox, Talos, Cloudflare pages, OpenTofu (Terraform), cert-manager, MetalLB, Cloudflare Tunnel,
|
||||||
|
Prometheus, Loki
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
Here are software and hardware prerequisites for the development and production environments. This section also
|
||||||
|
describes
|
||||||
|
necessary environment variables and key dependencies used in the project.
|
||||||
|
|
||||||
|
### System Requirements
|
||||||
|
|
||||||
|
#### Development
|
||||||
|
|
||||||
|
- OS: Tested on macOS; Linux and Windows should work as well
|
||||||
|
- Minimum RAM: 8 GB
|
||||||
|
- Storage: 10 GB+ free
|
||||||
|
|
||||||
|
#### Production
|
||||||
|
|
||||||
|
- 1 + 4 nodes (1 control plane + 4 workers), each with:
|
||||||
|
- CPU: 4 cores
|
||||||
|
- RAM: 8 GB
|
||||||
|
- Storage: 200 GB
|
||||||
|
|
||||||
|
### Required Software
|
||||||
|
|
||||||
|
#### Development
|
||||||
|
|
||||||
|
- Docker
|
||||||
|
- Docker Compose
|
||||||
|
- Node.js and npm
|
||||||
|
- Python 3.12
|
||||||
|
- MariaDB 11
|
||||||
|
|
||||||
|
#### Production
|
||||||
|
|
||||||
|
##### Minimal:
|
||||||
|
|
||||||
|
- domain name using Cloudflare's nameservers (needed for the tunnel and Pages)
|
||||||
|
- Kubernetes cluster
|
||||||
|
- kubectl
|
||||||
|
- Helm
|
||||||
|
- OpenTofu
|
||||||
|
|
||||||
|
##### Our setup specifics:
|
||||||
|
|
||||||
|
- Proxmox VE
|
||||||
|
- TalosOS cluster
|
||||||
|
- talosctl
|
||||||
|
- GitHub self-hosted runner with access to the cluster
|
||||||
|
- Tailscale for remote access to the cluster
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
#### Backend
|
||||||
|
|
||||||
|
- `MOJEID_CLIENT_ID`, `MOJEID_CLIENT_SECRET` \- OAuth client ID and secret for
|
||||||
|
[MojeID](https://www.mojeid.cz/en/provider/)
|
||||||
|
- `BANKID_CLIENT_ID`, `BANKID_CLIENT_SECRET` \- OAuth client ID and secret for [BankID](https://developer.bankid.cz/)
|
||||||
|
- `CSAS_CLIENT_ID`, `CSAS_CLIENT_SECRET` \- OAuth client ID and secret for [Česká
|
||||||
|
spořitelna](https://developers.erstegroup.com/docs/apis/bank.csas)
|
||||||
|
- `DATABASE_URL`(or `MARIADB_HOST`, `MARIADB_PORT`, `MARIADB_DB`, `MARIADB_USER`, `MARIADB_PASSWORD`) \- MariaDB
|
||||||
|
connection details
|
||||||
|
- `RABBITMQ_USERNAME`, `RABBITMQ_PASSWORD` \- credentials for RabbitMQ
|
||||||
|
- `SENTRY_DSN` \- Sentry DSN for error reporting
|
||||||
|
- `DB_ENCRYPTION_KEY` \- symmetric key for encrypting sensitive data in the database (see the sketch after this list)
|
||||||
|
- `SMTP_HOST`, `SMTP_PORT`, `SMTP_USERNAME`, `SMTP_PASSWORD`, `SMTP_USE_TLS`, `SMTP_USE_SSL`, `SMTP_FROM` \- SMTP
|
||||||
|
configuration (host, port, auth credentials, TLS/SSL options, sender).
|
||||||
|
- `UNIRATE_API_KEY` \- API key for UniRate.
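
As a sketch of how `DB_ENCRYPTION_KEY` is used, the snippet below declares encrypted columns with SQLAlchemy-Utils' `EncryptedType`, the same type referenced by the "Add encrypted type" Alembic migration. The model and column layout are illustrative; the real models live in `backend/app/models`.

```python
# Sketch of encrypted SQLAlchemy columns using the key from DB_ENCRYPTION_KEY.
# EncryptedType matches the type used by the "Add encrypted type" migration;
# the model below is illustrative, not the project's actual Transaction model.
import os

from sqlalchemy import Column, Float, Integer, String
from sqlalchemy.orm import declarative_base
from sqlalchemy_utils import EncryptedType

Base = declarative_base()
KEY = os.getenv("DB_ENCRYPTION_KEY", "change-me")


class Transaction(Base):
    __tablename__ = "transaction"

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Values are encrypted before being written to MariaDB and decrypted
    # transparently when read back through the ORM.
    amount = Column(EncryptedType(Float, KEY), nullable=False)
    description = Column(EncryptedType(String(255), KEY), nullable=True)
```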
|
||||||
|
|
||||||
|
#### Frontend
|
||||||
|
|
||||||
|
- `VITE_BACKEND_URL` \- URL of the backend API
|
||||||
|
|
||||||
|
### Dependencies (key libraries)
|
||||||
|
|
||||||
|
Backend: FastAPI, fastapi-users, SQLAlchemy, pydantic v2, Alembic, Celery, uvicorn, pytest
|
||||||
|
Frontend: React, TypeScript, Vite
|
||||||
|
|
||||||
|
## Local development
|
||||||
|
|
||||||
|
You can run the project with Docker Compose and a Python virtual environment for testing and development purposes.
|
||||||
|
|
||||||
|
### 1) Clone the Repository
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/dat515-2025/Group-8.git
|
||||||
|
cd Group-8/7project
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2) Install dependencies
|
||||||
|
|
||||||
|
Backend
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd backend
|
||||||
|
python3 -m venv .venv
|
||||||
|
source .venv/bin/activate
|
||||||
|
pip install -r requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3) Run Docker containers
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd ..
|
||||||
|
docker compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4) Prepare the database
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash upgrade_database.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5) Run backend
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd backend
|
||||||
|
|
||||||
|
# Set the required environment variables first (see "Environment Variables" above).
# Example values for local development - adjust to your own setup (the DSN and
# database name below are only an illustration):
export DATABASE_URL="mysql+asyncmy://user:password@127.0.0.1:3306/app"
export DB_ENCRYPTION_KEY="change-me"
|
||||||
|
uvicorn app.app:fastApi --reload --host 0.0.0.0 --port 8000
|
||||||
|
```
|
||||||
|
|
||||||
|
### 6) Run Celery worker (optional, in another terminal)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd Group-8/7project/src/backend
|
||||||
|
source .venv/bin/activate
|
||||||
|
celery -A app.celery_app.celery_app worker -l info
|
||||||
|
```
|
||||||
|
|
||||||
|
### 7) Install frontend dependencies and run
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd ../frontend
|
||||||
|
npm i
|
||||||
|
npm run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
- Backend available at: http://127.0.0.1:8000 (OpenAPI at /docs)
|
||||||
|
- Frontend available at: http://localhost:5173
|
||||||
|
|
||||||
|
## Build Instructions
|
||||||
|
|
||||||
|
### Backend
|
||||||
|
|
||||||
|
The app is separated into a backend and a frontend, so each part is built separately. The backend is built into a Docker image
|
||||||
|
and the frontend is deployed as static files.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd 7project/backend
|
||||||
|
# Don't forget to set the correct image tag with your registry and name
|
||||||
|
# For example lukastrkan/cc-app-demo or gitea.ltrk.dev/lukas/cc-app-demo
|
||||||
|
docker buildx build --platform linux/amd64,linux/arm64 -t CHANGE_ME --push .
|
||||||
|
```
|
||||||
|
|
||||||
|
### Frontend
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd 7project/src/frontend
|
||||||
|
npm ci
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
## Deployment Instructions
|
||||||
|
|
||||||
|
Deployment is tested on a TalosOS cluster with 1 control plane and 4 workers; the cluster needs to be set up and configured
|
||||||
|
manually. Terraform/OpenTofu is then used to deploy base services to the cluster. The app itself is deployed automatically
|
||||||
|
via GitHub Actions and a Helm chart. Frontend files are deployed to Cloudflare Pages.
|
||||||
|
|
||||||
|
### Setup Cluster
|
||||||
|
|
||||||
|
Deployment should work on any Kubernetes cluster. However, we are using 5 TalosOS virtual machines (1 control plane, 4
|
||||||
|
workers)
|
||||||
|
running on top of Proxmox VE.
|
||||||
|
|
||||||
|
1) Create at least 5 VMs with TalosOS (1 control plane + 4 workers; 4 cores, 8 GB RAM, 200 GB disk each)
|
||||||
|
2) Install talosctl for your OS: https://docs.siderolabs.com/talos/v1.10/getting-started/talosctl
|
||||||
|
3) Generate Talos config
|
||||||
|
4) Navigate to tofu directory
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd 7project/src/tofu
|
||||||
|
```
|
||||||
|
|
||||||
|
5) Set IP addresses in environment variables
|
||||||
|
|
||||||
|
```bash
|
||||||
|
CONTROL_PLANE_IP=<control-plane-ip>
|
||||||
|
WORKER1_IP=<worker1-ip>
|
||||||
|
WORKER2_IP=<worker2-ip>
|
||||||
|
WORKER3_IP=<worker3-ip>
|
||||||
|
WORKER4_IP=<worker4-ip>
|
||||||
|
....
|
||||||
|
```
|
||||||
|
|
||||||
|
6) Create config files
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# change my-cluster to your desired cluster name
|
||||||
|
talosctl gen config my-cluster https://$CONTROL_PLANE_IP:6443
|
||||||
|
```
|
||||||
|
|
||||||
|
7) Edit the generated configs
|
||||||
|
|
||||||
|
Apply the following changes to `worker.yaml`:
|
||||||
|
|
||||||
|
1) Add mounts for persistent storage to `machine.kubelet.extraMounts` section:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
extraMounts:
|
||||||
|
- destination: /var/lib/longhorn
|
||||||
|
type: bind
|
||||||
|
source: /var/lib/longhorn
|
||||||
|
options:
|
||||||
|
- bind
|
||||||
|
- rshared
|
||||||
|
- rw
|
||||||
|
```
|
||||||
|
|
||||||
|
2) Change `machine.install.image` to image with extra modules:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
image: factory.talos.dev/metal-installer/88d1f7a5c4f1d3aba7df787c448c1d3d008ed29cfb34af53fa0df4336a56040b:v1.11.1
|
||||||
|
```
|
||||||
|
|
||||||
|
Alternatively, you can use the latest image generated at https://factory.talos.dev with the following options:
|
||||||
|
|
||||||
|
- Bare-metal machine
|
||||||
|
- your TalosOS version
|
||||||
|
- amd64 architecture
|
||||||
|
- siderolabs/iscsi-tools
|
||||||
|
- siderolabs/util-linux-tools
|
||||||
|
- (Optionally) siderolabs/qemu-guest-agent
|
||||||
|
|
||||||
|
Then copy "Initial Installation" value and paste it to the image field.
|
||||||
|
|
||||||
|
3) Add docker registry mirror to `machine.registries.mirrors` section:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
registries:
|
||||||
|
mirrors:
|
||||||
|
docker.io:
|
||||||
|
endpoints:
|
||||||
|
- https://mirror.gcr.io
|
||||||
|
- https://registry-1.docker.io
|
||||||
|
```
|
||||||
|
|
||||||
|
8) Apply configs to the VMs
|
||||||
|
|
||||||
|
```bash
|
||||||
|
talosctl apply-config --insecure --nodes $CONTROL_PLANE_IP --file controlplane.yaml
|
||||||
|
talosctl apply-config --insecure --nodes $WORKER1_IP --file worker.yaml
|
||||||
|
talosctl apply-config --insecure --nodes $WORKER2_IP --file worker.yaml
|
||||||
|
talosctl apply-config --insecure --nodes $WORKER3_IP --file worker.yaml
|
||||||
|
talosctl apply-config --insecure --nodes $WORKER4_IP --file worker.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
9) Bootstrap the cluster and retrieve the kubeconfig
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export TALOSCONFIG=$(pwd)/talosconfig
|
||||||
|
talosctl config endpoint $CONTROL_PLANE_IP
|
||||||
|
talosctl config node $CONTROL_PLANE_IP
|
||||||
|
|
||||||
|
talosctl bootstrap
|
||||||
|
|
||||||
|
talosctl kubeconfig .
|
||||||
|
```
|
||||||
|
|
||||||
|
You can now use a Kubernetes client such as https://headlamp.dev/ with the generated kubeconfig file.
|
||||||
|
|
||||||
|
### Install base services to the cluster
|
||||||
|
|
||||||
|
1) Copy and edit variables
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cp terraform.tfvars.example terraform.tfvars
|
||||||
|
```
|
||||||
|
|
||||||
|
- `metallb_ip_range` - set to range available in your network for load balancer services
|
||||||
|
- `mariadb_password` - password for internal mariadb user
|
||||||
|
- `mariadb_root_password` - password for root user
|
||||||
|
- `mariadb_user_name` - username for admin user
|
||||||
|
- `mariadb_user_host` - allowed hosts for admin user
|
||||||
|
- `mariadb_user_password` - password for admin user
|
||||||
|
- `metallb_maxscale_ip`, `metallb_service_ip`, `metallb_primary_ip`, `metallb_secondary_ip` - IPs for database
|
||||||
|
cluster,
|
||||||
|
set them to static IPs from the `metallb_ip_range`
|
||||||
|
- `s3_enabled`, `s3_bucket`, `s3_region`, `s3_endpoint`, `s3_key_id`, `s3_key_secret` - S3 compatible storage for
|
||||||
|
backups (optional)
|
||||||
|
- `phpmyadmin_enabled` - set to false if you want to disable phpmyadmin
|
||||||
|
- `rabbitmq-password` - password for RabbitMQ
|
||||||
|
|
||||||
|
- `cloudflare_account_id` - your Cloudflare account ID
|
||||||
|
- `cloudflare_api_token` - your Cloudflare API token with permissions to manage tunnels and DNS
|
||||||
|
- `cloudflare_email` - your Cloudflare account email
|
||||||
|
- `cloudflare_tunnel_name` - name for the tunnel
|
||||||
|
- `cloudflare_domain` - your domain name managed in Cloudflare
|
||||||
|
|
||||||
|
2) Deploy without Cloudflare module first
|
||||||
|
|
||||||
|
```bash
|
||||||
|
tofu init
|
||||||
|
tofu apply -exclude=module.cloudflare
|
||||||
|
```
|
||||||
|
|
||||||
|
3) Deploy rest of the modules
|
||||||
|
|
||||||
|
```bash
|
||||||
|
tofu apply
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configure deployment
|
||||||
|
|
||||||
|
1) Create self-hosted runner with access to the cluster or make cluster publicly accessible
|
||||||
|
2) Change `jobs.deploy.runs-on` in `.github/workflows/deploy-prod.yml` and in `.github/workflows/deploy-pr.yaml` to your
|
||||||
|
runner label
|
||||||
|
3) Add variables to GitHub in repository settings:
|
||||||
|
- `PROD_DOMAIN` - base domain for deployments (e.g. ltrk.cz)
|
||||||
|
- `DEV_FRONTEND_BASE_DOMAIN` - base domain for your cloudflare pages
|
||||||
|
4) Add secrets to GitHub in repository settings:
|
||||||
|
- CLOUDFLARE_ACCOUNT_ID - same as in tofu/terraform.tfvars
|
||||||
|
- CLOUDFLARE_API_TOKEN - same as in tofu/terraform.tfvars
|
||||||
|
- DOCKER_USER - your docker registry username
|
||||||
|
- DOCKER_PASSWORD - your docker registry password
|
||||||
|
- KUBE_CONFIG - content of your kubeconfig file for the cluster
|
||||||
|
- PROD_DB_PASSWORD - same as MARIADB_PASSWORD
|
||||||
|
- PROD_RABBITMQ_PASSWORD - same as RABBITMQ_PASSWORD
|
||||||
|
- PROD_DB_ENCRYPTION_KEY - same as DB_ENCRYPTION_KEY
|
||||||
|
- MOJEID_CLIENT_ID
|
||||||
|
- MOJEID_CLIENT_SECRET
|
||||||
|
- BANKID_CLIENT_ID
|
||||||
|
- BANKID_CLIENT_SECRET
|
||||||
|
- CSAS_CLIENT_ID
|
||||||
|
- CSAS_CLIENT_SECRET
|
||||||
|
- SENTRY_DSN
|
||||||
|
- SMTP_HOST
|
||||||
|
- SMTP_PORT
|
||||||
|
- SMTP_USERNAME
|
||||||
|
- SMTP_PASSWORD
|
||||||
|
- SMTP_FROM
|
||||||
|
- UNIRATE_API_KEY
|
||||||
|
5) On GitHub, open the Actions tab, select "Deploy Prod" and run the workflow manually
|
||||||
|
|
||||||
|
## Testing Instructions
|
||||||
|
|
||||||
|
The tests are located in the 7project/backend/tests directory. All tests are run by GitHub Actions on every pull request and
|
||||||
|
push to main.
|
||||||
|
See the workflow [here](../.github/workflows/run-tests.yml).
|
||||||
|
|
||||||
|
If you want to run the tests locally, the preferred way is to use a [bash script](backend/test_locally.sh)
|
||||||
|
that will start a test DB container with [docker compose](backend/docker-compose.test.yml) and remove it afterwards.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd 7project/src/backend
|
||||||
|
bash test_locally.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
### Unit Tests
|
||||||
|
|
||||||
|
There are only 5 basic unit tests, since our service logic is very simple.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash test_locally.sh --only-unit
|
||||||
|
```
|
||||||
|
|
||||||
|
### Integration Tests
|
||||||
|
|
||||||
|
There are 9 basic integration tests, testing the individual backend API endpoints.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash test_locally.sh --only-integration
|
||||||
|
```
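
For illustration, a minimal endpoint test could look like the sketch below. The FastAPI instance name (`fastApi`) is taken from the uvicorn command in "Local development" and the endpoint/payload from "Usage Examples"; the actual tests in `7project/backend/tests` may be structured differently and expect the test database started by `docker-compose.test.yml`.

```python
# Illustrative integration-test sketch; not the project's actual test code.
from fastapi.testclient import TestClient

from app.app import fastApi  # instance name taken from the uvicorn command

client = TestClient(fastApi)


def test_register_returns_created_user():
    response = client.post(
        "/auth/register",
        json={
            "email": "test@example.com",
            "password": "StrongPassw0rd",
            "first_name": "Jane",
            "last_name": "Doe",
        },
    )
    assert response.status_code == 201
    assert response.json()["email"] == "test@example.com"
```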
|
||||||
|
|
||||||
|
### End-to-End Tests
|
||||||
|
|
||||||
|
There are 7 e2e tests, testing more complex app logic
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash test_locally.sh --only-e2e
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage Examples
|
||||||
|
|
||||||
|
All endpoints are documented at OpenAPI: http://127.0.0.1:8000/docs
|
||||||
|
|
||||||
|
### Auth: Register and Login (JWT)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Register
|
||||||
|
curl -X POST http://127.0.0.1:8000/auth/register \
|
||||||
|
-H 'Content-Type: application/json' \
|
||||||
|
-d '{
|
||||||
|
"email": "user@example.com",
|
||||||
|
"password": "StrongPassw0rd",
|
||||||
|
"first_name": "Jane",
|
||||||
|
"last_name": "Doe"
|
||||||
|
}'
|
||||||
|
|
||||||
|
# Login (JWT)
|
||||||
|
TOKEN=$(curl -s -X POST http://127.0.0.1:8000/auth/jwt/login \
|
||||||
|
-H 'Content-Type: application/x-www-form-urlencoded' \
|
||||||
|
-d 'username=user@example.com&password=StrongPassw0rd' | jq -r .access_token)
|
||||||
|
|
||||||
|
echo $TOKEN
|
||||||
|
|
||||||
|
# Call a protected route
|
||||||
|
curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
||||||
|
```
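
The same flow in Python, as a sketch equivalent to the curl calls above (assumes the backend runs locally and the `requests` package is installed):

```python
# Python equivalent of the curl examples above; assumes a local backend on port 8000.
import requests

BASE = "http://127.0.0.1:8000"

# Register a new user (fails with 4xx if the email is already registered)
requests.post(
    BASE + "/auth/register",
    json={
        "email": "user@example.com",
        "password": "StrongPassw0rd",
        "first_name": "Jane",
        "last_name": "Doe",
    },
)

# Login with the JWT backend (form-encoded, as in the curl example)
login = requests.post(
    BASE + "/auth/jwt/login",
    data={"username": "user@example.com", "password": "StrongPassw0rd"},
)
token = login.json()["access_token"]

# Call a protected route
resp = requests.get(
    BASE + "/authenticated-route",
    headers={"Authorization": f"Bearer {token}"},
)
print(resp.status_code, resp.text)
```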
|
||||||
|
|
||||||
|
### Frontend
|
||||||
|
|
||||||
|
- Start with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd 7project/src/frontend
npm run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
- Ensure VITE_BACKEND_URL is set to the backend URL (e.g., http://127.0.0.1:8000)
|
||||||
|
- Open http://localhost:5173
|
||||||
|
- Login, view latest transactions, filter, and add new transactions from the UI.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Presentation Video
|
||||||
|
|
||||||
|
**YouTube Link**: [Insert your YouTube link here]
|
||||||
|
|
||||||
|
**Duration**: [X minutes Y seconds]
|
||||||
|
|
||||||
|
**Video Includes**:
|
||||||
|
|
||||||
|
- [ ] Project overview and architecture
|
||||||
|
- [ ] Live demonstration of key features
|
||||||
|
- [ ] Code walkthrough
|
||||||
|
- [ ] Build and deployment showcase
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
#### Issue 1: Unable to apply Cloudflare terraform module
|
||||||
|
|
||||||
|
**Symptoms**: Terraform/OpenTofu apply fails during Cloudflare module deployment.
|
||||||
|
This is caused by a variable whose value is not known before the first apply.
|
||||||
|
|
||||||
|
**Solution**: Apply first without Cloudflare module and then apply again.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
tofu apply -exclude=module.cloudflare
|
||||||
|
tofu apply
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Issue 2: Pods are unable to start
|
||||||
|
|
||||||
|
**Symptoms**: Pods are unable to start and show an ImagePullBackOff error. This can be caused
|
||||||
|
by either hitting Docker Hub rate limits or by Docker Hub being down.
|
||||||
|
|
||||||
|
**Solution**: Make sure you updated the cluster config to use registry mirror as described in
|
||||||
|
"Setup Cluster" section.
|
||||||
|
|
||||||
|
### Debug Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Example debug commands (adjust namespaces and names to your Helm release)
kubectl get pods -A                                    # overall pod status across namespaces
kubectl describe pod <pod-name> -n <namespace>         # events for a failing pod (e.g. ImagePullBackOff)
kubectl logs <pod-name> -n <namespace>                 # application logs
kubectl get events -n <namespace> --sort-by=.lastTimestamp   # recent events
helm list -A                                           # installed releases and their status
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Progress Table
|
||||||
|
|
||||||
|
> Be honest and detailed in your assessments.
|
||||||
|
> This information is used for individual grading.
|
||||||
|
> Link to the specific commit on GitHub for each contribution.
|
||||||
|
|
||||||
|
| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|
||||||
|
|-------------------------------------------------------------------------------------------------------------------|-------------|----------------|------------|------------|-----------------------------------------------------------------------------------------------------|
|
||||||
|
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8#) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
||||||
|
| [Design Document](https://github.com/dat515-2025/Group-8/blob/main/6design/design.md) | Both | ✅ Complete | 4 Hours | Easy | [Any notes] |
|
||||||
|
| [Backend API Development](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/api) | Dejan | ✅ Complete | 12 hours | Medium | [Any notes] |
|
||||||
|
| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
||||||
|
| [Frontend Development](https://github.com/dat515-2025/Group-8/tree/main/7project/frontend) | Dejan | ✅ Complete | 17 hours | Medium | [Any notes] |
|
||||||
|
| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | 3 hours | Easy | [Any notes] |
|
||||||
|
| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | Using Talos cluster running in proxmox - easy snapshots etc. Frontend deployed at Cloudflare pages. |
|
||||||
|
| [Testing Implementation](https://github.com/dat515-2025/group-name) | Dejan | ✅ Complete | 16 hours | Medium | [Any notes] |
|
||||||
|
| [Documentation](https://github.com/dat515-2025/group-name) | Both | 🔄 In Progress | [X hours] | Easy | [Any notes] |
|
||||||
|
| [Presentation Video](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Medium | [Any notes] |
|
||||||
|
|
||||||
|
**Legend**: ✅ Complete | 🔄 In Progress | ⏳ Pending | ❌ Not Started
|
||||||
|
|
||||||
|
## Hour Sheet
|
||||||
|
|
||||||
|
> Link to the specific commit on GitHub for each contribution.
|
||||||
|
|
||||||
|
### Lukáš
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
**Name:** Lukáš Trkan
|
||||||
|
|
||||||
|
| Date | Activity | Hours | Description | Representative Commit / PR |
|
||||||
|
|:----------------|:----------------------------|:--------|:------------------------------------------------------------------------------------|:------------------------------------------------------|
|
||||||
|
| 18.9. - 19.9. | Initial Setup & Design | 40 | Repository init, system design diagrams, basic Terraform setup | `feat(infrastructure): add basic terraform resources` |
|
||||||
|
| 20.9. - 5.10. | Core Infrastructure & CI/CD | 12 | K8s setup (ArgoCD), CI/CD workflows, RabbitMQ, Redis, Celery workers, DB migrations | `PR #2`, `feat(infrastructure): add rabbitmq cluster` |
|
||||||
|
| 6.10. - 9.10. | Frontend Infra & DB | 5 | Deployed frontend to Cloudflare, setup metrics, created database models | `PR #16` (Cloudflare), `PR #19` (DB structure) |
|
||||||
|
| 10.10. - 11.10. | Backend | 5 | Implemented OAuth support (MojeID, BankID) | `feat(auth): add support for OAuth and MojeID` |
|
||||||
|
| 12.10. | Infrastructure | 2 | Added database backups | `feat(infrastructure): add backups` |
|
||||||
|
| 16.10. | Infrastructure | 4 | Implemented secrets management, fixed deployment/env variables | `PR #29` (Deployment envs) |
|
||||||
|
| 17.10. | Monitoring | 1 | Added Sentry logging | `feat(app): add sentry loging` |
|
||||||
|
| 21.10. - 22.10. | Backend | 8 | Added ČSAS bank connection | `PR #32` (Fix React OAuth) |
|
||||||
|
| 29.10. - 30.10. | Backend | 5 | Implemented transaction encryption, add bank scraping | `PR #39` (CSAS Scraping) |
|
||||||
|
| 30.10. | Monitoring | 6 | Implemented Loki logging and basic Prometheus metrics | `PR #42` (Prometheus metrics) |
|
||||||
|
| 9.11. | Monitoring | 2 | Added custom Prometheus metrics | `PR #46` (Prometheus custom metrics) |
|
||||||
|
| 11.11. | Tests | 1 | Investigated and fixed broken Pytest environment | `fix(tests): set pytest env` |
|
||||||
|
| 11.11. - 12.11. | Features & Deployment | 6 | Added cron support, email sender service, updated workers & image | `PR #49` (Email), `PR #50` (Update workers) |
|
||||||
|
| 18.9 - 14.11 | Documentation | 8 | Updated report.md, design docs, and tfvars.example | `Create design.md`, `update report` |
|
||||||
|
| **Total** | | **105** | | |
|
||||||
|
|
||||||
|
### Dejan
|
||||||
|
|
||||||
|
| Date | Activity | Hours | Description | Representative Commit / PR |
|
||||||
|
|:----------------|:---------------------|:-------|:--------------------------------------------------------------|:---------------------------------------------------------|
|
||||||
|
| 25.9. | Design | 2 | 6design | |
|
||||||
|
| 9.10 to 11.10. | Backend APIs | 14 | Implemented Backend APIs | `PR #26`, `20-create-a-controller-layer-on-backend-side` |
|
||||||
|
| 13.10 to 15.10. | Frontend Development | 8 | Created user interface mockups | `PR #28`, `frontend basics` |
|
||||||
|
| Continually | Documentation | 7 | Documenting the dev process | |
|
||||||
|
| 21.10 to 23.10 | Tests, frontend | 10 | Test basics, balance charts, and frontend improvement | `PR #31`, `30 create tests and set up a GitHub pipeline` |
|
||||||
|
| 28.10 to 30.10 | CI | 6 | Integrated tests with test database setup on github workflows | `PR #28`, `frontend basics` |
|
||||||
|
| 28.10 to 30.10 | Frontend | 8 | UI improvements and exchange rate API integration | `PR #28`, `frontend basics` |
|
||||||
|
| 4.11 to 6.11 | Tests | 6 | Test fixes improvement, more integration and e2e | `PR #28`, `frontend basics` |
|
||||||
|
| 4.11 to 6.11 | Frontend | 6 | Fixes, Improved UI, added support for mobile devices | `PR #28`, `frontend basics` |
|
||||||
|
| 11.11 | Backend APIs | 4 | Moved rates API, mock bank to Backend, few fixes | `PR #28`, `frontend basics` |
|
||||||
|
| 11.11 to 12.11 | Tests | 3 | Local testing DB container, few fixes | `PR #28`, `frontend basics` |
|
||||||
|
| 12.11 | Frontend | 3 | Enabled multiple transaction edits at once, CSAS button state | `PR #28`, `frontend basics` |
|
||||||
|
| 13.11 | Video | 3 | Video | |
|
||||||
|
| **Total** | | **80** | | |
|
||||||
|
|
||||||
|
### Group Total: 185 hours
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Final Reflection
|
||||||
|
|
||||||
|
### What We Learned
|
||||||
|
|
||||||
|
[Reflect on the key technical and collaboration skills learned during this project]
|
||||||
|
|
||||||
|
### Challenges Faced
|
||||||
|
|
||||||
|
#### Slow cluster performance
|
||||||
|
|
||||||
|
This was caused by a single SATA SSD running all the VMs. It was solved by adding a second NVMe disk dedicated to the Talos VMs.
|
||||||
|
|
||||||
|
#### Stuck IaC deployments
|
||||||
|
|
||||||
|
If a deployed module (a Helm chart, for example) was not configured properly, it would get stuck and time out, resulting in a
|
||||||
|
namespace that could not be deleted.
|
||||||
|
This was solved by taking snapshots in Proxmox and restoring them whenever this happened.
|
||||||
|
|
||||||
|
### If We Did This Again
|
||||||
|
|
||||||
|
#### Different framework
|
||||||
|
|
||||||
|
FastAPI lacks usable built-in support for database migrations, and implementing Alembic was a bit tricky.
|
||||||
|
Integrating the FastAPI auth system with the React frontend was also tricky, since there is no official project template.
|
||||||
|
Using .NET (which we considered initially) would probably solve these issues.
|
||||||
|
|
||||||
|
#### Private container registry
|
||||||
|
|
||||||
|
Using a private container registry would allow us to include environment variables directly in the image during the build.
|
||||||
|
This would simplify deployment and CI/CD setup.
|
||||||
|
|
||||||
|
[What would you do differently? What worked well that you'd keep?]
|
||||||
|
|
||||||
|
### Individual Growth
|
||||||
|
|
||||||
|
#### Lukas
|
||||||
|
|
||||||
|
This course finally forced me to learn Kubernetes (it has been on my TODO list for at least 3 years).
|
||||||
|
I had some prior experience with Terraform/OpenTofu from work, but this improved my understanding of it.
|
||||||
|
|
||||||
|
The biggest challenge for me was time tracking, since I am used to tracking time per project, not per task.
|
||||||
|
(I am bad even at that :) ).
|
||||||
|
|
||||||
|
It was also an interesting experience to be the one responsible for the initial project structure/design/setup
|
||||||
|
used not only by myself.
|
||||||
|
|
||||||
|
#### Dejan
|
||||||
|
|
||||||
|
Since I do not have a job, this project was probably the most complex one I have ever worked on.
|
||||||
|
It was also the first school project where I was encouraged to use AI.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Report Completion Date**: [Date]
|
||||||
|
**Last Updated**: 13.11.2025
|
||||||
7project/src/README.md (new file, 23 lines)
@@ -0,0 +1,23 @@
|
|||||||
|
## Folder structure
|
||||||
|
|
||||||
|
- `src/`
|
||||||
|
- `backend/`
|
||||||
|
- `alembic/` - database migrations
|
||||||
|
- `app/` - main application code
|
||||||
|
- `tests/` - tests
|
||||||
|
- `docker-compose.test.yml` - docker compose for testing database
|
||||||
|
- `Dockerfile` - production Dockerfile
|
||||||
|
- `main.py` - App entrypoint
|
||||||
|
- `requirements.txt` - Python dependencies
|
||||||
|
- `test_locally.sh` - script to run tests with temporary database
|
||||||
|
- `charts/`
|
||||||
|
- `myapp-chart/` - Helm chart for deploying the application, supports prod and dev environments
|
||||||
|
- `frontend/` - React frontend application
|
||||||
|
- `tofu/` - Terraform/OpenTofu services deployment configurations
|
||||||
|
- `modules/` - separated modules for different services
|
||||||
|
- `main.tf` - main deployment configuration
|
||||||
|
- `variables.tf` - deployment variables
|
||||||
|
- `terraform.tfvars.example` - example variables file
|
||||||
|
- `compose.yaml` - Docker Compose file for local development
|
||||||
|
- `create_migration.sh` - script to create new Alembic database migration
|
||||||
|
- `upgrade_database.sh` - script to upgrade database to latest Alembic revision
|
||||||
7project/src/backend/.idea/.gitignore (generated, vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
|
|||||||
|
# Default ignored files
|
||||||
|
/shelf/
|
||||||
|
/workspace.xml
|
||||||
|
# Editor-based HTTP Client requests
|
||||||
|
/httpRequests/
|
||||||
|
# Datasource local storage ignored files
|
||||||
|
/dataSources/
|
||||||
|
/dataSources.local.xml
|
||||||
@@ -1,7 +1,8 @@
|
|||||||
FROM python:3.11-slim
|
FROM python:3.11-trixie
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
COPY . .
|
COPY . .
|
||||||
EXPOSE 8000
|
EXPOSE 8000
|
||||||
CMD alembic upgrade head && uvicorn app.app:app --host 0.0.0.0 --port 8000
|
CMD alembic upgrade head && uvicorn app.app:fastApi --host 0.0.0.0 --port 8000
|
||||||
@@ -11,7 +11,7 @@ script_location = %(here)s/alembic
|
|||||||
# Uncomment the line below if you want the files to be prepended with date and time
|
# Uncomment the line below if you want the files to be prepended with date and time
|
||||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||||
# for all available tokens
|
# for all available tokens
|
||||||
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||||
|
|
||||||
# sys.path path, will be prepended to sys.path if present.
|
# sys.path path, will be prepended to sys.path if present.
|
||||||
# defaults to the current working directory. for multiple paths, the path separator
|
# defaults to the current working directory. for multiple paths, the path separator
|
||||||
@@ -25,7 +25,8 @@ if not DATABASE_URL:
|
|||||||
|
|
||||||
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
||||||
|
|
||||||
ssl_enabled = os.getenv("MARIADB_HOST", "localhost") != "localhost"
|
host_env = os.getenv("MARIADB_HOST", "localhost")
|
||||||
|
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
|
||||||
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
||||||
|
|
||||||
def run_migrations_offline() -> None:
|
def run_migrations_offline() -> None:
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
"""Init migration
|
"""add categories
|
||||||
|
|
||||||
Revision ID: 81f275275556
|
Revision ID: 63e072f09836
|
||||||
Revises:
|
Revises:
|
||||||
Create Date: 2025-09-24 17:39:25.346690
|
Create Date: 2025-10-09 14:56:14.653249
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from typing import Sequence, Union
|
from typing import Sequence, Union
|
||||||
@@ -13,7 +13,7 @@ import sqlalchemy as sa
|
|||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
revision: str = '81f275275556'
|
revision: str = '63e072f09836'
|
||||||
down_revision: Union[str, Sequence[str], None] = None
|
down_revision: Union[str, Sequence[str], None] = None
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
@@ -22,12 +22,6 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||||||
def upgrade() -> None:
|
def upgrade() -> None:
|
||||||
"""Upgrade schema."""
|
"""Upgrade schema."""
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
op.create_table('transaction',
|
|
||||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
|
||||||
sa.Column('amount', sa.Float(), nullable=False),
|
|
||||||
sa.Column('description', sa.String(length=255), nullable=True),
|
|
||||||
sa.PrimaryKeyConstraint('id')
|
|
||||||
)
|
|
||||||
op.create_table('user',
|
op.create_table('user',
|
||||||
sa.Column('first_name', sa.String(length=100), nullable=True),
|
sa.Column('first_name', sa.String(length=100), nullable=True),
|
||||||
sa.Column('last_name', sa.String(length=100), nullable=True),
|
sa.Column('last_name', sa.String(length=100), nullable=True),
|
||||||
@@ -40,13 +34,38 @@ def upgrade() -> None:
|
|||||||
sa.PrimaryKeyConstraint('id')
|
sa.PrimaryKeyConstraint('id')
|
||||||
)
|
)
|
||||||
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
|
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
|
||||||
|
op.create_table('categories',
|
||||||
|
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||||
|
sa.Column('name', sa.String(length=100), nullable=False),
|
||||||
|
sa.Column('description', sa.String(length=255), nullable=True),
|
||||||
|
sa.Column('user_id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id'),
|
||||||
|
sa.UniqueConstraint('name')
|
||||||
|
)
|
||||||
|
op.create_table('transaction',
|
||||||
|
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||||
|
sa.Column('amount', sa.Float(), nullable=False),
|
||||||
|
sa.Column('description', sa.String(length=255), nullable=True),
|
||||||
|
sa.Column('user_id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_table('category_transaction',
|
||||||
|
sa.Column('id_category', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('id_transaction', sa.Integer(), nullable=True),
|
||||||
|
sa.ForeignKeyConstraint(['id_category'], ['categories.id'], ),
|
||||||
|
sa.ForeignKeyConstraint(['id_transaction'], ['transaction.id'], )
|
||||||
|
)
|
||||||
# ### end Alembic commands ###
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
def downgrade() -> None:
|
||||||
"""Downgrade schema."""
|
"""Downgrade schema."""
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_table('category_transaction')
|
||||||
|
op.drop_table('transaction')
|
||||||
|
op.drop_table('categories')
|
||||||
op.drop_index(op.f('ix_user_email'), table_name='user')
|
op.drop_index(op.f('ix_user_email'), table_name='user')
|
||||||
op.drop_table('user')
|
op.drop_table('user')
|
||||||
op.drop_table('transaction')
|
|
||||||
# ### end Alembic commands ###
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
"""update categories unique
|
||||||
|
|
||||||
|
Revision ID: 390041bd839e
|
||||||
|
Revises: 63e072f09836
|
||||||
|
Create Date: 2025-10-09 15:14:31.557686
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '390041bd839e'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '63e072f09836'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_index(op.f('name'), table_name='categories')
|
||||||
|
op.create_unique_constraint('uix_name_user_id', 'categories', ['name', 'user_id'])
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_constraint('uix_name_user_id', 'categories', type_='unique')
|
||||||
|
op.create_index(op.f('name'), 'categories', ['name'], unique=True)
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,48 @@
|
|||||||
|
"""add user oauth
|
||||||
|
|
||||||
|
Revision ID: 7af8f296d089
|
||||||
|
Revises: 390041bd839e
|
||||||
|
Create Date: 2025-10-10 14:05:00.153376
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import fastapi_users_db_sqlalchemy
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '7af8f296d089'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '390041bd839e'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table('oauth_account',
|
||||||
|
sa.Column('id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
||||||
|
sa.Column('user_id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
||||||
|
sa.Column('oauth_name', sa.String(length=100), nullable=False),
|
||||||
|
sa.Column('access_token', sa.String(length=1024), nullable=False),
|
||||||
|
sa.Column('expires_at', sa.Integer(), nullable=True),
|
||||||
|
sa.Column('refresh_token', sa.String(length=1024), nullable=True),
|
||||||
|
sa.Column('account_id', sa.String(length=320), nullable=False),
|
||||||
|
sa.Column('account_email', sa.String(length=320), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='cascade'),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
|
op.create_index(op.f('ix_oauth_account_account_id'), 'oauth_account', ['account_id'], unique=False)
|
||||||
|
op.create_index(op.f('ix_oauth_account_oauth_name'), 'oauth_account', ['oauth_name'], unique=False)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_index(op.f('ix_oauth_account_oauth_name'), table_name='oauth_account')
|
||||||
|
op.drop_index(op.f('ix_oauth_account_account_id'), table_name='oauth_account')
|
||||||
|
op.drop_table('oauth_account')
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
"""change token length
|
||||||
|
|
||||||
|
Revision ID: 5ab2e654c96e
|
||||||
|
Revises: 7af8f296d089
|
||||||
|
Create Date: 2025-10-11 21:07:41.930470
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import mysql
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '5ab2e654c96e'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '7af8f296d089'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.alter_column('oauth_account', 'access_token',
|
||||||
|
existing_type=mysql.VARCHAR(length=1024),
|
||||||
|
type_=sa.String(length=4096),
|
||||||
|
existing_nullable=False)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.alter_column('oauth_account', 'access_token',
|
||||||
|
existing_type=sa.String(length=4096),
|
||||||
|
type_=mysql.VARCHAR(length=1024),
|
||||||
|
existing_nullable=False)
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
"""add config to user
|
||||||
|
|
||||||
|
Revision ID: eabec90a94fe
|
||||||
|
Revises: 5ab2e654c96e
|
||||||
|
Create Date: 2025-10-21 18:56:42.085973
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = 'eabec90a94fe'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '5ab2e654c96e'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column('user', sa.Column('config', sa.JSON(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column('user', 'config')
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
"""add date to transaction
|
||||||
|
|
||||||
|
Revision ID: 1f2a3c4d5e6f
|
||||||
|
Revises: eabec90a94fe
|
||||||
|
Create Date: 2025-10-22 16:18:00
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '1f2a3c4d5e6f'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = 'eabec90a94fe'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema by adding date column with server default current_date."""
|
||||||
|
op.add_column(
|
||||||
|
'transaction',
|
||||||
|
sa.Column('date', sa.Date(), nullable=False, server_default=sa.text('CURRENT_DATE'))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema by removing date column."""
|
||||||
|
op.drop_column('transaction', 'date')
|
||||||
@@ -0,0 +1,47 @@
"""Add encrypted type

Revision ID: 46b9e702e83f
Revises: 1f2a3c4d5e6f
Create Date: 2025-10-29 13:26:24.568523

"""
from typing import Sequence, Union

import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# revision identifiers, used by Alembic.
revision: str = '46b9e702e83f'
down_revision: Union[str, Sequence[str], None] = '1f2a3c4d5e6f'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('transaction', 'amount',
               existing_type=mysql.FLOAT(),
               type_=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
               existing_nullable=False)
    op.alter_column('transaction', 'description',
               existing_type=mysql.VARCHAR(length=255),
               type_=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
               existing_nullable=True)
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('transaction', 'description',
               existing_type=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
               type_=mysql.VARCHAR(length=255),
               existing_nullable=True)
    op.alter_column('transaction', 'amount',
               existing_type=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
               type_=mysql.FLOAT(),
               existing_nullable=False)
    # ### end Alembic commands ###
@@ -0,0 +1,46 @@
"""Cascade categories

Revision ID: 59cebf320c4a
Revises: 46b9e702e83f
Create Date: 2025-10-30 13:42:44.555284

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# revision identifiers, used by Alembic.
revision: str = '59cebf320c4a'
down_revision: Union[str, Sequence[str], None] = '46b9e702e83f'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('category_transaction', sa.Column('category_id', sa.Integer(), nullable=False))
    op.add_column('category_transaction', sa.Column('transaction_id', sa.Integer(), nullable=False))
    op.drop_constraint(op.f('category_transaction_ibfk_2'), 'category_transaction', type_='foreignkey')
    op.drop_constraint(op.f('category_transaction_ibfk_1'), 'category_transaction', type_='foreignkey')
    op.create_foreign_key(None, 'category_transaction', 'transaction', ['transaction_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'category_transaction', 'categories', ['category_id'], ['id'], ondelete='CASCADE')
    op.drop_column('category_transaction', 'id_category')
    op.drop_column('category_transaction', 'id_transaction')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('category_transaction', sa.Column('id_transaction', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
    op.add_column('category_transaction', sa.Column('id_category', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
    op.drop_constraint(None, 'category_transaction', type_='foreignkey')
    op.drop_constraint(None, 'category_transaction', type_='foreignkey')
    op.create_foreign_key(op.f('category_transaction_ibfk_1'), 'category_transaction', 'categories', ['id_category'], ['id'])
    op.create_foreign_key(op.f('category_transaction_ibfk_2'), 'category_transaction', 'transaction', ['id_transaction'], ['id'])
    op.drop_column('category_transaction', 'transaction_id')
    op.drop_column('category_transaction', 'category_id')
    # ### end Alembic commands ###
66
7project/src/backend/app/api/auth.py
Normal file
@@ -0,0 +1,66 @@
from fastapi import APIRouter, Depends, status
from fastapi_users import models
from fastapi_users.manager import BaseUserManager

from app.schemas.user import UserCreate, UserRead, UserUpdate
from app.services.user_service import auth_backend, fastapi_users

router = APIRouter()

@router.delete(
    "/users/me",
    status_code=status.HTTP_204_NO_CONTENT,
    tags=["users"],
    summary="Delete current user",
    response_description="The user has been successfully deleted.",
)
async def delete_me(
    user: models.UserProtocol = Depends(fastapi_users.current_user(active=True)),
    user_manager: BaseUserManager = Depends(fastapi_users.get_user_manager),
):
    """
    Delete the currently authenticated user.
    """
    await user_manager.delete(user)

# Keep existing paths as-is under /auth/* and /users/*
from fastapi import Request, Response
from app.core.security import revoke_token, extract_bearer_token


@router.post(
    "/auth/jwt/logout",
    status_code=status.HTTP_204_NO_CONTENT,
    tags=["auth"],
    summary="Log out and revoke current token",
)
async def custom_logout(request: Request) -> Response:
    """Revoke the current bearer token so it cannot be used anymore."""
    token = extract_bearer_token(request)
    if token:
        revoke_token(token)
    return Response(status_code=status.HTTP_204_NO_CONTENT)

router.include_router(
    fastapi_users.get_auth_router(auth_backend), prefix="/auth/jwt", tags=["auth"]
)
router.include_router(
    fastapi_users.get_register_router(UserRead, UserCreate),
    prefix="/auth",
    tags=["auth"],
)
router.include_router(
    fastapi_users.get_reset_password_router(),
    prefix="/auth",
    tags=["auth"],
)
router.include_router(
    fastapi_users.get_verify_router(UserRead),
    prefix="/auth",
    tags=["auth"],
)
router.include_router(
    fastapi_users.get_users_router(UserRead, UserUpdate),
    prefix="/users",
    tags=["users"],
)
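A minimal sketch of how the logout/revocation flow above could be exercised end to end. The base URL and credentials are placeholders; the /auth/jwt/login form fields follow the fastapi-users convention used by get_auth_router.

# Hedged sketch: login -> authenticated call -> logout -> revoked token is rejected.
import httpx

BASE = "http://localhost:8000"  # assumption: backend served locally

with httpx.Client(base_url=BASE) as client:
    # fastapi-users' JWT login expects form-encoded username/password
    login = client.post("/auth/jwt/login",
                        data={"username": "user@example.com", "password": "secret"})
    token = login.json()["access_token"]
    headers = {"Authorization": f"Bearer {token}"}

    client.get("/authenticated-route", headers=headers)   # succeeds while the token is valid
    client.post("/auth/jwt/logout", headers=headers)       # 204, token added to the revocation store
    r = client.get("/authenticated-route", headers=headers)
    assert r.status_code == 401  # the auth_guard middleware rejects revoked tokens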
108
7project/src/backend/app/api/categories.py
Normal file
@@ -0,0 +1,108 @@
from typing import List

from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy import select, delete
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.categories import Category
from app.schemas.category import CategoryCreate, CategoryRead, CategoryUpdate
from app.services.db import get_async_session
from app.services.user_service import current_active_user
from app.models.user import User

router = APIRouter(prefix="/categories", tags=["categories"])


@router.post("/create", response_model=CategoryRead, status_code=status.HTTP_201_CREATED)
async def create_category(
    payload: CategoryCreate,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    # Enforce per-user unique name via query to provide 409 feedback
    res = await session.execute(
        select(Category).where(Category.user_id == user.id, Category.name == payload.name)
    )
    existing = res.scalar_one_or_none()
    if existing:
        raise HTTPException(status_code=409, detail="Category with this name already exists")

    category = Category(name=payload.name, description=payload.description, user_id=user.id)
    session.add(category)
    await session.commit()
    await session.refresh(category)
    return category


@router.get("/", response_model=List[CategoryRead])
async def list_categories(
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    res = await session.execute(select(Category).where(Category.user_id == user.id))
    return list(res.scalars())


@router.patch("/{category_id}", response_model=CategoryRead)
async def update_category(
    category_id: int,
    payload: CategoryUpdate,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    res = await session.execute(
        select(Category).where(Category.id == category_id, Category.user_id == user.id)
    )
    category = res.scalar_one_or_none()
    if not category:
        raise HTTPException(status_code=404, detail="Category not found")

    # If name changed, check uniqueness per user
    if payload.name is not None and payload.name != category.name:
        dup = await session.execute(
            select(Category.id).where(Category.user_id == user.id, Category.name == payload.name)
        )
        if dup.scalar_one_or_none() is not None:
            raise HTTPException(status_code=409, detail="Category with this name already exists")
        category.name = payload.name

    if payload.description is not None:
        category.description = payload.description

    await session.commit()
    await session.refresh(category)
    return category


@router.get("/{category_id}", response_model=CategoryRead)
async def get_category(
    category_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    res = await session.execute(
        select(Category).where(Category.id == category_id, Category.user_id == user.id)
    )
    category = res.scalar_one_or_none()
    if not category:
        raise HTTPException(status_code=404, detail="Category not found")
    return category


@router.delete("/{category_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_category(
    category_id: int,
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    res = await session.execute(
        select(Category.id).where(Category.id == category_id, Category.user_id == user.id)
    )
    if res.scalar_one_or_none() is None:
        raise HTTPException(status_code=404, detail="Category not found")

    await session.execute(
        delete(Category).where(Category.id == category_id, Category.user_id == user.id)
    )
    await session.commit()
    return None
40
7project/src/backend/app/api/csas.py
Normal file
@@ -0,0 +1,40 @@
import json
import os

from fastapi import APIRouter
from fastapi.params import Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.models.user import User
from app.oauth.csas import CSASOAuth
from app.services.db import get_async_session
from app.services.user_service import current_active_user

router = APIRouter(prefix="/auth/csas", tags=["csas"])

CLIENT_ID = os.getenv("CSAS_CLIENT_ID")
CLIENT_SECRET = os.getenv("CSAS_CLIENT_SECRET")
CSAS_OAUTH = CSASOAuth(CLIENT_ID, CLIENT_SECRET)


@router.get("/authorize")
async def csas_authorize():
    return {"authorization_url":
            await CSAS_OAUTH.get_authorization_url(os.getenv("FRONTEND_DOMAIN_SCHEME") + "/auth/csas/callback")}


@router.get("/callback")
async def csas_callback(code: str, session: AsyncSession = Depends(get_async_session),
                        user: User = Depends(current_active_user)):
    response = await CSAS_OAUTH.get_access_token(code, os.getenv("FRONTEND_DOMAIN_SCHEME") + "/auth/csas/callback")

    if not user.config:
        user.config = {}

    new_dict = user.config.copy()
    new_dict["csas"] = json.dumps(response)

    user.config = new_dict
    await session.commit()

    return "OK"
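The callback above stores the raw token response as a JSON string under user.config["csas"]. A small hedged sketch of reading it back out; the helper name is illustrative and not part of the change.

# Hedged sketch: retrieving the CSAS token payload stored by /auth/csas/callback.
import json

def get_csas_tokens(user) -> dict | None:
    """Return the parsed CSAS OAuth token payload for this user, or None if not linked."""
    raw = (user.config or {}).get("csas")
    return json.loads(raw) if raw else None

# tokens = get_csas_tokens(user)
# access_token = tokens["access_token"] if tokens else None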
66
7project/src/backend/app/api/exchange_rates.py
Normal file
@@ -0,0 +1,66 @@
import os
from typing import List

import httpx
from fastapi import APIRouter, HTTPException, Query, status

router = APIRouter(prefix="/exchange-rates", tags=["exchange-rates"])


@router.get("", status_code=status.HTTP_200_OK)
async def get_exchange_rates(symbols: str = Query("EUR,USD,NOK", description="Comma-separated currency codes to fetch vs CZK")):
    """
    Fetch exchange rates from UniRate API on the backend and return CZK-per-target rates.
    - Always requests CZK in addition to requested symbols to compute conversion from USD-base.
    - Returns a list of {currencyCode, rate} where rate is CZK per 1 unit of the target currency.
    """
    api_key = os.getenv("UNIRATE_API_KEY")
    if not api_key:
        raise HTTPException(status_code=500, detail="Server is not configured with UNIRATE_API_KEY")

    # Ensure CZK is included for conversion
    requested = [s.strip().upper() for s in symbols.split(",") if s.strip()]
    if "CZK" not in requested:
        requested.append("CZK")
    query_symbols = ",".join(sorted(set(requested)))

    url = f"https://unirateapi.com/api/rates?api_key={api_key}&symbols={query_symbols}"

    try:
        async with httpx.AsyncClient(timeout=httpx.Timeout(15.0)) as client:
            resp = await client.get(url)
            if resp.status_code != httpx.codes.OK:
                raise HTTPException(status_code=502, detail=f"Upstream UniRate error: HTTP {resp.status_code}")
            data = resp.json()
    except httpx.HTTPError as e:
        raise HTTPException(status_code=502, detail=f"Failed to contact UniRate: {str(e)}")

    # Validate response structure
    rates = data.get("rates") if isinstance(data, dict) else None
    base = data.get("base") if isinstance(data, dict) else None
    if not rates or base != "USD" or "CZK" not in rates:
        # Prefer upstream message when available
        detail = data.get("message") if isinstance(data, dict) else None
        if not detail and isinstance(data, dict):
            err = data.get("error")
            if isinstance(err, dict):
                detail = err.get("info")
        raise HTTPException(status_code=502, detail=detail or "Invalid response from UniRate API")

    czk_per_usd = rates["CZK"]

    # Build result excluding CZK itself
    result = []
    for code in requested:
        if code == "CZK":
            continue
        target_per_usd = rates.get(code)
        if target_per_usd in (None, 0):
            # Skip unavailable or invalid
            continue
        czk_per_target = czk_per_usd / target_per_usd
        result.append({"currencyCode": code, "rate": czk_per_target})

    return result
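The endpoint assumes the upstream response is USD-based and derives CZK per target as czk_per_usd / target_per_usd. A tiny worked sketch of that arithmetic; the numbers are illustrative only, not real rates.

# Illustrative numbers: if 1 USD = 23.0 CZK and 1 USD = 0.92 EUR,
# then 1 EUR = 23.0 / 0.92 = 25.0 CZK, which is what the endpoint returns.
rates = {"CZK": 23.0, "EUR": 0.92, "NOK": 10.5}  # USD-base, as the endpoint expects
czk_per_usd = rates["CZK"]
print({code: round(czk_per_usd / per_usd, 4)
       for code, per_usd in rates.items() if code != "CZK"})
# {'EUR': 25.0, 'NOK': 2.1905}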
116
7project/src/backend/app/api/mock_bank.py
Normal file
@@ -0,0 +1,116 @@
from datetime import datetime, timedelta
from typing import List, Optional
import random

from fastapi import APIRouter, Depends
from pydantic import BaseModel, Field, conint, confloat, validator
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.services.db import get_async_session
from app.services.user_service import current_active_user
from app.models.user import User
from app.models.transaction import Transaction
from app.models.categories import Category
from app.schemas.transaction import TransactionRead

router = APIRouter(prefix="/mock-bank", tags=["mock-bank"])


class GenerateOptions(BaseModel):
    count: conint(strict=True, gt=0) = Field(default=10, description="Number of transactions to generate")
    minAmount: confloat(strict=True) = Field(default=-200.0, description="Minimum transaction amount")
    maxAmount: confloat(strict=True) = Field(default=200.0, description="Maximum transaction amount")
    startDate: Optional[str] = Field(None, description="Earliest date (YYYY-MM-DD)")
    endDate: Optional[str] = Field(None, description="Latest date (YYYY-MM-DD)")
    categoryIds: List[int] = Field(default_factory=list, description="Optional category IDs to assign randomly")

    @validator("maxAmount")
    def _validate_amounts(cls, v, values):
        min_amt = values.get("minAmount")
        if min_amt is not None and v < min_amt:
            raise ValueError("maxAmount must be greater than or equal to minAmount")
        return v

    @validator("endDate")
    def _validate_dates(cls, v, values):
        sd = values.get("startDate")
        if v and sd:
            try:
                ed = datetime.strptime(v, "%Y-%m-%d").date()
                st = datetime.strptime(sd, "%Y-%m-%d").date()
            except ValueError:
                raise ValueError("Invalid date format, expected YYYY-MM-DD")
            if ed < st:
                raise ValueError("endDate must be greater than or equal to startDate")
        return v


class GeneratedTransaction(BaseModel):
    amount: float
    date: str  # YYYY-MM-DD
    category_ids: List[int] = []
    description: Optional[str] = None


@router.post("/generate", response_model=List[GeneratedTransaction])
async def generate_mock_transactions(
    options: GenerateOptions,
    user: User = Depends(current_active_user),
):
    # Seed randomness per user to make results less erratic across multiple calls in quick succession
    seed = int(datetime.utcnow().timestamp()) ^ int(user.id)
    rnd = random.Random(seed)

    # Determine date range
    if options.startDate:
        start_date = datetime.strptime(options.startDate, "%Y-%m-%d").date()
    else:
        start_date = (datetime.utcnow() - timedelta(days=365)).date()
    if options.endDate:
        end_date = datetime.strptime(options.endDate, "%Y-%m-%d").date()
    else:
        end_date = datetime.utcnow().date()

    span_days = max(0, (end_date - start_date).days)

    results: List[GeneratedTransaction] = []
    for _ in range(options.count):
        amount = round(rnd.uniform(options.minAmount, options.maxAmount), 2)
        # Pick a random date in the inclusive range
        rand_day = rnd.randint(0, span_days) if span_days > 0 else 0
        tx_date = start_date + timedelta(days=rand_day)
        # Pick category randomly from provided list, or empty
        if options.categoryIds:
            cat = [rnd.choice(options.categoryIds)]
        else:
            cat = []
        # Optional simple description for flavor
        desc = None
        # Assemble
        results.append(GeneratedTransaction(
            amount=amount,
            date=tx_date.isoformat(),
            category_ids=cat,
            description=desc,
        ))

    return results


@router.get("/scrape")
async def scrape_mock_bank():
    # 80% of the time: nothing to scrape
    if random.random() < 0.8:
        return []

    transactions = []
    count = random.randint(1, 10)
    for _ in range(count):
        transactions.append({
            "amount": round(random.uniform(-200.0, 200.0), 2),
            "date": (datetime.utcnow().date() - timedelta(days=random.randint(0, 30))).isoformat(),
            "description": "Mock transaction",
        })

    return transactions
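A hedged usage sketch for the /mock-bank/generate endpoint above; the base URL and bearer token are placeholders and the payload mirrors the GenerateOptions schema.

# Hedged sketch: request 5 mock transactions for March 2025 as an authenticated user.
import httpx

payload = {
    "count": 5,
    "minAmount": -500.0,
    "maxAmount": 500.0,
    "startDate": "2025-03-01",
    "endDate": "2025-03-31",
    "categoryIds": [],
}
r = httpx.post("http://localhost:8000/mock-bank/generate",
               json=payload, headers={"Authorization": "Bearer TOKEN"})
print(r.json())  # list of {"amount", "date", "category_ids", "description"}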
280
7project/src/backend/app/api/transactions.py
Normal file
280
7project/src/backend/app/api/transactions.py
Normal file
@@ -0,0 +1,280 @@
|
|||||||
|
from typing import List, Optional
|
||||||
|
from datetime import date
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
from sqlalchemy import select, and_, func
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from app.models.transaction import Transaction
|
||||||
|
from app.models.categories import Category
|
||||||
|
from app.schemas.transaction import (
|
||||||
|
TransactionCreate,
|
||||||
|
TransactionRead,
|
||||||
|
TransactionUpdate,
|
||||||
|
)
|
||||||
|
from app.services.db import get_async_session
|
||||||
|
from app.services.user_service import current_active_user
|
||||||
|
from app.models.user import User
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/transactions", tags=["transactions"])
|
||||||
|
|
||||||
|
|
||||||
|
def _to_read_model(tx: Transaction) -> TransactionRead:
|
||||||
|
return TransactionRead(
|
||||||
|
id=tx.id,
|
||||||
|
amount=tx.amount,
|
||||||
|
description=tx.description,
|
||||||
|
date=tx.date,
|
||||||
|
category_ids=[c.id for c in (tx.categories or [])],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/create", response_model=TransactionRead, status_code=status.HTTP_201_CREATED)
|
||||||
|
async def create_transaction(
|
||||||
|
payload: TransactionCreate,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
# Build transaction; set `date` only if provided to let DB default apply otherwise
|
||||||
|
tx_kwargs = dict(
|
||||||
|
amount=payload.amount,
|
||||||
|
description=payload.description,
|
||||||
|
user_id=user.id,
|
||||||
|
)
|
||||||
|
if payload.date is not None:
|
||||||
|
parsed_date = payload.date
|
||||||
|
if isinstance(parsed_date, str):
|
||||||
|
try:
|
||||||
|
parsed_date = date.fromisoformat(parsed_date)
|
||||||
|
except ValueError:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid date format, expected YYYY-MM-DD")
|
||||||
|
tx_kwargs["date"] = parsed_date
|
||||||
|
tx = Transaction(**tx_kwargs)
|
||||||
|
|
||||||
|
# Attach categories if provided (and owned by user)
|
||||||
|
if payload.category_ids:
|
||||||
|
res = await session.execute(
|
||||||
|
select(Category).where(
|
||||||
|
Category.user_id == user.id, Category.id.in_(payload.category_ids)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
categories = list(res.scalars())
|
||||||
|
if len(categories) != len(set(payload.category_ids)):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="Duplicate category IDs provided or one or more categories not found"
|
||||||
|
)
|
||||||
|
tx.categories = categories
|
||||||
|
|
||||||
|
session.add(tx)
|
||||||
|
await session.commit()
|
||||||
|
await session.refresh(tx)
|
||||||
|
# Ensure categories are loaded
|
||||||
|
await session.refresh(tx, attribute_names=["categories"])
|
||||||
|
return _to_read_model(tx)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/", response_model=List[TransactionRead])
|
||||||
|
async def list_transactions(
|
||||||
|
start_date: Optional[date] = None,
|
||||||
|
end_date: Optional[date] = None,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
cond = [Transaction.user_id == user.id]
|
||||||
|
if start_date is not None:
|
||||||
|
cond.append(Transaction.date >= start_date)
|
||||||
|
if end_date is not None:
|
||||||
|
cond.append(Transaction.date <= end_date)
|
||||||
|
res = await session.execute(
|
||||||
|
select(Transaction).where(and_(*cond)).order_by(Transaction.date, Transaction.id)
|
||||||
|
)
|
||||||
|
txs = list(res.scalars())
|
||||||
|
# Eagerly load categories for each transaction
|
||||||
|
for tx in txs:
|
||||||
|
await session.refresh(tx, attribute_names=["categories"])
|
||||||
|
return [_to_read_model(tx) for tx in txs]
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/balance_series")
|
||||||
|
async def get_balance_series(
|
||||||
|
start_date: Optional[date] = None,
|
||||||
|
end_date: Optional[date] = None,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
cond = [Transaction.user_id == user.id]
|
||||||
|
if start_date is not None:
|
||||||
|
cond.append(Transaction.date >= start_date)
|
||||||
|
if end_date is not None:
|
||||||
|
cond.append(Transaction.date <= end_date)
|
||||||
|
res = await session.execute(
|
||||||
|
select(Transaction).where(and_(*cond)).order_by(Transaction.date, Transaction.id)
|
||||||
|
)
|
||||||
|
txs = list(res.scalars())
|
||||||
|
# Group by date and accumulate
|
||||||
|
daily = {}
|
||||||
|
for tx in txs:
|
||||||
|
key = tx.date.isoformat() if hasattr(tx.date, 'isoformat') else str(tx.date)
|
||||||
|
daily[key] = daily.get(key, 0.0) + float(tx.amount)
|
||||||
|
# Build cumulative series sorted by date
|
||||||
|
series = []
|
||||||
|
running = 0.0
|
||||||
|
for d in sorted(daily.keys()):
|
||||||
|
running += daily[d]
|
||||||
|
series.append({"date": d, "balance": running})
|
||||||
|
return series
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{transaction_id}", response_model=TransactionRead)
|
||||||
|
async def get_transaction(
|
||||||
|
transaction_id: int,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
res = await session.execute(
|
||||||
|
select(Transaction).where(
|
||||||
|
Transaction.id == transaction_id, Transaction.user_id == user.id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
tx: Optional[Transaction] = res.scalar_one_or_none()
|
||||||
|
if not tx:
|
||||||
|
raise HTTPException(status_code=404, detail="Transaction not found")
|
||||||
|
await session.refresh(tx, attribute_names=["categories"])
|
||||||
|
return _to_read_model(tx)
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{transaction_id}/edit", response_model=TransactionRead)
|
||||||
|
async def update_transaction(
|
||||||
|
transaction_id: int,
|
||||||
|
payload: TransactionUpdate,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
res = await session.execute(
|
||||||
|
select(Transaction).where(
|
||||||
|
Transaction.id == transaction_id, Transaction.user_id == user.id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
tx: Optional[Transaction] = res.scalar_one_or_none()
|
||||||
|
if not tx:
|
||||||
|
raise HTTPException(status_code=404, detail="Transaction not found")
|
||||||
|
|
||||||
|
if payload.amount is not None:
|
||||||
|
tx.amount = payload.amount
|
||||||
|
if payload.description is not None:
|
||||||
|
tx.description = payload.description
|
||||||
|
if payload.date is not None:
|
||||||
|
new_date = payload.date
|
||||||
|
if isinstance(new_date, str):
|
||||||
|
try:
|
||||||
|
new_date = date.fromisoformat(new_date)
|
||||||
|
except ValueError:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid date format, expected YYYY-MM-DD")
|
||||||
|
tx.date = new_date
|
||||||
|
|
||||||
|
if payload.category_ids is not None:
|
||||||
|
# Preload categories to avoid async lazy-load during assignment
|
||||||
|
await session.refresh(tx, attribute_names=["categories"])
|
||||||
|
if payload.category_ids:
|
||||||
|
# Check for duplicate category IDs in the payload
|
||||||
|
if len(payload.category_ids) != len(set(payload.category_ids)):
|
||||||
|
raise HTTPException(status_code=400, detail="Duplicate category IDs in payload")
|
||||||
|
res = await session.execute(
|
||||||
|
select(Category).where(
|
||||||
|
Category.user_id == user.id, Category.id.in_(payload.category_ids)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
categories = list(res.scalars())
|
||||||
|
if len(categories) != len(payload.category_ids):
|
||||||
|
raise HTTPException(status_code=400, detail="One or more categories not found")
|
||||||
|
tx.categories = categories
|
||||||
|
else:
|
||||||
|
tx.categories = []
|
||||||
|
|
||||||
|
await session.commit()
|
||||||
|
await session.refresh(tx, attribute_names=["categories"])
|
||||||
|
return _to_read_model(tx)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{transaction_id}/delete", status_code=status.HTTP_204_NO_CONTENT)
|
||||||
|
async def delete_transaction(
|
||||||
|
transaction_id: int,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
res = await session.execute(
|
||||||
|
select(Transaction).where(
|
||||||
|
Transaction.id == transaction_id, Transaction.user_id == user.id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
tx = res.scalar_one_or_none()
|
||||||
|
if not tx:
|
||||||
|
raise HTTPException(status_code=404, detail="Transaction not found")
|
||||||
|
|
||||||
|
await session.delete(tx)
|
||||||
|
await session.commit()
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/{transaction_id}/categories/{category_id}", response_model=TransactionRead)
|
||||||
|
async def assign_category(
|
||||||
|
transaction_id: int,
|
||||||
|
category_id: int,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
# Load transaction and category ensuring ownership
|
||||||
|
res_tx = await session.execute(
|
||||||
|
select(Transaction).where(
|
||||||
|
Transaction.id == transaction_id, Transaction.user_id == user.id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
tx: Optional[Transaction] = res_tx.scalar_one_or_none()
|
||||||
|
if not tx:
|
||||||
|
raise HTTPException(status_code=404, detail="Transaction not found")
|
||||||
|
|
||||||
|
res_cat = await session.execute(
|
||||||
|
select(Category).where(Category.id == category_id, Category.user_id == user.id)
|
||||||
|
)
|
||||||
|
cat: Optional[Category] = res_cat.scalar_one_or_none()
|
||||||
|
if not cat:
|
||||||
|
raise HTTPException(status_code=404, detail="Category not found")
|
||||||
|
|
||||||
|
await session.refresh(tx, attribute_names=["categories"])
|
||||||
|
if cat not in tx.categories:
|
||||||
|
tx.categories.append(cat)
|
||||||
|
await session.commit()
|
||||||
|
await session.refresh(tx, attribute_names=["categories"])
|
||||||
|
return _to_read_model(tx)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/{transaction_id}/categories/{category_id}", response_model=TransactionRead)
|
||||||
|
async def unassign_category(
|
||||||
|
transaction_id: int,
|
||||||
|
category_id: int,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
res_tx = await session.execute(
|
||||||
|
select(Transaction).where(
|
||||||
|
Transaction.id == transaction_id, Transaction.user_id == user.id
|
||||||
|
)
|
||||||
|
)
|
||||||
|
tx: Optional[Transaction] = res_tx.scalar_one_or_none()
|
||||||
|
if not tx:
|
||||||
|
raise HTTPException(status_code=404, detail="Transaction not found")
|
||||||
|
|
||||||
|
res_cat = await session.execute(
|
||||||
|
select(Category).where(Category.id == category_id, Category.user_id == user.id)
|
||||||
|
)
|
||||||
|
cat: Optional[Category] = res_cat.scalar_one_or_none()
|
||||||
|
if not cat:
|
||||||
|
raise HTTPException(status_code=404, detail="Category not found")
|
||||||
|
|
||||||
|
await session.refresh(tx, attribute_names=["categories"])
|
||||||
|
if cat in tx.categories:
|
||||||
|
tx.categories.remove(cat)
|
||||||
|
await session.commit()
|
||||||
|
await session.refresh(tx, attribute_names=["categories"])
|
||||||
|
return _to_read_model(tx)
|
||||||
176
7project/src/backend/app/app.py
Normal file
@@ -0,0 +1,176 @@
import json
import logging
import os
import sys
from datetime import datetime
from pythonjsonlogger import jsonlogger

from fastapi import Depends, FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from prometheus_fastapi_instrumentator import Instrumentator, metrics
from starlette.requests import Request

from app.services.prometheus import number_of_users, number_of_transactions

from app.services import bank_scraper
from app.workers.celery_tasks import load_transactions, load_all_transactions
from app.models.user import User, OAuthAccount

from app.services.user_service import current_active_verified_user
from app.api.auth import router as auth_router
from app.api.csas import router as csas_router
from app.api.categories import router as categories_router
from app.api.transactions import router as transactions_router
from app.api.exchange_rates import router as exchange_rates_router
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users, get_oauth_provider, \
    UserManager, get_jwt_strategy
from app.core.security import extract_bearer_token, is_token_revoked, decode_and_verify_jwt
from app.services.user_service import SECRET

from fastapi import FastAPI
import sentry_sdk
from fastapi_users.db import SQLAlchemyUserDatabase
from app.core.db import async_session_maker, engine
from app.core.base import Base

sentry_sdk.init(
    dsn=os.getenv("SENTRY_DSN"),
    send_default_pii=True,
)

fastApi = FastAPI()

# CORS for frontend dev server
fastApi.add_middleware(
    CORSMiddleware,
    allow_origins=[
        "http://localhost:5173",
        "http://127.0.0.1:5173",
        os.getenv("FRONTEND_DOMAIN_SCHEME", "")
    ],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


if not os.getenv("PYTEST_RUN_CONFIG"):
    prometheus = Instrumentator().instrument(fastApi)
    # Register custom metrics
    prometheus.add(number_of_users()).add(number_of_transactions())
    prometheus.expose(
        fastApi,
        endpoint="/metrics",
        include_in_schema=True,
    )

fastApi.include_router(auth_router)
fastApi.include_router(categories_router)
fastApi.include_router(transactions_router)
fastApi.include_router(exchange_rates_router)
from app.api.mock_bank import router as mock_bank_router
fastApi.include_router(mock_bank_router)

for h in list(logging.root.handlers):
    logging.root.removeHandler(h)

_log_handler = logging.StreamHandler(sys.stdout)
_formatter = jsonlogger.JsonFormatter(
    fmt='%(asctime)s %(levelname)s %(name)s %(message)s %(pathname)s %(lineno)d %(process)d %(thread)d'
)
_log_handler.setFormatter(_formatter)

logging.root.setLevel(logging.INFO)
logging.root.addHandler(_log_handler)

for _name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
    _logger = logging.getLogger(_name)
    _logger.handlers = [_log_handler]
    _logger.propagate = True


@fastApi.middleware("http")
async def auth_guard(request: Request, call_next):
    # Enforce revoked/expired JWTs are rejected globally
    token = extract_bearer_token(request)
    if token:
        from fastapi import Response, status as _status
        # Deny if token is revoked
        if is_token_revoked(token):
            return Response(status_code=_status.HTTP_401_UNAUTHORIZED)
        # Deny if token is expired or invalid
        try:
            decode_and_verify_jwt(token, SECRET)
        except Exception:
            return Response(status_code=_status.HTTP_401_UNAUTHORIZED)
    return await call_next(request)


@fastApi.middleware("http")
async def log_traffic(request: Request, call_next):
    start_time = datetime.now()
    response = await call_next(request)
    process_time = (datetime.now() - start_time).total_seconds()
    client_host = request.client.host
    log_params = {
        "request_method": request.method,
        "request_url": str(request.url),
        "request_size": request.headers.get("content-length"),
        "request_headers": dict(request.headers),
        "response_status": response.status_code,
        "response_size": response.headers.get("content-length"),
        "response_headers": dict(response.headers),
        "process_time": process_time,
        "client_host": client_host
    }
    logging.getLogger(__name__).info("http_request", extra=log_params)
    return response


fastApi.include_router(
    fastapi_users.get_oauth_router(
        get_oauth_provider("MojeID"),
        auth_backend,
        "SECRET",
        associate_by_email=True,
        redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000") + "/auth/mojeid/callback",
    ),
    prefix="/auth/mojeid",
    tags=["auth"],
)

fastApi.include_router(
    fastapi_users.get_oauth_router(
        get_oauth_provider("BankID"),
        auth_backend,
        "SECRET",
        associate_by_email=True,
        redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000") + "/auth/bankid/callback",
    ),
    prefix="/auth/bankid",
    tags=["auth"],
)

fastApi.include_router(csas_router)


# Liveness/root endpoint
@fastApi.get("/", include_in_schema=False)
async def root():
    return {"status": "ok"}


@fastApi.get("/authenticated-route")
async def authenticated_route(user: User = Depends(current_active_verified_user)):
    return {"message": f"Hello {user.email}!"}


@fastApi.get("/_cron", include_in_schema=False)
async def handle_cron(request: Request):
    # Endpoint accessed via Cloudflare => return 404
    if request.headers.get("cf-connecting-ip"):
        raise HTTPException(status_code=404)

    logging.info("[Cron] Triggering scheduled tasks via HTTP endpoint")
    task = load_all_transactions.delay()
    return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
@@ -1,5 +1,7 @@
 import os
 from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
 from app.core.base import Base

 DATABASE_URL = os.getenv("DATABASE_URL")
@@ -17,10 +19,13 @@ if not DATABASE_URL:
 # Load all models to register them
 from app.models.user import User
 from app.models.transaction import Transaction
+from app.models.categories import Category

-ssl_enabled = os.getenv("MARIADB_HOST", "localhost") != "localhost"
+host_env = os.getenv("MARIADB_HOST", "localhost")
+ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
 connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}

+# Async engine/session for the async parts of the app
 engine = create_async_engine(
     DATABASE_URL,
     pool_pre_ping=True,
@@ -28,3 +33,13 @@ engine = create_async_engine(
     connect_args=connect_args,
 )
 async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
+
+# Synchronous engine/session for sync utilities (e.g., bank_scraper)
+SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
+engine_sync = create_engine(
+    SYNC_DATABASE_URL,
+    pool_pre_ping=True,
+    echo=os.getenv("SQL_ECHO", "0") == "1",
+    connect_args=connect_args,
+)
+sync_session_maker = sessionmaker(bind=engine_sync, expire_on_commit=False)
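The new synchronous engine reuses the async DATABASE_URL with the driver swapped from asyncmy to pymysql, so blocking utilities share the same configuration. A minimal hedged sketch of how such a utility might use it; the helper itself is illustrative and not part of the change.

# Hedged sketch: using the synchronous session outside the async app (e.g. a scraper or Celery task).
from app.core.db import sync_session_maker
from app.models.transaction import Transaction

def count_transactions() -> int:
    # Plain blocking session; acceptable for background jobs and one-off scripts.
    with sync_session_maker() as session:
        return session.query(Transaction).count()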
52
7project/src/backend/app/core/security.py
Normal file
@@ -0,0 +1,52 @@
from typing import Optional
import re
import jwt
from fastapi import Request

# Simple in-memory revocation store for revoked JWT tokens.
#
# Limitations:
# - All revoked tokens will be lost if the process restarts (data loss on restart).
# - Not suitable for multi-instance deployments: the revocation list is not shared between instances.
#   A token revoked in one instance will not be recognized as revoked in others.
#
# For production, use a persistent and shared store (e.g., Redis or a database).
_REVOKED_TOKENS: set[str] = set()

# Bearer token regex
_BEARER_RE = re.compile(r"^[Bb]earer\s+(.+)$")


def extract_bearer_token(request: Request) -> Optional[str]:
    auth = request.headers.get("authorization")
    if not auth:
        return None
    m = _BEARER_RE.match(auth)
    if not m:
        return None
    return m.group(1).strip()


def revoke_token(token: str) -> None:
    if token:
        _REVOKED_TOKENS.add(token)


def is_token_revoked(token: str) -> bool:
    return token in _REVOKED_TOKENS


def decode_and_verify_jwt(token: str, secret: str) -> dict:
    """
    Decode the JWT using the shared secret, verifying expiration and signature.
    Audience is not verified here to be compatible with fastapi-users default tokens.
    Raises jwt.ExpiredSignatureError if expired.
    Raises jwt.InvalidTokenError for other issues.
    Returns the decoded payload dict on success.
    """
    return jwt.decode(
        token,
        secret,
        algorithms=["HS256"],
        options={"verify_aud": False},
    )  # verify_exp is True by default
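The header comment above flags the in-memory set as unsuitable across restarts and multiple instances and points at Redis. A minimal hedged sketch of that swap, keeping the same two-function interface; the Redis URL, key prefix and TTL are assumptions, not part of the codebase.

# Hedged sketch: Redis-backed revocation store with the same interface as above.
import os
import redis

_redis = redis.Redis.from_url(os.getenv("REDIS_URL", "redis://localhost:6379/0"))
_TOKEN_TTL_SECONDS = 3600  # assumption: should match the JWT lifetime used by the auth backend

def revoke_token(token: str) -> None:
    if token:
        # Expire the entry together with the token so the store cannot grow unbounded
        _redis.setex(f"revoked:{token}", _TOKEN_TTL_SECONDS, 1)

def is_token_revoked(token: str) -> bool:
    return _redis.exists(f"revoked:{token}") == 1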
25
7project/src/backend/app/models/categories.py
Normal file
@@ -0,0 +1,25 @@
from fastapi_users_db_sqlalchemy import GUID
from sqlalchemy import Column, Integer, String, ForeignKey, Table, UniqueConstraint
from sqlalchemy.orm import relationship

from app.core.base import Base

association_table = Table(
    "category_transaction",
    Base.metadata,
    Column("category_id", Integer, ForeignKey("categories.id", ondelete="CASCADE"), primary_key=True),
    Column("transaction_id", Integer, ForeignKey("transaction.id", ondelete="CASCADE"), primary_key=True)
)


class Category(Base):
    __tablename__ = "categories"
    __table_args__ = (
        UniqueConstraint("name", "user_id", name="uix_name_user_id"),
    )
    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(length=100), nullable=False)
    description = Column(String(length=255), nullable=True)
    user_id = Column(GUID, ForeignKey("user.id"), nullable=False)
    user = relationship("User", back_populates="categories")
    transactions = relationship("Transaction", secondary=association_table, back_populates="categories")
24
7project/src/backend/app/models/transaction.py
Normal file
@@ -0,0 +1,24 @@
import os
from fastapi_users_db_sqlalchemy import GUID
from sqlalchemy import Column, Integer, String, Float, ForeignKey, Date, func
from sqlalchemy.orm import relationship
from sqlalchemy_utils import EncryptedType
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine

from app.core.base import Base
from app.models.categories import association_table

SECRET_KEY = os.environ.get("DB_ENCRYPTION_KEY", "localdev")


class Transaction(Base):
    __tablename__ = "transaction"
    id = Column(Integer, primary_key=True, autoincrement=True)
    amount = Column(EncryptedType(Float, SECRET_KEY, engine=FernetEngine), nullable=False)
    description = Column(EncryptedType(String(length=255), SECRET_KEY, engine=FernetEngine), nullable=True)
    date = Column(Date, nullable=False, server_default=func.current_date())
    user_id = Column(GUID, ForeignKey("user.id"), nullable=False)

    # Relationship
    user = relationship("User", back_populates="transactions")
    categories = relationship("Category", secondary=association_table, back_populates="transactions", passive_deletes=True)
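Because amount and description are wrapped in EncryptedType with FernetEngine, the database only ever sees ciphertext, so SQL-side filtering or aggregation over these columns is not possible; totals have to be computed in Python after the ORM decrypts the values, which matches how the transactions API accumulates balances. A small hedged sketch of that pattern (the helper is illustrative):

# Hedged sketch: aggregate encrypted amounts in Python; values decrypt transparently on load.
from sqlalchemy import select
from app.models.transaction import Transaction

async def total_for_user(session, user_id) -> float:
    res = await session.execute(select(Transaction).where(Transaction.user_id == user_id))
    return sum(float(tx.amount) for tx in res.scalars())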
22
7project/src/backend/app/models/user.py
Normal file
@@ -0,0 +1,22 @@
from sqlalchemy import Column, String
from sqlalchemy.orm import relationship, mapped_column, Mapped
from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyBaseOAuthAccountTableUUID
from sqlalchemy.sql.sqltypes import JSON

from app.core.base import Base


class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base):
    # BankID token is longer than default
    access_token: Mapped[str] = mapped_column(String(length=4096), nullable=False)


class User(SQLAlchemyBaseUserTableUUID, Base):
    first_name = Column(String(length=100), nullable=True)
    last_name = Column(String(length=100), nullable=True)
    oauth_accounts = relationship("OAuthAccount", lazy="joined")
    config = Column(JSON, default={})

    # Relationship
    transactions = relationship("Transaction", back_populates="user")
    categories = relationship("Category", back_populates="user")
50
7project/src/backend/app/oauth/bank_id.py
Normal file
@@ -0,0 +1,50 @@
import secrets
from typing import Optional, Literal

from httpx_oauth.oauth2 import T

from app.oauth.custom_openid import CustomOpenID


class BankID(CustomOpenID):
    def __init__(self, client_id: str, client_secret: str):
        super().__init__(
            client_id,
            client_secret,
            "https://oidc.sandbox.bankid.cz/.well-known/openid-configuration",
            "BankID",
            base_scopes=["openid", "profile.email", "profile.name"],
        )

    async def get_user_info(self, token: str) -> dict:
        info = await self.get_profile(token)

        return {
            "first_name": info.get("given_name"),
            "last_name": info.get("family_name"),
        }

    async def get_authorization_url(
        self,
        redirect_uri: str,
        state: Optional[str] = None,
        scope: Optional[list[str]] = None,
        code_challenge: Optional[str] = None,
        code_challenge_method: Optional[Literal["plain", "S256"]] = None,
        extras_params: Optional[T] = None,
    ) -> str:
        if extras_params is None:
            extras_params = {}

        # BankID requires random nonce parameter for security
        # https://developer.bankid.cz/docs/security_sep
        extras_params["nonce"] = secrets.token_urlsafe()

        return await super().get_authorization_url(
            redirect_uri,
            state,
            scope,
            code_challenge,
            code_challenge_method,
            extras_params,
        )
33
7project/src/backend/app/oauth/csas.py
Normal file
@@ -0,0 +1,33 @@
import os
from os.path import dirname, join
from typing import Optional, Any

import httpx
from httpx_oauth.exceptions import GetProfileError
from httpx_oauth.oauth2 import BaseOAuth2

import app.services.db

BASE_DIR = dirname(__file__)
certs = (
    join(BASE_DIR, "public_key.pem"),
    join(BASE_DIR, "private_key.key")
)

class CSASOAuth(BaseOAuth2):

    def __init__(self, client_id: str, client_secret: str):
        super().__init__(
            client_id,
            client_secret,
            base_scopes=["aisp"],
            authorize_endpoint="https://webapi.developers.erstegroup.com/api/csas/sandbox/v1/sandbox-idp/auth",
            access_token_endpoint="https://webapi.developers.erstegroup.com/api/csas/sandbox/v1/sandbox-idp/token",
            refresh_token_endpoint="https://webapi.developers.erstegroup.com/api/csas/sandbox/v1/sandbox-idp/token"
        )
6
7project/src/backend/app/oauth/custom_openid.py
Normal file
@@ -0,0 +1,6 @@
from httpx_oauth.clients.openid import OpenID


class CustomOpenID(OpenID):
    async def get_user_info(self, token: str) -> dict:
        raise NotImplementedError()
56
7project/src/backend/app/oauth/moje_id.py
Normal file
@@ -0,0 +1,56 @@
import json
from typing import Optional, Literal, Any

from httpx_oauth.oauth2 import T

from app.oauth.custom_openid import CustomOpenID


class MojeIDOAuth(CustomOpenID):
    def __init__(self, client_id: str, client_secret: str):
        super().__init__(
            client_id,
            client_secret,
            "https://mojeid.cz/.well-known/openid-configuration/",
            "MojeID",
            base_scopes=["openid", "email", "profile"],
        )

    async def get_user_info(self, token: str) -> Optional[Any]:
        info = await self.get_profile(token)

        return {
            "first_name": info.get("given_name"),
            "last_name": info.get("family_name"),
        }

    async def get_authorization_url(
        self,
        redirect_uri: str,
        state: Optional[str] = None,
        scope: Optional[list[str]] = None,
        code_challenge: Optional[str] = None,
        code_challenge_method: Optional[Literal["plain", "S256"]] = None,
        extras_params: Optional[T] = None,
    ) -> str:
        required_fields = {
            'id_token': {
                'name': {'essential': True},
                'given_name': {'essential': True},
                'family_name': {'essential': True},
                'email': {'essential': True},
                'mojeid_valid': {'essential': True},
            }}

        if extras_params is None:
            extras_params = {}
        extras_params["claims"] = json.dumps(required_fields)

        return await super().get_authorization_url(
            redirect_uri,
            state,
            scope,
            code_challenge,
            code_challenge_method,
            extras_params,
        )
28
7project/src/backend/app/oauth/private_key.key
Normal file
@@ -0,0 +1,28 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDcr/oxgV074ETd
DkP/0l8LFnRofru+m2wNNG/ttVCioTqwnvR4oYxwq3U9qIBsT0D+Rx/Ef7qcpzqf
/w9xt6Hosdv6I5jMHGaVQqLiPuV26/a7WvcmU+PpYuEBmbBHjGVJRBwgPtlUW1VL
M8Pht9YiaagEKvFa6SUidZLfPv+ECohqgH4mgMrEcG/BTnry0/5xQdadRC9o25cl
NtZIesS5GPeelhggFTkbh/FaxvMXhIAaRXT61cnxgxtfM71h5ObX5Lwle9z5a+Tw
xgQhSQq1jbHALYvTwsc4Q/NQGXpGNWy599sb7dg5AkPFSSF4ceXBo/2jOaZCqWrt
FVONZ+blAgMBAAECggEBAJwQbrRXsaFIRiq1jez5znC+3m+PQCHZM55a+NR3pqB7
uE9y+ZvdUr3S4sRJxxfRLDsl/Rcu5L8nm9PNwhQ/MmamcNQCHGoro3fmed3ZcNia
og94ktMt/DztygUhtIHEjVQ0sFc1WufG9xiJcPrM0MfhRAo+fBQ4UCSAVO8/U98B
a4yukrPNeEA03hyjLB9W41pNQfyOtAHqzwDg9Q5XVaGMCLZT1bjCIquUcht5iMva
tiw3cwdiYIklLTzTCsPPK9A/AlWZyUXL8KxtN0mU0kkwlXqASoXZ2nqdkhjRye/V
3JXOmlDtDaJCqWDpH2gHLxMCl7OjfPvuD66bAT3H63kCgYEA5zxW/l6oI3gwYW7+
j6rEjA2n8LikVnyW2e/PZ7pxBH3iBFe2DHx/imeqd/0IzixcM1zZT/V+PTFPQizG
lOU7stN6Zg/LuRdxneHPyLWCimJP7BBJCWyJkuxKy9psokyBhGSLR/phL3fP7UkB
o2I3vGmTFu5A0FzXcNH/cXPMdy8CgYEA9FJw3kyzXlInhJ6Cd63mckLPLYDArUsm
THBoeH2CVTBS5g0bCbl7N1ZxUoYwZPD4lg5V0nWhZALGf+85ULSjX03PMf1cc6WW
EIbZIo9hX+mGRa/FudDd+TlbtBnn0jucwABuLQi9mIepE55Hu9tw5/FT3cHeZVQc
cC0T6ulVvisCgYBCzFeFG+sOdAXl356B+h7VJozBKVWv9kXNp00O9fj4BzVnc78P
VFezr8a66snEZWQtIkFUq+JP4xK2VyD2mlHoktbk7OM5EOCtbzILFQQk3cmgtAOl
SUlkvAXPZcXEDL3NdQ4XOOkiQUY7kb97Z0AamZT4JtNqXaeO29si9wS12QKBgHYg
Hd3864Qg6GZgVOgUNiTsVErFw2KFwQCYIIqQ9CDH+myrzXTILuC0dJnXszI6p5W1
XJ0irmMyTFKykN2KWKrNbe3Xd4mad5GKARWKiSPcPkUXFNwgNhI3PzU2iTTGCaVz
D9HKNhC3FnIbxsb29AHQViITh7kqD43U3ZpoMkJ9AoGAZ+sg+CPfuo3ZMpbcdb3B
ZX2UhAvNKxgHvNnHOjO+pvaM7HiH+BT0650brfBWQ0nTG1dt18mCevVk1UM/5hO9
AtZw06vCLOJ3p3qpgkSlRZ1H7VokG9M8Od0zXqtJrmeLeBq7dfuDisYOuA+NUEbJ
UM/UHByieS6ywetruz0LpM0=
-----END RSA PRIVATE KEY-----
31
7project/src/backend/app/oauth/public_key.pem
Normal file
@@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFSTCCAzGgAwIBAgIEAQIDBDANBgkqhkiG9w0BAQsFADCBgDELMAkGA1UEBhMC
Q1oxDjAMBgNVBAcTBUN6ZWNoMRMwEQYDVQQKEwpFcnN0ZUdyb3VwMRUwEwYDVQQL
EwxFcnN0ZUh1YlRlYW0xETAPBgNVBAMTCEVyc3RlSHViMSIwIAYJKoZIhvcNAQkB
FhNpbmZvQGVyc3RlZ3JvdXAuY29tMB4XDTIyMTIxNDA4MDc1N1oXDTI2MDMxNDA4
MDc1N1owUjEaMBgGA1UEYRMRUFNEQ1otQ05CLTEyMzQ1NjcxCzAJBgNVBAYTAkNa
MRYwFAYDVQQDEw1UUFAgVGVzdCBRV0FDMQ8wDQYDVQQKEwZNeSBUUFAwggEiMA0G
CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDcr/oxgV074ETdDkP/0l8LFnRofru+
m2wNNG/ttVCioTqwnvR4oYxwq3U9qIBsT0D+Rx/Ef7qcpzqf/w9xt6Hosdv6I5jM
HGaVQqLiPuV26/a7WvcmU+PpYuEBmbBHjGVJRBwgPtlUW1VLM8Pht9YiaagEKvFa
6SUidZLfPv+ECohqgH4mgMrEcG/BTnry0/5xQdadRC9o25clNtZIesS5GPeelhgg
FTkbh/FaxvMXhIAaRXT61cnxgxtfM71h5ObX5Lwle9z5a+TwxgQhSQq1jbHALYvT
wsc4Q/NQGXpGNWy599sb7dg5AkPFSSF4ceXBo/2jOaZCqWrtFVONZ+blAgMBAAGj
gfcwgfQwCwYDVR0PBAQDAgHGMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
AjCBrwYIKwYBBQUHAQMEgaIwgZ8wCAYGBACORgEBMAsGBgQAjkYBAwIBFDAIBgYE
AI5GAQQwEwYGBACORgEGMAkGBwQAjkYBBgMwZwYGBACBmCcCMF0wTDARBgcEAIGY
JwEBDAZQU1BfQVMwEQYHBACBmCcBAgwGUFNQX1BJMBEGBwQAgZgnAQMMBlBTUF9B
STARBgcEAIGYJwEEDAZQU1BfSUMMBUVyc3RlDAZBVC1FUlMwFAYDVR0RBA0wC4IJ
bXl0cHAuY29tMA0GCSqGSIb3DQEBCwUAA4ICAQBlTMPSwz46GMRBEPcy+25gV7xE
5aFS5N6sf3YQyFelRJgPxxPxTHo55WelcK4XmXRQKeQ4VoKf4FgP0Cj74+p0N0gw
wFJDWPGXH3SdjAXPRtG+FOiHwUSoyrmvbL4kk6Vbrd4cF+qe0BlzHzJ2Q6vFLwsk
NYvWzkY9YjoItB38nAnQhyYgl1yHUK/uDWyrwHVfZn1AeTws/hr/KufORuiQfaTU
kvAH1nzi7WSJ6AIQCd2exUEPx/O14Y+oCoJhTVd+RpA/9lkcqebceBijj47b2bvv
QbjymvyTXqHd3L224Y7zVmh95g+CaJ8PRpApdrImfjfDDRy8PaFWx2pd/v0UQgrQ
lgbO6jE7ah/tS0T5q5JtwnLAiOOqHPaKRvo5WB65jcZ2fvOH/0/oZ89noxp1Ihus
vvsjqc9k2h9Rvt2pEjVU40HtQZ6XCmWqgFwK3n9CHrKNV/GqgANIZRNcvXKMCUoB
VoJORVwi2DF4caKSFmyEWuK+5FyCEILtQ60SY/NHVGsUeOuN7OTjZjECARO6p4hz
Uw+GCIXrzmIjS6ydh/LRef+NK28+xTbjmLHu/wnHg9rrHEnTPd39is+byfS7eeLV
Dld/0Xrv88C0wxz63dcwAceiahjyz2mbQm765tOf9rK7EqsvT5M8EXFJ3dP4zwqS
6mNFoIa0XGbAUT3E1w==
-----END CERTIFICATE-----
21
7project/src/backend/app/schemas/category.py
Normal file
@@ -0,0 +1,21 @@
from typing import Optional
from pydantic import BaseModel, ConfigDict


class CategoryBase(BaseModel):
    name: str
    description: Optional[str] = None


class CategoryCreate(CategoryBase):
    pass


class CategoryUpdate(BaseModel):
    name: Optional[str] = None
    description: Optional[str] = None


class CategoryRead(CategoryBase):
    id: int
    model_config = ConfigDict(from_attributes=True)
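A small sketch (not part of the diff above) of how these Pydantic v2 schemas are typically used; the ORM-like object below is hypothetical and only stands in for a SQLAlchemy row:

    from app.schemas.category import CategoryCreate, CategoryRead

    # Validate an incoming request payload.
    payload = CategoryCreate(name="Groceries", description="Food and household")

    # from_attributes=True lets CategoryRead be built straight from an object
    # that exposes matching attributes, e.g. an ORM instance (faked here).
    class FakeOrmCategory:
        id = 1
        name = "Groceries"
        description = "Food and household"

    read = CategoryRead.model_validate(FakeOrmCategory())
    print(read.model_dump())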
26
7project/src/backend/app/schemas/transaction.py
Normal file
@@ -0,0 +1,26 @@
from typing import List, Optional, Union
from datetime import date
from pydantic import BaseModel, Field, ConfigDict


class TransactionBase(BaseModel):
    amount: float = Field(..., gt=-1e18, lt=1e18)
    description: Optional[str] = None
    # accept either ISO date string or date object
    date: Optional[Union[date, str]] = None

class TransactionCreate(TransactionBase):
    category_ids: Optional[List[int]] = None

class TransactionUpdate(BaseModel):
    amount: Optional[float] = Field(None, gt=-1e18, lt=1e18)
    description: Optional[str] = None
    # accept either ISO date string or date object
    date: Optional[Union[date, str]] = None
    category_ids: Optional[List[int]] = None

class TransactionRead(TransactionBase):
    id: int
    category_ids: List[int] = []
    date: Optional[Union[date, str]]
    model_config = ConfigDict(from_attributes=True)
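A quick sketch (not part of the diff above) showing that the date field declared as Union[date, str] accepts either an ISO string or a datetime.date; the values are illustrative only:

    from datetime import date
    from app.schemas.transaction import TransactionCreate

    # Both forms pass validation against Optional[Union[date, str]].
    tx1 = TransactionCreate(amount=-55.40, description="Milk and eggs", date="2024-01-02")
    tx2 = TransactionCreate(amount=100.0, description="Salary", date=date(2024, 1, 1))
    print(tx1.date, tx2.date)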
@@ -1,16 +1,17 @@
 import uuid
-from typing import Optional
+from typing import Optional, Dict, Any
 from fastapi_users import schemas
 
 class UserRead(schemas.BaseUser[uuid.UUID]):
     first_name: Optional[str] = None
-    surname: Optional[str] = None
+    last_name: Optional[str] = None
+    config: Optional[Dict[str, Any]] = None
 
 class UserCreate(schemas.BaseUserCreate):
     first_name: Optional[str] = None
-    surname: Optional[str] = None
+    last_name: Optional[str] = None
 
 class UserUpdate(schemas.BaseUserUpdate):
     first_name: Optional[str] = None
-    surname: Optional[str] = None
+    last_name: Optional[str] = None
 
178
7project/src/backend/app/services/bank_scraper.py
Normal file
@@ -0,0 +1,178 @@
import json
import logging
import os
from os.path import dirname, join
from time import strptime
from uuid import UUID

import httpx
from sqlalchemy import select

from app.core.db import sync_session_maker
from app.models.transaction import Transaction
from app.models.user import User

logger = logging.getLogger(__name__)

OAUTH_DIR = join(dirname(__file__), "..", "oauth")
CERTS = (
    join(OAUTH_DIR, "public_key.pem"),
    join(OAUTH_DIR, "private_key.key"),
)


def load_mock_bank_transactions(user_id: str) -> None:
    try:
        uid = UUID(str(user_id))
    except Exception:
        logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
        return

    _load_mock_bank_transactions(uid)


def load_all_mock_bank_transactions() -> None:
    with sync_session_maker() as session:
        users = session.execute(select(User)).unique().scalars().all()
        logger.info("[BankScraper] Starting Mock Bank scrape for all users | count=%d", len(users))

        processed = 0
        for user in users:
            try:
                _load_mock_bank_transactions(user.id)
                processed += 1
            except Exception:
                logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
                                 getattr(user, 'email', None))
        logger.info("[BankScraper] Finished Mock Bank scrape for all users | processed=%d", processed)


def _load_mock_bank_transactions(user_id: UUID) -> None:
    with sync_session_maker() as session:
        user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
        if user is None:
            logger.warning("User not found for id=%s", user_id)
            return

        transactions = []
        with httpx.Client() as client:
            response = client.get(f"{os.getenv('APP_POD_URL')}/mock-bank/scrape")
            if response.status_code != httpx.codes.OK:
                return
            for transaction in response.json():
                transactions.append(
                    Transaction(
                        amount=transaction["amount"],
                        description=transaction.get("description"),
                        date=strptime(transaction["date"], "%Y-%m-%d"),
                        user_id=user_id,
                    )
                )

        for transaction in transactions:
            session.add(transaction)
        session.commit()


def load_ceska_sporitelna_transactions(user_id: str) -> None:
    try:
        uid = UUID(str(user_id))
    except Exception:
        logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
        return

    _load_ceska_sporitelna_transactions(uid)


def load_all_ceska_sporitelna_transactions() -> None:
    with sync_session_maker() as session:
        users = session.execute(select(User)).unique().scalars().all()
        logger.info("[BankScraper] Starting CSAS scrape for all users | count=%d", len(users))

        processed = 0
        for user in users:
            try:
                _load_ceska_sporitelna_transactions(user.id)
                processed += 1
            except Exception:
                logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
                                 getattr(user, 'email', None))
        logger.info("[BankScraper] Finished CSAS scrape for all users | processed=%d", processed)


def _load_ceska_sporitelna_transactions(user_id: UUID) -> None:
    with sync_session_maker() as session:
        user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
        if user is None:
            logger.warning("User not found for id=%s", user_id)
            return

        cfg = user.config or {}
        if "csas" not in cfg:
            return

        cfg = json.loads(cfg["csas"])
        if "access_token" not in cfg:
            return

        accounts = []
        try:
            with httpx.Client(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
                response = client.get(
                    "https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts?size=10&page=0&sort=iban&order=desc",
                    headers={
                        "Authorization": f"Bearer {cfg['access_token']}",
                        "WEB-API-key": "09fdc637-3c57-4242-95f2-c2205a2438f3",
                        "user-involved": "false",
                    },
                )
                if response.status_code != httpx.codes.OK:
                    return

                for account in response.json().get("accounts", []):
                    accounts.append(account)

        except (httpx.HTTPError,) as e:
            logger.exception("[BankScraper] HTTP error during CSAS request | user_id=%s", user_id)
            return

        for account in accounts:
            acc_id = account.get("id")
            if not acc_id:
                continue

            url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{acc_id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
            with httpx.Client(cert=CERTS) as client:
                response = client.get(
                    url,
                    headers={
                        "Authorization": f"Bearer {cfg['access_token']}",
                        "WEB-API-key": "09fdc637-3c57-4242-95f2-c2205a2438f3",
                        "user-involved": "false",
                    },
                )
                if response.status_code != httpx.codes.OK:
                    continue

                transactions = response.json().get("transactions", [])

                for transaction in transactions:
                    description = transaction.get("entryDetails", {}).get("transactionDetails", {}).get(
                        "additionalRemittanceInformation")
                    date_str = transaction.get("bookingDate", {}).get("date")
                    date = strptime(date_str, "%Y-%m-%d") if date_str else None
                    amount = transaction.get("amount", {}).get("value")
                    if transaction.get("creditDebitIndicator") == "DBIT" and amount is not None:
                        amount = -abs(amount)

                    if amount is None:
                        continue

                    obj = Transaction(
                        amount=amount,
                        description=description,
                        date=date,
                        user_id=user_id,
                    )
                    session.add(obj)
        session.commit()
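One detail worth illustrating (not part of the diff above): time.strptime returns a struct_time rather than a date. If the Transaction.date column expects a datetime.date, the usual conversion goes through datetime.strptime; a minimal sketch assuming the same "%Y-%m-%d" format used above:

    from datetime import datetime

    def parse_booking_date(date_str: str):
        # datetime.strptime parses the ISO-style string; .date() yields a
        # datetime.date, which SQLAlchemy Date columns accept directly.
        return datetime.strptime(date_str, "%Y-%m-%d").date()

    print(parse_booking_date("2024-01-02"))  # -> 2024-01-02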
@@ -4,11 +4,13 @@ from sqlalchemy.ext.asyncio import AsyncSession
 from fastapi_users.db import SQLAlchemyUserDatabase
 
 from ..core.db import async_session_maker
-from ..models.user import User
+from ..models.user import User, OAuthAccount
 
+
 async def get_async_session() -> AsyncGenerator[AsyncSession, None]:
     async with async_session_maker() as session:
         yield session
 
+
 async def get_user_db(session: AsyncSession = Depends(get_async_session)):
-    yield SQLAlchemyUserDatabase(session, User)
+    yield SQLAlchemyUserDatabase(session, User, OAuthAccount)
48
7project/src/backend/app/services/prometheus.py
Normal file
@@ -0,0 +1,48 @@
from typing import Callable
from prometheus_fastapi_instrumentator.metrics import Info
from prometheus_client import Gauge
from sqlalchemy import select, func

from app.core.db import async_session_maker
from app.models.transaction import Transaction
from app.models.user import User


def number_of_users() -> Callable[[Info], None]:
    METRIC = Gauge(
        "number_of_users_total",
        "Number of registered users.",
        labelnames=("users",)
    )

    async def instrumentation(info: Info) -> None:
        try:
            async with async_session_maker() as session:
                result = await session.execute(select(func.count(User.id)))
                user_count = result.scalar_one() or 0
        except Exception:
            # In case of DB errors, avoid crashing metrics endpoint
            user_count = 0
        METRIC.labels(users="total").set(user_count)

    return instrumentation


def number_of_transactions() -> Callable[[Info], None]:
    METRIC = Gauge(
        "number_of_transactions_total",
        "Number of transactions stored.",
        labelnames=("transactions",)
    )

    async def instrumentation(info: Info) -> None:
        try:
            async with async_session_maker() as session:
                result = await session.execute(select(func.count()).select_from(Transaction))
                transaction_count = result.scalar_one() or 0
        except Exception:
            # In case of DB errors, avoid crashing metrics endpoint
            transaction_count = 0
        METRIC.labels(transactions="total").set(transaction_count)

    return instrumentation
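A short sketch (not part of the diff above) of how such gauge callbacks are commonly registered with prometheus-fastapi-instrumentator; the FastAPI app object here is only illustrative, the real wiring in this project may differ:

    from fastapi import FastAPI
    from prometheus_fastapi_instrumentator import Instrumentator

    from app.services.prometheus import number_of_users, number_of_transactions

    app = FastAPI()

    # Each factory returns an async callback that refreshes its Gauge whenever
    # the /metrics endpoint is scraped.
    instrumentator = Instrumentator()
    instrumentator.add(number_of_users())
    instrumentator.add(number_of_transactions())
    instrumentator.instrument(app).expose(app)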
@@ -3,26 +3,66 @@ import uuid
 from typing import Optional
 
 from fastapi import Depends, Request
-from fastapi_users import BaseUserManager, FastAPIUsers, UUIDIDMixin
+from fastapi_users import BaseUserManager, FastAPIUsers, UUIDIDMixin, models
 from fastapi_users.authentication import (
     AuthenticationBackend,
     BearerTransport,
 )
 from fastapi_users.authentication.strategy.jwt import JWTStrategy
 from fastapi_users.db import SQLAlchemyUserDatabase
+from httpx_oauth.oauth2 import BaseOAuth2
 
 from app.models.user import User
+from app.oauth.bank_id import BankID
+from app.workers.celery_tasks import send_email
+from app.oauth.custom_openid import CustomOpenID
+from app.oauth.moje_id import MojeIDOAuth
 from app.services.db import get_user_db
-from app.core.queue import enqueue_email
 
 SECRET = os.getenv("SECRET", "CHANGE_ME_SECRET")
 
 FRONTEND_URL = os.getenv("FRONTEND_URL", "http://localhost:5173")
 BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:8000")
 
+providers = {
+    "MojeID": MojeIDOAuth(
+        os.getenv("MOJEID_CLIENT_ID", "CHANGE_ME_CLIENT_ID"),
+        os.getenv("MOJEID_CLIENT_SECRET", "CHANGE_ME_CLIENT_SECRET"),
+    ),
+    "BankID": BankID(
+        os.getenv("BANKID_CLIENT_ID", "CHANGE_ME_CLIENT_ID"),
+        os.getenv("BANKID_CLIENT_SECRET", "CHANGE_ME_CLIENT_SECRET"),
+    ),
+}
+
+
+def get_oauth_provider(name: str) -> Optional[BaseOAuth2]:
+    if name not in providers:
+        return None
+    return providers[name]
 
 
 class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
     reset_password_token_secret = SECRET
     verification_token_secret = SECRET
 
+    async def oauth_callback(self: "BaseUserManager[models.UOAP, models.ID]", oauth_name: str, access_token: str,
+                             account_id: str, account_email: str, expires_at: Optional[int] = None,
+                             refresh_token: Optional[str] = None, request: Optional[Request] = None, *,
+                             associate_by_email: bool = False, is_verified_by_default: bool = False) -> models.UOAP:
+
+        user = await super().oauth_callback(oauth_name, access_token, account_id, account_email, expires_at,
+                                            refresh_token, request, associate_by_email=associate_by_email,
+                                            is_verified_by_default=is_verified_by_default)
+
+        # set additional user info from the OAuth provider
+        provider = get_oauth_provider(oauth_name)
+        if provider is not None and isinstance(provider, CustomOpenID):
+            update_dict = await provider.get_user_info(access_token)
+            await self.user_db.update(user, update_dict)
+
+        return user
+
     async def on_after_register(self, user: User, request: Optional[Request] = None):
         await self.request_verify(user, request)
 
@@ -46,19 +86,23 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
             "Pokud jsi registraci neprováděl(a), tento email ignoruj.\n"
         )
         try:
-            enqueue_email(to=user.email, subject=subject, body=body)
+            send_email.delay(user.email, subject, body)
         except Exception as e:
             print("[Email Fallback] To:", user.email)
             print("[Email Fallback] Subject:", subject)
             print("[Email Fallback] Body:\n", body)
 
 
 async def get_user_manager(user_db: SQLAlchemyUserDatabase = Depends(get_user_db)):
     yield UserManager(user_db)
 
 
 bearer_transport = BearerTransport(tokenUrl="auth/jwt/login")
 
 
 def get_jwt_strategy() -> JWTStrategy:
-    return JWTStrategy(secret=SECRET, lifetime_seconds=3600)
+    return JWTStrategy(secret=SECRET, lifetime_seconds=604800)
 
 
 auth_backend = AuthenticationBackend(
     name="jwt",
@@ -70,4 +114,3 @@ fastapi_users = FastAPIUsers[User, uuid.UUID](get_user_manager, [auth_backend])
 
 current_active_user = fastapi_users.current_user(active=True)
 current_active_verified_user = fastapi_users.current_user(active=True, verified=True)
-
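A brief sketch (not part of the diff above) of how the exported fastapi-users dependencies are typically consumed in a route handler; the route path matches the one exercised by the unit tests, but the handler body is illustrative:

    from fastapi import Depends, FastAPI

    from app.models.user import User
    from app.services.user_service import current_active_user

    app = FastAPI()

    @app.get("/authenticated-route")
    async def authenticated_route(user: User = Depends(current_active_user)):
        # Only reachable with a valid bearer token issued by the JWT strategy
        # above (lifetime 604800 seconds, i.e. seven days).
        return {"message": f"Hello {user.email}"}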
0
7project/src/backend/app/workers/__init__.py
Normal file
86
7project/src/backend/app/workers/celery_tasks.py
Normal file
@@ -0,0 +1,86 @@
import logging
import os
import smtplib
from email.message import EmailMessage

import app.services.bank_scraper
from app.celery_app import celery_app

logger = logging.getLogger("celery_tasks")
if not logger.handlers:
    _h = logging.StreamHandler()
    logger.addHandler(_h)
    logger.setLevel(logging.INFO)


@celery_app.task(name="workers.send_email")
def send_email(to: str, subject: str, body: str) -> None:
    if not (to and subject and body):
        logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
        return

    host = os.getenv("SMTP_HOST")
    if not host:
        logger.error("SMTP_HOST is not configured; cannot send email")
        return

    # Configuration
    port = int(os.getenv("SMTP_PORT", "25"))
    username = os.getenv("SMTP_USERNAME")
    password = os.getenv("SMTP_PASSWORD")
    use_tls = os.getenv("SMTP_USE_TLS", "0").lower() in {"1", "true", "yes"}
    use_ssl = os.getenv("SMTP_USE_SSL", "0").lower() in {"1", "true", "yes"}
    timeout = int(os.getenv("SMTP_TIMEOUT", "10"))
    mail_from = os.getenv("SMTP_FROM") or username or "noreply@localhost"

    # Build message
    msg = EmailMessage()
    msg["To"] = to
    msg["From"] = mail_from
    msg["Subject"] = subject
    msg.set_content(body)

    try:
        if use_ssl:
            with smtplib.SMTP_SSL(host=host, port=port, timeout=timeout) as smtp:
                if username and password:
                    smtp.login(username, password)
                smtp.send_message(msg)
        else:
            with smtplib.SMTP(host=host, port=port, timeout=timeout) as smtp:
                # STARTTLS if requested
                if use_tls:
                    smtp.starttls()
                if username and password:
                    smtp.login(username, password)
                smtp.send_message(msg)
        logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
    except Exception:
        logger.exception("Failed to send email via SMTP to=%s subject=%s host=%s port=%s tls=%s ssl=%s", to, subject,
                         host, port, use_tls, use_ssl)


@celery_app.task(name="workers.load_transactions")
def load_transactions(user_id: str) -> None:
    if not user_id:
        logger.error("Load transactions task missing user_id.")
        return

    logger.info("[Celery] Starting load_transactions | user_id=%s", user_id)
    try:
        # Use synchronous bank scraper functions directly, mirroring load_all_transactions
        app.services.bank_scraper.load_mock_bank_transactions(user_id)
        app.services.bank_scraper.load_ceska_sporitelna_transactions(user_id)
    except Exception:
        logger.exception("Failed to load transactions for user_id=%s", user_id)
    else:
        logger.info("[Celery] Finished load_transactions | user_id=%s", user_id)


@celery_app.task(name="workers.load_all_transactions")
def load_all_transactions() -> None:
    logger.info("[Celery] Starting load_all_transactions")
    # Now use synchronous bank scraper functions directly
    app.services.bank_scraper.load_all_mock_bank_transactions()
    app.services.bank_scraper.load_all_ceska_sporitelna_transactions()
    logger.info("[Celery] Finished load_all_transactions")
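A small sketch (not part of the diff above) of enqueuing these tasks from application code; the broker configuration is assumed to live in app.celery_app as imported above, and the UUID argument is purely hypothetical:

    from app.workers.celery_tasks import load_transactions, send_email

    # .delay() pushes the task onto the broker; the worker deployment picks it up.
    send_email.delay("user@example.com", "Ověření účtu", "Klikni na odkaz ...")
    load_transactions.delay("1c0a7e9e-0000-0000-0000-000000000000")  # hypothetical user UUID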
20
7project/src/backend/docker-compose.test.yml
Normal file
@@ -0,0 +1,20 @@
version: "3.9"
services:
  mariadb:
    image: mariadb:11.4
    container_name: test-mariadb
    environment:
      MARIADB_ROOT_PASSWORD: rootpw
      MARIADB_DATABASE: group_project
      MARIADB_USER: appuser
      MARIADB_PASSWORD: apppass
    ports:
      - "3307:3306" # host:container (use 3307 on host to avoid conflicts)
    healthcheck:
      test: ["CMD", "mariadb-admin", "ping", "-h", "127.0.0.1", "-u", "root", "-prootpw", "--silent"]
      interval: 5s
      timeout: 2s
      retries: 20
    # Truly ephemeral, fast storage (removed when container stops)
    tmpfs:
      - /var/lib/mysql
4
7project/src/backend/main.py
Normal file
@@ -0,0 +1,4 @@
import uvicorn

if __name__ == "__main__":
    uvicorn.run("app.app:fastApi", host="0.0.0.0", log_level="info")
5
7project/src/backend/pyproject.toml
Normal file
@@ -0,0 +1,5 @@
[tool.pytest.ini_options]
pythonpath = "."
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "session"
asyncio_default_test_loop_scope = "session"
@@ -11,6 +11,7 @@ asyncmy==0.2.9
 bcrypt==4.3.0
 billiard==4.2.2
 celery==5.5.3
+certifi==2025.10.5
 cffi==2.0.0
 click==8.1.8
 click-didyoumean==0.3.1
@@ -25,7 +26,10 @@ fastapi-users==14.0.1
 fastapi-users-db-sqlalchemy==7.0.0
 greenlet==3.2.4
 h11==0.16.0
+httpcore==1.0.9
 httptools==0.6.4
+httpx==0.28.1
+httpx-oauth==0.16.1
 idna==3.10
 kombu==5.5.4
 makefun==1.16.0
@@ -34,6 +38,8 @@ MarkupSafe==3.0.2
 multidict==6.6.4
 packaging==25.0
 pamqp==3.3.0
+prometheus-fastapi-instrumentator==7.1.0
+prometheus_client==0.23.1
 prompt_toolkit==3.0.52
 propcache==0.3.2
 pwdlib==0.2.1
@@ -46,14 +52,17 @@ python-dateutil==2.9.0.post0
 python-dotenv==1.1.1
 python-multipart==0.0.20
 PyYAML==6.0.2
+sentry-sdk==2.42.0
 six==1.17.0
 sniffio==1.3.1
 SQLAlchemy==2.0.43
+SQLAlchemy-Utils==0.42.0
 starlette==0.48.0
 tomli==2.2.1
 typing-inspection==0.4.1
 typing_extensions==4.15.0
 tzdata==2025.2
+urllib3==2.5.0
 uvicorn==0.37.0
 uvloop==0.21.0
 vine==5.1.0
@@ -61,3 +70,4 @@ watchfiles==1.1.0
 wcwidth==0.2.14
 websockets==15.0.1
 yarl==1.20.1
+python-json-logger==2.0.7
113
7project/src/backend/test_locally.sh
Executable file
@@ -0,0 +1,113 @@
#!/usr/bin/env bash
set -euo pipefail

# Run tests against a disposable local MariaDB on host port 3307 using Docker Compose.
# Requirements: Docker, docker compose plugin, Python, Alembic, pytest.
# Usage:
#   chmod +x ./test_locally.sh
#   # From 7project/backend directory
#   ./test_locally.sh [--only-unit|--only-integration|--only-e2e] [pytest-args...]
#   # Examples:
#   ./test_locally.sh --only-unit -q
#   ./test_locally.sh --only-integration -k "login"
#   ./test_locally.sh --only-e2e -vv
#
# This script will:
#   1) Start a MariaDB 11.4 container (ephemeral storage, port 3307)
#   2) Wait until it's healthy
#   3) Export env vars expected by the app (DATABASE_URL etc.)
#   4) Run Alembic migrations
#   5) Run pytest
#   6) Tear everything down (containers and tmpfs data)

COMPOSE_FILE="docker-compose.test.yml"
SERVICE_NAME="mariadb"
CONTAINER_NAME="test-mariadb"

if ! command -v docker >/dev/null 2>&1; then
  echo "Docker is required but not found in PATH" >&2
  exit 1
fi
if ! docker compose version >/dev/null 2>&1; then
  echo "Docker Compose V2 plugin is required (docker compose)" >&2
  exit 1
fi

# Bring up the DB
echo "Starting MariaDB (port 3307) with docker compose..."
docker compose -f "$COMPOSE_FILE" up -d

# Ensure we clean up on exit
cleanup() {
  echo "\nTearing down docker compose stack..."
  docker compose -f "$COMPOSE_FILE" down -v || true
}
trap cleanup EXIT

# Wait for healthy container
echo -n "Waiting for MariaDB to become healthy"
for i in {1..60}; do
  status=$(docker inspect -f '{{.State.Health.Status}}' "$CONTAINER_NAME" 2>/dev/null || echo "")
  if [ "$status" = "healthy" ]; then
    echo " -> healthy"
    break
  fi
  echo -n "."
  sleep 1
  if [ $i -eq 60 ]; then
    echo "\nMariaDB did not become healthy in time" >&2
    exit 1
  fi
done

# Export env vars for the app/tests (match app/core/db.py expectations)
export MARIADB_HOST=127.0.0.1
export MARIADB_PORT=3307
export MARIADB_DB=group_project
export MARIADB_USER=appuser
export MARIADB_PASSWORD=apppass
export DATABASE_URL="mysql+asyncmy://$MARIADB_USER:$MARIADB_PASSWORD@$MARIADB_HOST:$MARIADB_PORT/$MARIADB_DB"
export PYTEST_RUN_CONFIG="True"

# Determine which tests to run based on flags
UNIT_TESTS="tests/test_unit_user_service.py"
INTEGRATION_TESTS="tests/test_integration_app.py"
E2E_TESTS="tests/test_e2e.py"

FLAG_COUNT=0
TEST_TARGET=""
declare -a PYTEST_ARGS=()
for arg in "$@"; do
  case "$arg" in
    --only-unit)
      TEST_TARGET="$UNIT_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
    --only-integration)
      TEST_TARGET="$INTEGRATION_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
    --only-e2e)
      TEST_TARGET="$E2E_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
    *)
      PYTEST_ARGS+=("$arg");;
  esac
done

if [ "$FLAG_COUNT" -gt 1 ]; then
  echo "Error: Use only one of --only-unit, --only-integration, or --only-e2e" >&2
  exit 2
fi

# Run Alembic migrations then tests
pushd . >/dev/null
echo "Running Alembic migrations..."
alembic upgrade head

echo "Running pytest..."
if [ -n "$TEST_TARGET" ]; then
  # Use "${PYTEST_ARGS[@]:-}" to safely expand empty array with 'set -u'
  pytest "$TEST_TARGET" "${PYTEST_ARGS[@]:-}"
else
  # Use "${PYTEST_ARGS[@]:-}" to safely expand empty array with 'set -u'
  pytest "${PYTEST_ARGS[@]:-}"
fi
popd >/dev/null

# Cleanup handled by trap
44
7project/src/backend/tests/conftest.py
Normal file
@@ -0,0 +1,44 @@
import sys
import uuid
import types
import pytest
from fastapi.testclient import TestClient
from httpx import AsyncClient, ASGITransport

# Stub sentry_sdk to avoid optional dependency issues during import of app
stub = types.ModuleType("sentry_sdk")
stub.init = lambda *args, **kwargs: None
sys.modules.setdefault("sentry_sdk", stub)

# Import the FastAPI application
from app.app import fastApi as app  # noqa: E402


@pytest.fixture(scope="session")
def fastapi_app():
    return app


@pytest.fixture(scope="session")
def client(fastapi_app):
    return TestClient(fastapi_app, raise_server_exceptions=True)


@pytest.fixture(scope="function")
async def test_user(fastapi_app):
    """
    Creates a new user asynchronously and returns their credentials.
    Does NOT log them in.
    Using AsyncClient with ASGITransport avoids event loop conflicts with DB connections.
    """
    unique_email = f"testuser_{uuid.uuid4()}@example.com"
    password = "a_strong_password"
    user_payload = {"email": unique_email, "password": password}

    transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        response = await ac.post("/auth/register", json=user_payload)
        assert response.status_code == 201

    return {"username": unique_email, "password": password}
210
7project/src/backend/tests/test_e2e.py
Normal file
@@ -0,0 +1,210 @@
import pytest
import uuid
from httpx import AsyncClient, ASGITransport
from fastapi import status


def test_e2e(client):
    # 1) Service is alive
    alive = client.get("/")
    assert alive.status_code == status.HTTP_200_OK

    # 2) Attempt to login without payload should fail fast (validation error)
    login = client.post("/auth/jwt/login")
    assert login.status_code in (status.HTTP_400_BAD_REQUEST, status.HTTP_422_UNPROCESSABLE_CONTENT)

    # 3) Protected endpoint should not be accessible without token
    me = client.get("/users/me")
    assert me.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)


@pytest.mark.asyncio
async def test_e2e_full_user_lifecycle(fastapi_app, test_user):
    # Use an AsyncClient with ASGITransport for async tests
    transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        login_payload = test_user

        # 1. Log in with the new credentials
        login_resp = await ac.post("/auth/jwt/login", data=login_payload)
        assert login_resp.status_code == status.HTTP_200_OK
        token = login_resp.json()["access_token"]
        headers = {"Authorization": f"Bearer {token}"}

        # 2. Access a protected endpoint
        me_resp = await ac.get("/users/me", headers=headers)
        assert me_resp.status_code == status.HTTP_200_OK
        assert me_resp.json()["email"] == test_user["username"]

        # 3. Update the user's profile
        update_payload = {"first_name": "Test"}
        patch_resp = await ac.patch("/users/me", json=update_payload, headers=headers)
        assert patch_resp.status_code == status.HTTP_200_OK
        assert patch_resp.json()["first_name"] == "Test"

        # 4. Log out
        logout_resp = await ac.post("/auth/jwt/logout", headers=headers)
        assert logout_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)

        # 5. Verify token is invalid
        me_again_resp = await ac.get("/users/me", headers=headers)
        assert me_again_resp.status_code == status.HTTP_401_UNAUTHORIZED


@pytest.mark.asyncio
async def test_e2e_transaction_workflow(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        # 1. Log in to get the token
        login_resp = await ac.post("/auth/jwt/login", data=test_user)
        token = login_resp.json()["access_token"]
        headers = {"Authorization": f"Bearer {token}"}

        # NEW STEP: Create a category first to get a valid ID
        category_payload = {"name": "Test Category for E2E"}
        create_category_resp = await ac.post("/categories/create", json=category_payload, headers=headers)
        assert create_category_resp.status_code == status.HTTP_201_CREATED
        category_id = create_category_resp.json()["id"]

        # 2. Create a new transaction
        tx_payload = {"amount": -55.40, "description": "Milk and eggs"}
        tx_resp = await ac.post("/transactions/create", json=tx_payload, headers=headers)
        assert tx_resp.status_code == status.HTTP_201_CREATED
        tx_id = tx_resp.json()["id"]

        # 3. Assign the category
        assign_resp = await ac.post(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
        assert assign_resp.status_code == status.HTTP_200_OK

        # 4. Verify assignment
        get_tx_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
        assert category_id in get_tx_resp.json()["category_ids"]

        # 5. Unassign the category
        unassign_resp = await ac.delete(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
        assert unassign_resp.status_code == status.HTTP_200_OK

        # 6. Get the transaction again and verify the category is gone
        get_tx_again_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
        final_tx_data = get_tx_again_resp.json()
        assert category_id not in final_tx_data["category_ids"]

        # 7. Delete the transaction for cleanup
        delete_resp = await ac.delete(f"/transactions/{tx_id}/delete", headers=headers)
        assert delete_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)

        # NEW STEP: Clean up the created category
        delete_category_resp = await ac.delete(f"/categories/{category_id}", headers=headers)
        assert delete_category_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)


@pytest.mark.asyncio
async def test_register_then_login_and_fetch_me(fastapi_app):
    transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        # Use unique email to avoid duplicates across runs
        suffix = uuid.uuid4().hex[:8]
        email = f"newuser_{suffix}@example.com"
        password = "StrongPassw0rd!"

        reg = await ac.post("/auth/register", json={"email": email, "password": password})
        assert reg.status_code in (status.HTTP_201_CREATED, status.HTTP_200_OK)

        login = await ac.post("/auth/jwt/login", data={"username": email, "password": password})
        assert login.status_code == status.HTTP_200_OK
        token = login.json()["access_token"]
        headers = {"Authorization": f"Bearer {token}"}
        try:
            me = await ac.get("/users/me", headers=headers)
            assert me.status_code == status.HTTP_200_OK
            assert me.json()["email"] == email
        finally:
            # Cleanup: delete the created user so future runs won't conflict
            d = await ac.delete("/users/me", headers=headers)
            assert d.status_code == status.HTTP_204_NO_CONTENT


@pytest.mark.asyncio
async def test_delete_current_user_revokes_access(fastapi_app):
    transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        email = "todelete@example.com"
        password = "Passw0rd!"
        reg = await ac.post("/auth/register", json={"email": email, "password": password})
        assert reg.status_code in (status.HTTP_200_OK, status.HTTP_201_CREATED)

        login = await ac.post("/auth/jwt/login", data={"username": email, "password": password})
        token = login.json()["access_token"]
        headers = {"Authorization": f"Bearer {token}"}

        # Delete self
        d = await ac.delete("/users/me", headers=headers)
        assert d.status_code == status.HTTP_204_NO_CONTENT

        # Access should now fail
        me = await ac.get("/users/me", headers=headers)
        assert me.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)


@pytest.mark.asyncio
async def test_update_category_conflict_and_404(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
        h = {"Authorization": f"Bearer {token}"}

        a = (await ac.post("/categories/create", json={"name": "A"}, headers=h)).json()
        b = (await ac.post("/categories/create", json={"name": "B"}, headers=h)).json()

        # Attempt to rename A -> B should conflict
        conflict = await ac.patch(f"/categories/{a['id']}", json={"name": "B"}, headers=h)
        assert conflict.status_code == status.HTTP_409_CONFLICT

        # Update non-existent
        missing = await ac.patch("/categories/999999", json={"name": "Z"}, headers=h)
        assert missing.status_code == status.HTTP_404_NOT_FOUND


@pytest.mark.asyncio
async def test_category_cross_user_isolation(fastapi_app):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        # Generate unique emails for both users
        sfx = uuid.uuid4().hex[:8]
        u1 = {"email": f"u1_{sfx}@example.com", "password": "Aaaaaa1!"}
        u2 = {"email": f"u2_{sfx}@example.com", "password": "Aaaaaa1!"}

        # user1
        assert (await ac.post("/auth/register", json=u1)).status_code in (200, 201)
        t1 = (await ac.post("/auth/jwt/login", data={"username": u1["email"], "password": u1["password"]})).json()["access_token"]
        h1 = {"Authorization": f"Bearer {t1}"}

        # user1 creates a category
        c = (await ac.post("/categories/create", json={"name": "Private"}, headers=h1)).json()
        cat_id = c["id"]

        # user2
        assert (await ac.post("/auth/register", json=u2)).status_code in (200, 201)
        t2 = (await ac.post("/auth/jwt/login", data={"username": u2["email"], "password": u2["password"]})).json()["access_token"]
        h2 = {"Authorization": f"Bearer {t2}"}

        try:
            # user2 cannot read/delete user1's category
            g = await ac.get(f"/categories/{cat_id}", headers=h2)
            assert g.status_code == status.HTTP_404_NOT_FOUND
            d = await ac.delete(f"/categories/{cat_id}", headers=h2)
            assert d.status_code == status.HTTP_404_NOT_FOUND
        finally:
            # Cleanup: remove the created category as its owner
            try:
                _ = await ac.delete(f"/categories/{cat_id}", headers=h1)
            except Exception:
                pass
            # Cleanup: delete both users to avoid email conflicts later
            try:
                _ = await ac.delete("/users/me", headers=h1)
            except Exception:
                pass
            try:
                _ = await ac.delete("/users/me", headers=h2)
            except Exception:
                pass
159
7project/src/backend/tests/test_integration_app.py
Normal file
@@ -0,0 +1,159 @@
from fastapi import status
import pytest
from httpx import AsyncClient, ASGITransport


@pytest.mark.asyncio
async def test_create_and_get_category(fastapi_app, test_user):
    # Use AsyncClient for async tests
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        # 1. Log in to get an auth token
        login_resp = await ac.post("/auth/jwt/login", data=test_user)
        token = login_resp.json()["access_token"]
        headers = {"Authorization": f"Bearer {token}"}

        # 2. Define and create the new category
        category_name = "Async Integration Test"
        category_payload = {"name": category_name}
        create_resp = await ac.post("/categories/create", json=category_payload, headers=headers)

        # 3. Assert creation was successful
        assert create_resp.status_code == status.HTTP_201_CREATED
        created_data = create_resp.json()
        category_id = created_data["id"]
        assert created_data["name"] == category_name

        # 4. GET the list of categories to verify
        list_resp = await ac.get("/categories/", headers=headers)
        assert list_resp.status_code == status.HTTP_200_OK

        # 5. Check that our new category is in the list
        categories_list = list_resp.json()
        assert any(cat["name"] == category_name for cat in categories_list)

        delete_resp = await ac.delete(f"/categories/{category_id}", headers=headers)
        assert delete_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)


@pytest.mark.asyncio
async def test_create_transaction_missing_amount_fails(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        # 1. Log in to get an auth token
        login_resp = await ac.post("/auth/jwt/login", data=test_user)
        token = login_resp.json()["access_token"]
        headers = {"Authorization": f"Bearer {token}"}

        # 2. Define an invalid payload
        invalid_payload = {"description": "This should fail"}

        # 3. Attempt to create the transaction
        resp = await ac.post("/transactions/create", json=invalid_payload, headers=headers)

        # 4. Assert the expected validation error
        assert resp.status_code == status.HTTP_422_UNPROCESSABLE_CONTENT


@pytest.mark.asyncio
async def test_login_invalid_credentials(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        bad = await ac.post("/auth/jwt/login", data={"username": test_user["username"], "password": "nope"})
        assert bad.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_400_BAD_REQUEST)
        unknown = await ac.post("/auth/jwt/login", data={"username": "nouser@example.com", "password": "x"})
        assert unknown.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_400_BAD_REQUEST)


@pytest.mark.asyncio
async def test_category_duplicate_name_conflict(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
        h = {"Authorization": f"Bearer {token}"}

        p = {"name": "Food"}
        r1 = await ac.post("/categories/create", json=p, headers=h)
        assert r1.status_code == status.HTTP_201_CREATED
        r2 = await ac.post("/categories/create", json=p, headers=h)
        assert r2.status_code == status.HTTP_409_CONFLICT


@pytest.mark.asyncio
async def test_create_transaction_invalid_date_format(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
        h = {"Authorization": f"Bearer {token}"}
        bad = await ac.post("/transactions/create", json={"amount": 10, "description": "x", "date": "31-12-2024"}, headers=h)
        assert bad.status_code == status.HTTP_400_BAD_REQUEST


@pytest.mark.asyncio
async def test_update_transaction_rejects_duplicate_category_ids(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
        h = {"Authorization": f"Bearer {token}"}
        tx = (await ac.post("/transactions/create", json={"amount": 5, "description": "x"}, headers=h)).json()
        dup = await ac.patch(f"/transactions/{tx['id']}/edit", json={"category_ids": [1, 1]}, headers=h)
        assert dup.status_code == status.HTTP_400_BAD_REQUEST


@pytest.mark.asyncio
async def test_assign_unassign_category_not_found_cases(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
        h = {"Authorization": f"Bearer {token}"}

        # Create tx and category
        tx = (await ac.post("/transactions/create", json={"amount": 1, "description": "a"}, headers=h)).json()
        cat = (await ac.post("/categories/create", json={"name": "X"}, headers=h)).json()

        # Missing transaction
        r1 = await ac.post(f"/transactions/999999/categories/{cat['id']}", headers=h)
        assert r1.status_code == status.HTTP_404_NOT_FOUND

        # Missing category
        r2 = await ac.post(f"/transactions/{tx['id']}/categories/999999", headers=h)
        assert r2.status_code == status.HTTP_404_NOT_FOUND


@pytest.mark.asyncio
async def test_transactions_date_filter_and_balance_series(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
        h = {"Authorization": f"Bearer {token}"}

        # Seed transactions spanning days
        data = [
            {"amount": 100, "description": "day1", "date": "2024-01-01"},
            {"amount": -25, "description": "day2", "date": "2024-01-02"},
            {"amount": 50, "description": "day3", "date": "2024-01-03"},
        ]
        for p in data:
            r = await ac.post("/transactions/create", json=p, headers=h)
            assert r.status_code == status.HTTP_201_CREATED

        # Filtered list (2nd and 3rd only)
        lst = await ac.get("/transactions/", params={"start_date": "2024-01-02", "end_date": "2024-01-03"}, headers=h)
        assert lst.status_code == status.HTTP_200_OK
        assert len(lst.json()) == 2

        # Balance series should be cumulative per date
        series = await ac.get("/transactions/balance_series", headers=h)
        assert series.status_code == status.HTTP_200_OK
        s = series.json()
        assert s == [
            {"date": "2024-01-01", "balance": 100.0},
            {"date": "2024-01-02", "balance": 75.0},
            {"date": "2024-01-03", "balance": 125.0},
        ]


@pytest.mark.asyncio
async def test_delete_transaction_not_found(fastapi_app, test_user):
    transport = ASGITransport(app=fastapi_app)
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
        h = {"Authorization": f"Bearer {token}"}
        r = await ac.delete("/transactions/9999999/delete", headers=h)
        assert r.status_code == status.HTTP_404_NOT_FOUND
63
7project/src/backend/tests/test_unit_user_service.py
Normal file
63
7project/src/backend/tests/test_unit_user_service.py
Normal file
@@ -0,0 +1,63 @@
import pytest
from fastapi import status

from app.services import user_service


def test_get_oauth_provider_known_unknown():
    # Known providers should return a provider instance
    bankid = user_service.get_oauth_provider("BankID")
    mojeid = user_service.get_oauth_provider("MojeID")
    assert bankid is not None
    assert mojeid is not None

    # Unknown should return None
    assert user_service.get_oauth_provider("DoesNotExist") is None


def test_get_jwt_strategy_lifetime():
    strategy = user_service.get_jwt_strategy()
    assert strategy is not None
    # Basic smoke check: strategy lifetime is 604800 seconds (7 days)
    assert getattr(strategy, "lifetime_seconds", None) == 604800


def test_root_ok(client):
    resp = client.get("/")
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == {"status": "ok"}


def test_authenticated_route_requires_auth(client):
    resp = client.get("/authenticated-route")
    assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)


@pytest.mark.asyncio
async def test_on_after_request_verify_enqueues_email(monkeypatch):
    calls = {}

    class FakeCeleryTask:
        @staticmethod
        def delay(to: str, subject: str, body: str):
            calls.setdefault("emails", []).append({
                "to": to,
                "subject": subject,
                "body": body,
            })

    # Patch the send_email Celery task used inside user_service
    monkeypatch.setattr(user_service, "send_email", FakeCeleryTask)

    class DummyUser:
        def __init__(self, email):
            self.email = email

    mgr = user_service.UserManager(user_db=None)  # user_db not needed for this method
    user = DummyUser("test@example.com")

    # Call the hook
    await mgr.on_after_request_verify(user, token="abc123", request=None)

    # Verify one email has been enqueued with expected content
    assert len(calls.get("emails", [])) == 1
    email = calls["emails"][0]
    assert email["to"] == "test@example.com"
    assert "ověření účtu" in email["subject"].lower()
    assert "abc123" in email["body"]
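For orientation, a minimal sketch of the hook shape this test exercises, assuming user_service.send_email is a Celery task whose .delay(to, subject, body) call enqueues the message. The class names, subject, and body wording below are illustrative assumptions, not the project's actual implementation.

class SendEmailStub:
    # Stand-in for the Celery task interface the test fakes: .delay(to, subject, body).
    @staticmethod
    def delay(to: str, subject: str, body: str) -> None:
        print(f"queued mail to {to}: {subject}")

send_email = SendEmailStub()

class UserManagerSketch:
    async def on_after_request_verify(self, user, token: str, request=None) -> None:
        # Enqueue the verification e-mail instead of sending it inline, so the
        # HTTP request returns quickly and the Celery worker does the sending.
        send_email.delay(
            user.email,
            "Ověření účtu",  # subject; the test only checks this substring case-insensitively
            f"Verification token: {token}",  # body must contain the token, as asserted above
        )

A caller would exercise it with, for example, asyncio.run(UserManagerSketch().on_after_request_verify(user, token="abc123")).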
7project/src/charts/README.md (new file, 30 lines)
@@ -0,0 +1,30 @@
# Helm chart deployment

This directory contains a Helm chart for deploying the app to a cluster. It supports both production and preview deployments.

## Directory Structure

- `myapp-chart/`
  - `templates/`
    - `app-deployment.yaml` - Kubernetes Deployment for the application
    - `cron.yaml` - CronJob for periodic tasks; periodically calls an app endpoint
    - `database.yaml` - Creates the database using the MariaDB operator. The production database is kept, but the preview/dev database is dropped after uninstalling the chart.
    - `database-grant.yaml` - Defines rights for the database user
    - `database-user.yaml` - Creates the database user
    - `monitoring.yaml` - Adds the /metrics endpoint to Prometheus scraping
    - `prod.yaml` - Application secrets
    - `rabbitmq-cluster.yaml` - Defines the RabbitMQ cluster for this deployment
    - `rabbitmq-permission.yalm` - Defines RabbitMQ user permissions
    - `rabbitmq-queue.yaml` - Defines the RabbitMQ queue
    - `rabbitmq-user.yaml` - Defines the RabbitMQ user
    - `rabbitmq-user-secret.yaml` - Defines the RabbitMQ user secret
    - `service.yaml` - Kubernetes Service for the application
    - `tunnel.yaml` - Cloudflare tunnel for accessing the application
    - `worker-deployment.yaml` - Kubernetes Deployment for the Celery worker; uses the same image as the app deployment but with a different entrypoint
  - `Chart.yaml` - Helm chart metadata
  - `values.yaml` - List of all configurable values
  - `values-dev.yaml` - Default values for development/preview deployment
  - `values-prod.yaml` - Default values for production deployment
7project/src/charts/myapp-chart/Chart.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
apiVersion: v2
name: myapp-chart
version: 0.1.0
description: Helm chart for my app with MariaDB Database CR
appVersion: "1.0.0"
type: application
7project/src/charts/myapp-chart/templates/app-deployment.yaml (new file, 129 lines)
@@ -0,0 +1,129 @@
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ .Values.app.name }}
spec:
  replicas: {{ .Values.app.replicas }}
  revisionHistoryLimit: 3
  selector:
    matchLabels:
      app: {{ .Values.app.name }}
      endpoint: metrics
  template:
    metadata:
      labels:
        app: {{ .Values.app.name }}
        endpoint: metrics
    spec:
      containers:
        - name: {{ .Values.app.name }}
          image: "{{- if .Values.image.digest -}}{{ .Values.image.repository }}@{{ .Values.image.digest }}{{- else -}}{{ .Values.image.repository }}:{{ default "latest" .Values.image.tag }}{{- end -}}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          securityContext:
            allowPrivilegeEscalation: false
            capabilities:
              drop: [ "ALL" ]
          ports:
            - containerPort: {{ .Values.app.port }}
          env:
            - name: MARIADB_HOST
              value: "mariadb-repl-maxscale-internal.mariadb-operator.svc.cluster.local"
            - name: MARIADB_PORT
              value: '3306'
            - name: MARIADB_DB
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: MARIADB_DB
            - name: MARIADB_USER
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: MARIADB_USER
            - name: MARIADB_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: MARIADB_PASSWORD
            - name: RABBITMQ_USERNAME
              value: {{ .Values.rabbitmq.username | quote }}
            - name: RABBITMQ_PASSWORD
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: RABBITMQ_PASSWORD
            - name: RABBITMQ_HOST
              value: {{ printf "%s.%s.svc.cluster.local" "rabbitmq-cluster" .Release.Namespace | quote }}
            - name: RABBITMQ_PORT
              value: {{ .Values.rabbitmq.port | quote }}
            - name: RABBITMQ_VHOST
              value: {{ .Values.rabbitmq.vhost | default "/" | quote }}
            - name: MAIL_QUEUE
              value: {{ .Values.worker.mailQueueName | default "mail_queue" | quote }}
            - name: MOJEID_CLIENT_ID
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: MOJEID_CLIENT_ID
            - name: MOJEID_CLIENT_SECRET
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: MOJEID_CLIENT_SECRET
            - name: BANKID_CLIENT_ID
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: BANKID_CLIENT_ID
            - name: BANKID_CLIENT_SECRET
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: BANKID_CLIENT_SECRET
            - name: CSAS_CLIENT_ID
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: CSAS_CLIENT_ID
            - name: CSAS_CLIENT_SECRET
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: CSAS_CLIENT_SECRET
            - name: UNIRATE_API_KEY
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: UNIRATE_API_KEY
            - name: DOMAIN
              value: {{ required "Set .Values.domain" .Values.domain | quote }}
            - name: DOMAIN_SCHEME
              value: {{ required "Set .Values.domain_scheme" .Values.domain_scheme | quote }}
            - name: FRONTEND_DOMAIN
              value: {{ required "Set .Values.frontend_domain" .Values.frontend_domain | quote }}
            - name: FRONTEND_DOMAIN_SCHEME
              value: {{ required "Set .Values.frontend_domain_scheme" .Values.frontend_domain_scheme | quote }}
            - name: SENTRY_DSN
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: SENTRY_DSN
            - name: DB_ENCRYPTION_KEY
              valueFrom:
                secretKeyRef:
                  name: prod
                  key: DB_ENCRYPTION_KEY
          livenessProbe:
            httpGet:
              path: /
              port: {{ .Values.app.port }}
            initialDelaySeconds: 10
            periodSeconds: 10
            failureThreshold: 3
          readinessProbe:
            httpGet:
              path: /
              port: {{ .Values.app.port }}
            initialDelaySeconds: 10
            periodSeconds: 10
            failureThreshold: 3
7project/src/charts/myapp-chart/templates/cron.yaml (new file, 25 lines)
@@ -0,0 +1,25 @@
{{- if .Values.cron.enabled }}
apiVersion: batch/v1
kind: CronJob
metadata:
  name: cronjob
spec:
  schedule: {{ .Values.cron.schedule | quote }}
  concurrencyPolicy: {{ .Values.cron.concurrencyPolicy | quote }}
  jobTemplate:
    spec:
      template:
        spec:
          containers:
            - name: cronjob
              image: curlimages/curl:latest
              imagePullPolicy: IfNotPresent
              args:
                - -sS
                - -o
                - /dev/null
                - -w
                - "%{http_code}"
                - {{ printf "%s://%s.%s.svc.cluster.local%s" .Values.cron.scheme .Values.app.name .Release.Namespace .Values.cron.endpoint | quote }}
          restartPolicy: OnFailure
{{- end }}
@@ -0,0 +1,18 @@
apiVersion: k8s.mariadb.com/v1alpha1
kind: Grant
metadata:
  name: grant
spec:
  mariaDbRef:
    name: {{ .Values.mariadb.mariaDbRef.name }}
    namespace: {{ .Values.mariadb.mariaDbRef.namespace }}
  privileges:
    - "ALL PRIVILEGES"
  database: {{ required "Set .Values.deployment" .Values.deployment | quote }}
  table: "*"
  username: {{ required "Set .Values.deployment" .Values.deployment | quote }}
  grantOption: true
  host: "%"
  cleanupPolicy: {{ .Values.mariadb.cleanupPolicy }}
  requeueInterval: {{ .Values.mariadb.requeueInterval | quote }}
  retryInterval: {{ .Values.mariadb.retryInterval | quote }}
@@ -0,0 +1,7 @@
apiVersion: v1
kind: Secret
metadata:
  name: {{ required "Set .Values.database.secretName" .Values.database.secretName }}
type: kubernetes.io/basic-auth
stringData:
  password: {{ required "Set .Values.database.password" .Values.database.password | quote }}
Some files were not shown because too many files have changed in this diff.