mirror of
https://github.com/dat515-2025/Group-8.git
synced 2026-03-22 15:12:08 +01:00
Compare commits
153 Commits
396047574a
...
merge/core
| Author | SHA1 | Date | |
|---|---|---|---|
| c689caea88 | |||
| 8c20deb690 | |||
| 39979b51ee | |||
| da0c77101d | |||
| a5a83e5d07 | |||
| 3749aa4525 | |||
| 94aa64addc | |||
| ba1677b2d3 | |||
|
|
8ea1ef9eea | ||
|
|
4b614902b2 | ||
| a152ecbe4d | |||
| 7d7dd98d0f | |||
| 5aca071ac2 | |||
|
|
80991c7390 | ||
|
|
1403e0029b | ||
| aa63e51e6a | |||
|
|
4aaaba3956 | ||
|
|
f0c28ba9e1 | ||
|
|
b560c07d62 | ||
|
|
f0b1452e30 | ||
| 6effb2793a | |||
|
|
ba7798259c | ||
| deb67f421e | |||
| 74557eeea8 | |||
| 2e0619d03f | |||
| 31add42d6d | |||
| 4de79169a2 | |||
| 59d53967b0 | |||
| f3086f8c73 | |||
|
|
fd437b1caf | ||
| 96ebc27001 | |||
|
|
922651fdbf | ||
|
|
e164b185e0 | ||
|
|
186b4fd09a | ||
|
|
280d495335 | ||
|
|
e73233c90a | ||
|
|
aade78bf3f | ||
|
|
50e489a8e0 | ||
|
|
1679abb71f | ||
| 573404dead | |||
| d57dd82a64 | |||
| 50f37c1161 | |||
| ae22d2ee5f | |||
| 509608f8c9 | |||
| ed723d1d13 | |||
| b0dee5e289 | |||
| 640da2ee04 | |||
| ab9aefd140 | |||
|
|
4eaf46e77e | ||
|
|
a30ae4d010 | ||
|
|
ef26e88713 | ||
|
|
2e1dddb4f8 | ||
|
|
25e587cea8 | ||
|
|
3cdefc33fc | ||
|
|
5954e56956 | ||
|
|
8575ef8ff5 | ||
| c53e314b2a | |||
| c0bc44622f | |||
| 3d31ff4631 | |||
|
|
8b92b9bd18 | ||
|
|
3d26ed6a62 | ||
|
|
67b44539f2 | ||
|
|
ff9cc712db | ||
| dc7ce9e6a1 | |||
| 188cdf5727 | |||
| 4cf0d2a981 | |||
| 9986cce8f9 | |||
| b3b5717e9e | |||
|
|
1da927dc07 | ||
| 537d050080 | |||
| 1e4f342176 | |||
| c62e0adcf3 | |||
| 24d86abfc4 | |||
| 21305f18e2 | |||
| e708f7b18b | |||
| f58083870f | |||
| ca8287cd8b | |||
|
|
ed3e6329dd | ||
|
|
a214e2cd8b | ||
| 6c8d2202b5 | |||
|
|
b480734fee | ||
|
|
8b301c386e | ||
|
|
733e7a8918 | ||
|
|
524e7a6f98 | ||
|
|
0c9882e9b3 | ||
|
|
72494c4aae | ||
|
|
60560dea99 | ||
|
|
a9b2aba55a | ||
|
|
36b1fe887b | ||
|
|
8543c72730 | ||
| 24087c2810 | |||
|
|
6818b1f649 | ||
| c864e753c9 | |||
| b4a453be04 | |||
| d290664352 | |||
| 008f111fa7 | |||
| ece2c4d4c5 | |||
| 2d0d309d2b | |||
| 7f8dd2e846 | |||
| e0c18912f3 | |||
| 99384aeb0a | |||
| 912697b046 | |||
|
|
356e1d868c | ||
|
|
14397b8a25 | ||
|
|
5671f97120 | ||
|
|
b02c502b4f | ||
| ff118603db | |||
|
|
3ee2abefd0 | ||
|
|
4a8edf6eb8 | ||
| a97f0f7097 | |||
|
|
c74462b82f | ||
|
|
a96514f795 | ||
|
|
4c9879cebf | ||
|
|
d9c562f867 | ||
|
|
dddca9d805 | ||
|
|
483a859b4b | ||
|
|
7529c9b265 | ||
| d6a913a896 | |||
|
|
2ca8a3b576 | ||
|
|
52f6bd6a53 | ||
| d8ea25943c | |||
| 06dcccb321 | |||
| e916a57e4e | |||
| 7d2e94e683 | |||
|
|
55f8e38376 | ||
| 3348e0a035 | |||
|
|
542b05d541 | ||
|
|
65957d78ec | ||
|
|
edb4dfd147 | ||
|
|
cf1d520a30 | ||
|
|
4aa299d77d | ||
|
|
e460f647b2 | ||
|
|
b0cd7030d8 | ||
|
|
eb7b2290b8 | ||
|
|
584c090b80 | ||
|
|
4f6d46ba7e | ||
|
|
9fc8601e4d | ||
|
|
e488771cc7 | ||
|
|
77992bab17 | ||
|
|
6972a03090 | ||
|
|
6d7f834808 | ||
|
|
d5611e3e92 | ||
|
|
5ecfc62b02 | ||
| d0cbec5fca | |||
|
|
82eb34c6e6 | ||
|
|
cddc1d3a9f | ||
|
|
e78b8c2e6b | ||
|
|
aade88beb9 | ||
|
|
5305531950 | ||
|
|
6d8a6a55c0 | ||
|
|
40d07677bd | ||
|
|
76eb2cce41 | ||
|
|
391e9da0c4 |
4
.github/workflows/build-image.yaml
vendored
4
.github/workflows/build-image.yaml
vendored
@@ -15,7 +15,7 @@ on:
|
|||||||
context:
|
context:
|
||||||
description: "Docker build context path"
|
description: "Docker build context path"
|
||||||
required: false
|
required: false
|
||||||
default: "7project/backend"
|
default: "7project/src/backend"
|
||||||
type: string
|
type: string
|
||||||
pr_number:
|
pr_number:
|
||||||
description: "PR number (required when mode=pr)"
|
description: "PR number (required when mode=pr)"
|
||||||
@@ -94,7 +94,7 @@ jobs:
|
|||||||
tags: |
|
tags: |
|
||||||
${{ env.IMAGE_REPO }}:${{ env.TAG1 }}
|
${{ env.IMAGE_REPO }}:${{ env.TAG1 }}
|
||||||
${{ env.IMAGE_REPO }}:${{ env.TAG2 }}
|
${{ env.IMAGE_REPO }}:${{ env.TAG2 }}
|
||||||
platforms: linux/amd64
|
platforms: linux/arm64,linux/amd64
|
||||||
|
|
||||||
- name: Set outputs
|
- name: Set outputs
|
||||||
id: set
|
id: set
|
||||||
|
|||||||
21
.github/workflows/deploy-pr.yaml
vendored
21
.github/workflows/deploy-pr.yaml
vendored
@@ -9,6 +9,11 @@ permissions:
|
|||||||
pull-requests: write
|
pull-requests: write
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
test:
|
||||||
|
name: Run Python Tests
|
||||||
|
if: github.event.action != 'closed'
|
||||||
|
uses: ./.github/workflows/run-tests.yml
|
||||||
|
|
||||||
build:
|
build:
|
||||||
if: github.event.action != 'closed'
|
if: github.event.action != 'closed'
|
||||||
name: Build and push image (reusable)
|
name: Build and push image (reusable)
|
||||||
@@ -16,7 +21,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
mode: pr
|
mode: pr
|
||||||
image_repo: lukastrkan/cc-app-demo
|
image_repo: lukastrkan/cc-app-demo
|
||||||
context: 7project/backend
|
context: 7project/src/backend
|
||||||
pr_number: ${{ github.event.pull_request.number }}
|
pr_number: ${{ github.event.pull_request.number }}
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
@@ -28,7 +33,7 @@ jobs:
|
|||||||
runner: vhs
|
runner: vhs
|
||||||
mode: pr
|
mode: pr
|
||||||
pr_number: ${{ github.event.pull_request.number }}
|
pr_number: ${{ github.event.pull_request.number }}
|
||||||
base_domain: ${{ vars.DEV_BASE_DOMAIN }}
|
base_domain: ${{ vars.PROD_DOMAIN }}
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
frontend:
|
frontend:
|
||||||
@@ -72,7 +77,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Helm upgrade/install PR preview
|
- name: Helm upgrade/install PR preview
|
||||||
env:
|
env:
|
||||||
DEV_BASE_DOMAIN: ${{ secrets.BASE_DOMAIN }}
|
DEV_BASE_DOMAIN: ${{ vars.BASE_DOMAIN }}
|
||||||
RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
|
RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
|
||||||
DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
|
DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
|
||||||
DIGEST: ${{ needs.build.outputs.digest }}
|
DIGEST: ${{ needs.build.outputs.digest }}
|
||||||
@@ -80,13 +85,14 @@ jobs:
|
|||||||
DOMAIN_SCHEME: "${{ needs.get_urls.outputs.backend_url_scheme }}"
|
DOMAIN_SCHEME: "${{ needs.get_urls.outputs.backend_url_scheme }}"
|
||||||
FRONTEND_DOMAIN: "${{ needs.get_urls.outputs.frontend_url }}"
|
FRONTEND_DOMAIN: "${{ needs.get_urls.outputs.frontend_url }}"
|
||||||
FRONTEND_DOMAIN_SCHEME: "${{ needs.get_urls.outputs.frontend_url_scheme }}"
|
FRONTEND_DOMAIN_SCHEME: "${{ needs.get_urls.outputs.frontend_url_scheme }}"
|
||||||
|
UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
|
||||||
run: |
|
run: |
|
||||||
PR=${{ github.event.pull_request.number }}
|
PR=${{ github.event.pull_request.number }}
|
||||||
RELEASE=myapp-pr-$PR
|
RELEASE=myapp-pr-$PR
|
||||||
NAMESPACE=pr-$PR
|
NAMESPACE=pr-$PR
|
||||||
helm upgrade --install "$RELEASE" ./7project/charts/myapp-chart \
|
helm upgrade --install "$RELEASE" ./7project/src/charts/myapp-chart \
|
||||||
-n "$NAMESPACE" --create-namespace \
|
-n "$NAMESPACE" --create-namespace \
|
||||||
-f 7project/charts/myapp-chart/values-dev.yaml \
|
-f 7project/src/charts/myapp-chart/values-dev.yaml \
|
||||||
--set prNumber="$PR" \
|
--set prNumber="$PR" \
|
||||||
--set deployment="pr-$PR" \
|
--set deployment="pr-$PR" \
|
||||||
--set domain="$DOMAIN" \
|
--set domain="$DOMAIN" \
|
||||||
@@ -95,7 +101,10 @@ jobs:
|
|||||||
--set frontend_domain_scheme="$FRONTEND_DOMAIN_SCHEME" \
|
--set frontend_domain_scheme="$FRONTEND_DOMAIN_SCHEME" \
|
||||||
--set image.digest="$DIGEST" \
|
--set image.digest="$DIGEST" \
|
||||||
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
||||||
--set-string database.password="$DB_PASSWORD"
|
--set-string database.password="$DB_PASSWORD" \
|
||||||
|
--set-string database.encryptionSecret="$PR" \
|
||||||
|
--set-string app.name="finance-tracker-pr-$PR" \
|
||||||
|
--set-string unirate.key="$UNIRATE_API_KEY"
|
||||||
|
|
||||||
- name: Post preview URLs as PR comment
|
- name: Post preview URLs as PR comment
|
||||||
uses: actions/github-script@v7
|
uses: actions/github-script@v7
|
||||||
|
|||||||
35
.github/workflows/deploy-prod.yaml
vendored
35
.github/workflows/deploy-prod.yaml
vendored
@@ -4,9 +4,9 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches: [ "main" ]
|
branches: [ "main" ]
|
||||||
paths:
|
paths:
|
||||||
- 7project/backend/**
|
- ../../7project/src/backend/**
|
||||||
- 7project/frontend/**
|
- ../../7project/src/frontend/**
|
||||||
- 7project/charts/myapp-chart/**
|
- ../../7project/src/charts/myapp-chart/**
|
||||||
- .github/workflows/deploy-prod.yaml
|
- .github/workflows/deploy-prod.yaml
|
||||||
- .github/workflows/build-image.yaml
|
- .github/workflows/build-image.yaml
|
||||||
- .github/workflows/frontend-pages.yml
|
- .github/workflows/frontend-pages.yml
|
||||||
@@ -21,17 +21,23 @@ concurrency:
|
|||||||
cancel-in-progress: false
|
cancel-in-progress: false
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
test:
|
||||||
|
name: Run Python Tests
|
||||||
|
uses: ./.github/workflows/run-tests.yml
|
||||||
|
|
||||||
build:
|
build:
|
||||||
name: Build and push image (reusable)
|
name: Build and push image (reusable)
|
||||||
|
needs: [test]
|
||||||
uses: ./.github/workflows/build-image.yaml
|
uses: ./.github/workflows/build-image.yaml
|
||||||
with:
|
with:
|
||||||
mode: prod
|
mode: prod
|
||||||
image_repo: lukastrkan/cc-app-demo
|
image_repo: lukastrkan/cc-app-demo
|
||||||
context: 7project/backend
|
context: 7project/src/backend
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
get_urls:
|
get_urls:
|
||||||
name: Generate Production URLs
|
name: Generate Production URLs
|
||||||
|
needs: [test]
|
||||||
uses: ./.github/workflows/url_generator.yml
|
uses: ./.github/workflows/url_generator.yml
|
||||||
with:
|
with:
|
||||||
mode: prod
|
mode: prod
|
||||||
@@ -88,10 +94,18 @@ jobs:
|
|||||||
CSAS_CLIENT_ID: ${{ secrets.CSAS_CLIENT_ID }}
|
CSAS_CLIENT_ID: ${{ secrets.CSAS_CLIENT_ID }}
|
||||||
CSAS_CLIENT_SECRET: ${{ secrets.CSAS_CLIENT_SECRET }}
|
CSAS_CLIENT_SECRET: ${{ secrets.CSAS_CLIENT_SECRET }}
|
||||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||||
|
SMTP_HOST: ${{ secrets.SMTP_HOST }}
|
||||||
|
SMTP_PORT: ${{ secrets.SMTP_PORT }}
|
||||||
|
SMTP_USERNAME: ${{ secrets.SMTP_USERNAME }}
|
||||||
|
SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
|
||||||
|
SMTP_USE_TLS: ${{ secrets.SMTP_USE_TLS }}
|
||||||
|
SMTP_USE_SSL: ${{ secrets.SMTP_USE_SSL }}
|
||||||
|
SMTP_FROM: ${{ secrets.SMTP_FROM }}
|
||||||
|
UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
|
||||||
run: |
|
run: |
|
||||||
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
helm upgrade --install myapp ./7project/src/charts/myapp-chart \
|
||||||
-n prod --create-namespace \
|
-n prod --create-namespace \
|
||||||
-f 7project/charts/myapp-chart/values-prod.yaml \
|
-f 7project/src/charts/myapp-chart/values-prod.yaml \
|
||||||
--set deployment="prod" \
|
--set deployment="prod" \
|
||||||
--set domain="$DOMAIN" \
|
--set domain="$DOMAIN" \
|
||||||
--set domain_scheme="$DOMAIN_SCHEME" \
|
--set domain_scheme="$DOMAIN_SCHEME" \
|
||||||
@@ -107,3 +121,12 @@ jobs:
|
|||||||
--set-string oauth.csas.clientId="$CSAS_CLIENT_ID" \
|
--set-string oauth.csas.clientId="$CSAS_CLIENT_ID" \
|
||||||
--set-string oauth.csas.clientSecret="$CSAS_CLIENT_SECRET" \
|
--set-string oauth.csas.clientSecret="$CSAS_CLIENT_SECRET" \
|
||||||
--set-string sentry_dsn="$SENTRY_DSN" \
|
--set-string sentry_dsn="$SENTRY_DSN" \
|
||||||
|
--set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}" \
|
||||||
|
--set-string smtp.host="$SMTP_HOST" \
|
||||||
|
--set smtp.port="$SMTP_PORT" \
|
||||||
|
--set-string smtp.username="$SMTP_USERNAME" \
|
||||||
|
--set-string smtp.password="$SMTP_PASSWORD" \
|
||||||
|
--set-string smtp.tls="$SMTP_USE_TLS" \
|
||||||
|
--set-string smtp.ssl="$SMTP_USE_SSL" \
|
||||||
|
--set-string smtp.from="$SMTP_FROM" \
|
||||||
|
--set-string unirate.key="$UNIRATE_API_KEY"
|
||||||
6
.github/workflows/frontend-pages.yml
vendored
6
.github/workflows/frontend-pages.yml
vendored
@@ -35,7 +35,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
working-directory: 7project/frontend
|
working-directory: 7project/src/frontend
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -45,7 +45,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
node-version: '20'
|
node-version: '20'
|
||||||
cache: 'npm'
|
cache: 'npm'
|
||||||
cache-dependency-path: 7project/frontend/package-lock.json
|
cache-dependency-path: 7project/src/frontend/package-lock.json
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: npm ci
|
run: npm ci
|
||||||
@@ -61,7 +61,7 @@ jobs:
|
|||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: frontend-dist
|
name: frontend-dist
|
||||||
path: 7project/frontend/dist
|
path: 7project/src/frontend/dist
|
||||||
|
|
||||||
deploy:
|
deploy:
|
||||||
name: Deploy to Cloudflare Pages
|
name: Deploy to Cloudflare Pages
|
||||||
|
|||||||
66
.github/workflows/run-tests.yml
vendored
Normal file
66
.github/workflows/run-tests.yml
vendored
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
name: Run Python Tests
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
services:
|
||||||
|
mariadb:
|
||||||
|
image: mariadb:11.4
|
||||||
|
env:
|
||||||
|
MARIADB_ROOT_PASSWORD: rootpw
|
||||||
|
MARIADB_DATABASE: group_project
|
||||||
|
MARIADB_USER: appuser
|
||||||
|
MARIADB_PASSWORD: apppass
|
||||||
|
ports:
|
||||||
|
- 3306:3306
|
||||||
|
options: >-
|
||||||
|
--health-cmd="mariadb-admin ping -h 127.0.0.1 -u root -prootpw --silent"
|
||||||
|
--health-interval=5s
|
||||||
|
--health-timeout=2s
|
||||||
|
--health-retries=20
|
||||||
|
|
||||||
|
env:
|
||||||
|
MARIADB_HOST: 127.0.0.1
|
||||||
|
MARIADB_PORT: "3306"
|
||||||
|
MARIADB_DB: group_project
|
||||||
|
MARIADB_USER: appuser
|
||||||
|
MARIADB_PASSWORD: apppass
|
||||||
|
# Ensure the application uses MariaDB (async) during tests
|
||||||
|
DATABASE_URL: mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project
|
||||||
|
DISABLE_METRICS: "1"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Check out repository code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Python 3.11
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: '3.11'
|
||||||
|
|
||||||
|
- name: Add test dependencies to requirements
|
||||||
|
run: |
|
||||||
|
echo "pytest==8.4.2" >> ./7project/src/backend/requirements.txt
|
||||||
|
echo "pytest-asyncio==1.2.0" >> ./7project/src/backend/requirements.txt
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r ./7project/src/backend/requirements.txt
|
||||||
|
|
||||||
|
- name: Run Alembic migrations
|
||||||
|
run: |
|
||||||
|
alembic upgrade head
|
||||||
|
working-directory: ./7project/src/backend
|
||||||
|
|
||||||
|
- name: Run tests with pytest
|
||||||
|
env:
|
||||||
|
PYTEST_RUN_CONFIG: "True"
|
||||||
|
run: pytest
|
||||||
|
working-directory: ./7project/src/backend
|
||||||
8
.idea/.gitignore
generated
vendored
Normal file
8
.idea/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# Default ignored files
|
||||||
|
/shelf/
|
||||||
|
/workspace.xml
|
||||||
|
# Editor-based HTTP Client requests
|
||||||
|
/httpRequests/
|
||||||
|
# Datasource local storage ignored files
|
||||||
|
/dataSources/
|
||||||
|
/dataSources.local.xml
|
||||||
16
7project/.gitignore
vendored
16
7project/.gitignore
vendored
@@ -1,8 +1,8 @@
|
|||||||
/tofu/controlplane.yaml
|
/src/tofu/controlplane.yaml
|
||||||
/tofu/kubeconfig
|
/src/tofu/kubeconfig
|
||||||
/tofu/talosconfig
|
/src/tofu/talosconfig
|
||||||
/tofu/terraform.tfstate
|
/src/tofu/terraform.tfstate
|
||||||
/tofu/terraform.tfstate.backup
|
/src/tofu/terraform.tfstate.backup
|
||||||
/tofu/worker.yaml
|
/src/tofu/worker.yaml
|
||||||
/tofu/.terraform.lock.hcl
|
/src/tofu/.terraform.lock.hcl
|
||||||
/tofu/.terraform/
|
/src/tofu/.terraform/
|
||||||
|
|||||||
8
7project/.idea/.gitignore
generated
vendored
Normal file
8
7project/.idea/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# Default ignored files
|
||||||
|
/shelf/
|
||||||
|
/workspace.xml
|
||||||
|
# Editor-based HTTP Client requests
|
||||||
|
/httpRequests/
|
||||||
|
# Datasource local storage ignored files
|
||||||
|
/dataSources/
|
||||||
|
/dataSources.local.xml
|
||||||
@@ -1,43 +1,6 @@
|
|||||||
# Lab 6: Design Document for Course Project
|
# Personal Finance Tracker
|
||||||
|
## Folder Structure
|
||||||
| Lab 6: | Design Document for Course Project |
|
- meetings: Contains note from meetings
|
||||||
| ----------- | ---------------------------------- |
|
- scr: Source code for the project
|
||||||
| Subject: | DAT515 Cloud Computing |
|
- checklist: Project checklist and self assessment tracking
|
||||||
| Deadline: | **September 19, 2025 23:59** |
|
- report.md: Detailed report of the project
|
||||||
| Grading: | No Grade |
|
|
||||||
| Submission: | Group |
|
|
||||||
|
|
||||||
## Table of Contents
|
|
||||||
|
|
||||||
- [Table of Contents](#table-of-contents)
|
|
||||||
- [1. Design Document (design.md)](#1-design-document-designmd)
|
|
||||||
|
|
||||||
The design document is the first deliverable for your project.
|
|
||||||
We separated this out as a separate deliverable, with its own deadline, to ensure that you have a clear plan before you start coding.
|
|
||||||
This part only needs a cursory review by the teaching staff to ensure it is sufficiently comprehensive, while still realistic.
|
|
||||||
The teaching staff will assign you to a project mentor who will provide guidance and support throughout the development process.
|
|
||||||
|
|
||||||
## 1. Design Document (design.md)
|
|
||||||
|
|
||||||
You are required to prepare a design document for your application.
|
|
||||||
The design doc should be brief, well-organized and easy to understand.
|
|
||||||
The design doc should be prepared in markdown format and named `design.md` and submitted in the project group's repository.
|
|
||||||
Remember that you can use [mermaid diagrams](https://github.com/mermaid-js/mermaid#readme) in markdown files.
|
|
||||||
|
|
||||||
The design doc **should include** the following sections:
|
|
||||||
|
|
||||||
- **Overview**: A brief description of the application and its purpose.
|
|
||||||
- **Architecture**: The high-level architecture of the application, including components, interactions, and data flow.
|
|
||||||
- **Technologies**: The cloud computing technologies or services used in the application.
|
|
||||||
- **Deployment**: The deployment strategy for the application, including any infrastructure requirements.
|
|
||||||
|
|
||||||
The design document should be updated throughout the development process and reflect the final implementation of your project.
|
|
||||||
|
|
||||||
Optional sections may include:
|
|
||||||
|
|
||||||
- Security: The security measures implemented in the application to protect data and resources.
|
|
||||||
- Scalability: The scalability considerations for the application, including load balancing and auto-scaling.
|
|
||||||
- Monitoring: The monitoring and logging strategy for the application to track performance and detect issues.
|
|
||||||
- Disaster Recovery: The disaster recovery plan for the application to ensure business continuity in case of failures.
|
|
||||||
- Cost Analysis: The cost analysis of running the application on the cloud, including pricing models and cost-saving strategies.
|
|
||||||
- References: Any external sources or references used in the design document.
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
import app.celery_app # noqa: F401
|
|
||||||
from app.workers.celery_tasks import send_email
|
|
||||||
|
|
||||||
|
|
||||||
def enqueue_email(to: str, subject: str, body: str) -> None:
|
|
||||||
send_email.delay(to, subject, body)
|
|
||||||
@@ -1,121 +0,0 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
from os.path import dirname, join
|
|
||||||
from uuid import UUID
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
from sqlalchemy import select
|
|
||||||
|
|
||||||
from app.core.db import async_session_maker
|
|
||||||
from app.models.user import User
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Reuse CSAS mTLS certs used by OAuth profile calls
|
|
||||||
OAUTH_DIR = join(dirname(__file__), "..", "oauth")
|
|
||||||
CERTS = (
|
|
||||||
join(OAUTH_DIR, "public_key.pem"),
|
|
||||||
join(OAUTH_DIR, "private_key.key"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def aload_ceska_sporitelna_transactions(user_id: str) -> None:
|
|
||||||
"""
|
|
||||||
Async entry point to load Česká spořitelna transactions for a single user.
|
|
||||||
Validates the user_id and performs a minimal placeholder action.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
uid = UUID(str(user_id))
|
|
||||||
except Exception:
|
|
||||||
logger.error("Invalid user_id provided to bank_scraper (async): %r", user_id)
|
|
||||||
return
|
|
||||||
|
|
||||||
await _aload_ceska_sporitelna_transactions(uid)
|
|
||||||
|
|
||||||
|
|
||||||
async def aload_all_ceska_sporitelna_transactions() -> None:
|
|
||||||
"""
|
|
||||||
Async entry point to load Česká spořitelna transactions for all users.
|
|
||||||
"""
|
|
||||||
async with async_session_maker() as session:
|
|
||||||
result = await session.execute(select(User))
|
|
||||||
users = result.unique().scalars().all()
|
|
||||||
logger.info("[BankScraper] Starting CSAS scrape for all users | count=%d", len(users))
|
|
||||||
|
|
||||||
processed = 0
|
|
||||||
for user in users:
|
|
||||||
try:
|
|
||||||
await _aload_ceska_sporitelna_transactions(user.id)
|
|
||||||
processed += 1
|
|
||||||
except Exception:
|
|
||||||
logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
|
|
||||||
getattr(user, 'email', None))
|
|
||||||
logger.info("[BankScraper] Finished CSAS scrape for all users | processed=%d", processed)
|
|
||||||
|
|
||||||
|
|
||||||
async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
|
||||||
async with async_session_maker() as session:
|
|
||||||
result = await session.execute(select(User).where(User.id == user_id))
|
|
||||||
user: User = result.unique().scalar_one_or_none()
|
|
||||||
if user is None:
|
|
||||||
logger.warning("User not found for id=%s", user_id)
|
|
||||||
return
|
|
||||||
|
|
||||||
cfg = user.config or {}
|
|
||||||
if "csas" not in cfg:
|
|
||||||
return
|
|
||||||
|
|
||||||
cfg = json.loads(cfg["csas"])
|
|
||||||
if "access_token" not in cfg:
|
|
||||||
return
|
|
||||||
|
|
||||||
accounts = []
|
|
||||||
try:
|
|
||||||
async with httpx.AsyncClient(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
|
|
||||||
response = await client.get(
|
|
||||||
"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts?size=10&page=0&sort=iban&order=desc",
|
|
||||||
headers={
|
|
||||||
"Authorization": f"Bearer {cfg['access_token']}",
|
|
||||||
"WEB-API-key": "09fdc637-3c57-4242-95f2-c2205a2438f3",
|
|
||||||
"user-involved": "false",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
if response.status_code != httpx.codes.OK:
|
|
||||||
return
|
|
||||||
|
|
||||||
for account in response.json()["accounts"]:
|
|
||||||
accounts.append(account)
|
|
||||||
|
|
||||||
except (httpx.HTTPError,) as e:
|
|
||||||
logger.exception("[BankScraper] HTTP error during CSAS request | user_id=%s", user_id)
|
|
||||||
return
|
|
||||||
|
|
||||||
for account in accounts:
|
|
||||||
id = account["id"]
|
|
||||||
|
|
||||||
url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
|
|
||||||
async with httpx.AsyncClient(cert=CERTS) as client:
|
|
||||||
response = await client.get(
|
|
||||||
url,
|
|
||||||
headers={
|
|
||||||
"Authorization": f"Bearer {cfg['access_token']}",
|
|
||||||
"WEB-API-key": "09fdc637-3c57-4242-95f2-c2205a2438f3",
|
|
||||||
"user-involved": "false",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
if response.status_code != httpx.codes.OK:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Placeholder: just print the account transactions
|
|
||||||
|
|
||||||
transactions = response.json()["transactions"]
|
|
||||||
pass
|
|
||||||
|
|
||||||
for transaction in transactions:
|
|
||||||
#parse and store transaction to database
|
|
||||||
#create Transaction object and save to DB
|
|
||||||
#obj =
|
|
||||||
|
|
||||||
|
|
||||||
pass
|
|
||||||
pass
|
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
import logging
|
|
||||||
import asyncio
|
|
||||||
|
|
||||||
from celery import shared_task
|
|
||||||
|
|
||||||
import app.services.bank_scraper
|
|
||||||
|
|
||||||
logger = logging.getLogger("celery_tasks")
|
|
||||||
if not logger.handlers:
|
|
||||||
_h = logging.StreamHandler()
|
|
||||||
logger.addHandler(_h)
|
|
||||||
logger.setLevel(logging.INFO)
|
|
||||||
|
|
||||||
|
|
||||||
def run_coro(coro) -> None:
|
|
||||||
"""Run an async coroutine in a fresh event loop without using run_until_complete.
|
|
||||||
Primary strategy runs in a new loop in the current thread. If that fails due to
|
|
||||||
debugger patches (e.g., Bad file descriptor from pydevd_nest_asyncio), fall back
|
|
||||||
to running in a dedicated thread with its own event loop.
|
|
||||||
"""
|
|
||||||
import threading
|
|
||||||
|
|
||||||
def _cleanup_loop(loop):
|
|
||||||
try:
|
|
||||||
pending = [t for t in asyncio.all_tasks(loop) if not t.done()]
|
|
||||||
for t in pending:
|
|
||||||
t.cancel()
|
|
||||||
if pending:
|
|
||||||
loop.run_until_complete(asyncio.gather(*pending, return_exceptions=True))
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
finally:
|
|
||||||
try:
|
|
||||||
loop.close()
|
|
||||||
finally:
|
|
||||||
asyncio.set_event_loop(None)
|
|
||||||
|
|
||||||
# First attempt: Run in current thread with a fresh event loop
|
|
||||||
try:
|
|
||||||
loop = asyncio.get_event_loop_policy().new_event_loop()
|
|
||||||
try:
|
|
||||||
asyncio.set_event_loop(loop)
|
|
||||||
task = loop.create_task(coro)
|
|
||||||
task.add_done_callback(lambda _t: loop.stop())
|
|
||||||
loop.run_forever()
|
|
||||||
exc = task.exception()
|
|
||||||
if exc:
|
|
||||||
raise exc
|
|
||||||
return
|
|
||||||
finally:
|
|
||||||
_cleanup_loop(loop)
|
|
||||||
except OSError as e:
|
|
||||||
logger.warning("run_coro primary strategy failed (%s). Falling back to thread runner.", e)
|
|
||||||
except Exception:
|
|
||||||
# For any other unexpected errors, try thread fallback as well
|
|
||||||
logger.exception("run_coro primary strategy raised; attempting thread fallback")
|
|
||||||
|
|
||||||
# Fallback: Run in a dedicated thread with its own event loop
|
|
||||||
error = {"exc": None}
|
|
||||||
|
|
||||||
def _thread_target():
|
|
||||||
loop = asyncio.new_event_loop()
|
|
||||||
try:
|
|
||||||
asyncio.set_event_loop(loop)
|
|
||||||
task = loop.create_task(coro)
|
|
||||||
task.add_done_callback(lambda _t: loop.stop())
|
|
||||||
loop.run_forever()
|
|
||||||
exc = task.exception()
|
|
||||||
if exc:
|
|
||||||
error["exc"] = exc
|
|
||||||
finally:
|
|
||||||
_cleanup_loop(loop)
|
|
||||||
|
|
||||||
th = threading.Thread(target=_thread_target, name="celery-async-runner", daemon=True)
|
|
||||||
th.start()
|
|
||||||
th.join()
|
|
||||||
if error["exc"] is not None:
|
|
||||||
raise error["exc"]
|
|
||||||
|
|
||||||
|
|
||||||
@shared_task(name="workers.send_email")
|
|
||||||
def send_email(to: str, subject: str, body: str) -> None:
|
|
||||||
if not (to and subject and body):
|
|
||||||
logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
|
|
||||||
return
|
|
||||||
|
|
||||||
# Placeholder for real email sending logic
|
|
||||||
logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
|
|
||||||
|
|
||||||
|
|
||||||
@shared_task(name="workers.load_transactions")
|
|
||||||
def load_transactions(user_id: str) -> None:
|
|
||||||
if not user_id:
|
|
||||||
logger.error("Load transactions task missing user_id.")
|
|
||||||
return
|
|
||||||
|
|
||||||
run_coro(app.services.bank_scraper.aload_ceska_sporitelna_transactions(user_id))
|
|
||||||
|
|
||||||
# Placeholder for real transaction loading logic
|
|
||||||
logger.info("[Celery] Transactions loaded for user_id=%s", user_id)
|
|
||||||
|
|
||||||
|
|
||||||
@shared_task(name="workers.load_all_transactions")
|
|
||||||
def load_all_transactions() -> None:
|
|
||||||
logger.info("[Celery] Starting load_all_transactions")
|
|
||||||
run_coro(app.services.bank_scraper.aload_all_ceska_sporitelna_transactions())
|
|
||||||
logger.info("[Celery] Finished load_all_transactions")
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
import uvicorn
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
uvicorn.run("app.app:app", host="0.0.0.0", log_level="info")
|
|
||||||
@@ -7,64 +7,64 @@ Focus on areas that align with your project goals and interests.
|
|||||||
The core deliverables are required.
|
The core deliverables are required.
|
||||||
This means that you must get at least 2 points for each item in this category.
|
This means that you must get at least 2 points for each item in this category.
|
||||||
|
|
||||||
| **Category** | **Item** | **Max Points** | **Points** |
|
| **Category** | **Item** | **Max Points** | **Points** | **Comments** |
|
||||||
| -------------------------------- | --------------------------------------- | -------------- | ---------------- |
|
|----------------------------------| --------------------------------------- | -------------- |-------------------------------------------------| |
|
||||||
| **Core Deliverables (Required)** | | | |
|
| **Core Deliverables (Required)** | | | | |
|
||||||
| Codebase & Organization | Well-organized project structure | 5 | |
|
| Codebase & Organization | Well-organized project structure | 5 | 5 | |
|
||||||
| | Clean, readable code | 5 | |
|
| | Clean, readable code | 5 | 4 | |
|
||||||
| | Use planning tool (e.g., GitHub issues) | 5 | |
|
| | Use planning tool (e.g., GitHub issues) | 5 | 4 | |
|
||||||
| | Proper version control usage | 5 | |
|
| | Proper version control usage | 5 | 5 | |
|
||||||
| | Complete source code | 5 | |
|
| 23 | Complete source code | 5 | 5 | |
|
||||||
| Documentation | Comprehensive reproducibility report | 10 | |
|
| Documentation | Comprehensive reproducibility report | 10 | 4-5 | |
|
||||||
| | Updated design document | 5 | |
|
| | Updated design document | 5 | 2 | |
|
||||||
| | Clear build/deployment instructions | 5 | |
|
| | Clear build/deployment instructions | 5 | 2 | |
|
||||||
| | Troubleshooting guide | 5 | |
|
| | Troubleshooting guide | 5 | 1 | |
|
||||||
| | Completed self-assessment table | 5 | |
|
| | Completed self-assessment table | 5 | 2 | |
|
||||||
| | Hour sheets for all members | 5 | |
|
| 14 | Hour sheets for all members | 5 | 3 | |
|
||||||
| Presentation Video | Project demonstration | 5 | |
|
| Presentation Video | Project demonstration | 5 | 0 | |
|
||||||
| | Code walk-through | 5 | |
|
| | Code walk-through | 5 | 0 | |
|
||||||
| | Deployment showcase | 5 | |
|
| 0 | Deployment showcase | 5 | 0 | |
|
||||||
| **Technical Implementation** | | | |
|
| **Technical Implementation** | | | | |
|
||||||
| Application Functionality | Basic functionality works | 10 | |
|
| Application Functionality | Basic functionality works | 10 | 8 | |
|
||||||
| | Advanced features implemented | 10 | |
|
| | Advanced features implemented | 10 | 0 | |
|
||||||
| | Error handling & robustness | 10 | |
|
| | Error handling & robustness | 10 | 4 | |
|
||||||
| | User-friendly interface | 5 | |
|
| 16 | User-friendly interface | 5 | 4 | |
|
||||||
| Backend & Architecture | Stateless web server | 5 | |
|
| Backend & Architecture | Stateless web server | 5 | 5 | |
|
||||||
| | Stateful application | 10 | |
|
| | Stateful application | 10 | ? WHAT DOES THIS MEAN | |
|
||||||
| | Database integration | 10 | |
|
| | Database integration | 10 | 10 | |
|
||||||
| | API design | 5 | |
|
| | API design | 5 | 5 | |
|
||||||
| | Microservices architecture | 10 | |
|
| 20 | Microservices architecture | 10 | 0 | |
|
||||||
| Cloud Integration | Basic cloud deployment | 10 | |
|
| Cloud Integration | Basic cloud deployment | 10 | 10 | |
|
||||||
| | Cloud APIs usage | 10 | |
|
| | Cloud APIs usage | 10 | ? WHAT DOES THIS MEAN | |
|
||||||
| | Serverless components | 10 | |
|
| | Serverless components | 10 | 0 | |
|
||||||
| | Advanced cloud services | 5 | |
|
| 10 | Advanced cloud services | 5 | 0 | |
|
||||||
| **DevOps & Deployment** | | | |
|
| **DevOps & Deployment** | | | | |
|
||||||
| Containerization | Basic Dockerfile | 5 | |
|
| Containerization | Basic Dockerfile | 5 | 5 | |
|
||||||
| | Optimized Dockerfile | 5 | |
|
| | Optimized Dockerfile | 5 | 0 | |
|
||||||
| | Docker Compose | 5 | |
|
| | Docker Compose | 5 | 5 - dev only | |
|
||||||
| | Persistent storage | 5 | |
|
| 15 | Persistent storage | 5 | 5 | |
|
||||||
| Deployment & Scaling | Manual deployment | 5 | |
|
| Deployment & Scaling | Manual deployment | 5 | 5 | |
|
||||||
| | Automated deployment | 5 | |
|
| | Automated deployment | 5 | 5 | |
|
||||||
| | Multiple replicas | 5 | |
|
| | Multiple replicas | 5 | 5 | |
|
||||||
| | Kubernetes deployment | 10 | |
|
| 20 | Kubernetes deployment | 10 | 10 | |
|
||||||
| **Quality Assurance** | | | |
|
| **Quality Assurance** | | | | |
|
||||||
| Testing | Unit tests | 5 | |
|
| Testing | Unit tests | 5 | 2 | |
|
||||||
| | Integration tests | 5 | |
|
| | Integration tests | 5 | 2 | |
|
||||||
| | End-to-end tests | 5 | |
|
| | End-to-end tests | 5 | 5 | |
|
||||||
| | Performance testing | 5 | |
|
| 9 | Performance testing | 5 | 0 | |
|
||||||
| Monitoring & Operations | Health checks | 5 | |
|
| Monitoring & Operations | Health checks | 5 | 5 | |
|
||||||
| | Logging | 5 | |
|
| | Logging | 5 | 2 - only to terminal add logstash | |
|
||||||
| | Metrics/Monitoring | 5 | |
|
| 9 | Metrics/Monitoring | 5 | 2 - only DB, need to create Prometheus endpoint | |
|
||||||
| Security | HTTPS/TLS | 5 | |
|
| Security | HTTPS/TLS | 5 | 5 | |
|
||||||
| | Authentication | 5 | |
|
| | Authentication | 5 | 5 | |
|
||||||
| | Authorization | 5 | |
|
| 15 | Authorization | 5 | 5 | |
|
||||||
| **Innovation & Excellence** | | | |
|
| **Innovation & Excellence** | | | | |
|
||||||
| Advanced Features and | AI/ML Integration | 10 | |
|
| Advanced Features and | AI/ML Integration | 10 | 0 | |
|
||||||
| Technical Excellence | Real-time features | 10 | |
|
| Technical Excellence | Real-time features | 10 | 0 | |
|
||||||
| | Creative problem solving | 10 | |
|
| | Creative problem solving | 10 | ? | |
|
||||||
| | Performance optimization | 5 | |
|
| | Performance optimization | 5 | 2 | |
|
||||||
| | Exceptional user experience | 5 | |
|
| 2 | Exceptional user experience | 5 | 0 | |
|
||||||
| **Total** | | **255** | **[Your Total]** |
|
| **Total** | | **255** | **153** | |
|
||||||
|
|
||||||
## Grading Scale
|
## Grading Scale
|
||||||
|
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
/* App-level styles moved to ui.css for a cleaner layout. */
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
export const BACKEND_URL: string =
|
|
||||||
import.meta.env.VITE_BACKEND_URL ?? '';
|
|
||||||
@@ -1,200 +0,0 @@
|
|||||||
import { useEffect, useMemo, useState } from 'react';
|
|
||||||
import { type Category, type Transaction, createTransaction, getCategories, getTransactions } from '../api';
|
|
||||||
import AccountPage from './AccountPage';
|
|
||||||
import AppearancePage from './AppearancePage';
|
|
||||||
import { BACKEND_URL } from '../config';
|
|
||||||
|
|
||||||
function formatAmount(n: number) {
|
|
||||||
return new Intl.NumberFormat(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 2 }).format(n);
|
|
||||||
}
|
|
||||||
|
|
||||||
export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
|
||||||
const [current, setCurrent] = useState<'home' | 'account' | 'appearance'>('home');
|
|
||||||
const [transactions, setTransactions] = useState<Transaction[]>([]);
|
|
||||||
const [categories, setCategories] = useState<Category[]>([]);
|
|
||||||
const [loading, setLoading] = useState(true);
|
|
||||||
const [error, setError] = useState<string | null>(null);
|
|
||||||
|
|
||||||
// Start CSAS (George) OAuth after login
|
|
||||||
async function startOauthCsas() {
|
|
||||||
const base = BACKEND_URL.replace(/\/$/, '');
|
|
||||||
const url = `${base}/auth/csas/authorize`;
|
|
||||||
try {
|
|
||||||
const token = localStorage.getItem('token');
|
|
||||||
const res = await fetch(url, {
|
|
||||||
credentials: 'include',
|
|
||||||
headers: token ? { Authorization: `Bearer ${token}` } : undefined,
|
|
||||||
});
|
|
||||||
const data = await res.json();
|
|
||||||
if (data && typeof data.authorization_url === 'string') {
|
|
||||||
window.location.assign(data.authorization_url);
|
|
||||||
} else {
|
|
||||||
alert('Cannot start CSAS OAuth.');
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
alert('Cannot start CSAS OAuth.');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// New transaction form state
|
|
||||||
const [amount, setAmount] = useState<string>('');
|
|
||||||
const [description, setDescription] = useState('');
|
|
||||||
const [selectedCategoryId, setSelectedCategoryId] = useState<number | ''>('');
|
|
||||||
|
|
||||||
// Filters
|
|
||||||
const [minAmount, setMinAmount] = useState<string>('');
|
|
||||||
const [maxAmount, setMaxAmount] = useState<string>('');
|
|
||||||
const [filterCategoryId, setFilterCategoryId] = useState<number | ''>('');
|
|
||||||
const [searchText, setSearchText] = useState('');
|
|
||||||
|
|
||||||
async function loadAll() {
|
|
||||||
setLoading(true);
|
|
||||||
setError(null);
|
|
||||||
try {
|
|
||||||
const [txs, cats] = await Promise.all([getTransactions(), getCategories()]);
|
|
||||||
setTransactions(txs);
|
|
||||||
setCategories(cats);
|
|
||||||
} catch (err: any) {
|
|
||||||
setError(err?.message || 'Failed to load data');
|
|
||||||
} finally {
|
|
||||||
setLoading(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
useEffect(() => { loadAll(); }, []);
|
|
||||||
|
|
||||||
const last10 = useMemo(() => {
|
|
||||||
const sorted = [...transactions].sort((a, b) => b.id - a.id);
|
|
||||||
return sorted.slice(0, 10);
|
|
||||||
}, [transactions]);
|
|
||||||
|
|
||||||
const filtered = useMemo(() => {
|
|
||||||
let arr = last10;
|
|
||||||
const min = minAmount !== '' ? Number(minAmount) : undefined;
|
|
||||||
const max = maxAmount !== '' ? Number(maxAmount) : undefined;
|
|
||||||
if (min !== undefined) arr = arr.filter(t => t.amount >= min);
|
|
||||||
if (max !== undefined) arr = arr.filter(t => t.amount <= max);
|
|
||||||
if (filterCategoryId !== '') arr = arr.filter(t => t.category_ids.includes(filterCategoryId as number));
|
|
||||||
if (searchText.trim()) arr = arr.filter(t => (t.description || '').toLowerCase().includes(searchText.toLowerCase()));
|
|
||||||
return arr;
|
|
||||||
}, [last10, minAmount, maxAmount, filterCategoryId, searchText]);
|
|
||||||
|
|
||||||
function categoryNameById(id: number) { return categories.find(c => c.id === id)?.name || `#${id}`; }
|
|
||||||
|
|
||||||
async function handleCreate(e: React.FormEvent) {
|
|
||||||
e.preventDefault();
|
|
||||||
if (!amount) return;
|
|
||||||
const payload = {
|
|
||||||
amount: Number(amount),
|
|
||||||
description: description || undefined,
|
|
||||||
category_ids: selectedCategoryId !== '' ? [Number(selectedCategoryId)] : undefined,
|
|
||||||
};
|
|
||||||
try {
|
|
||||||
const created = await createTransaction(payload);
|
|
||||||
setTransactions(prev => [created, ...prev]);
|
|
||||||
setAmount(''); setDescription(''); setSelectedCategoryId('');
|
|
||||||
} catch (err: any) {
|
|
||||||
alert(err?.message || 'Failed to create transaction');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="app-layout">
|
|
||||||
<aside className="sidebar">
|
|
||||||
<div className="logo">7Project</div>
|
|
||||||
<nav className="nav">
|
|
||||||
<button className={current === 'home' ? 'active' : ''} onClick={() => setCurrent('home')}>Home</button>
|
|
||||||
<button className={current === 'account' ? 'active' : ''} onClick={() => setCurrent('account')}>Account</button>
|
|
||||||
<button className={current === 'appearance' ? 'active' : ''} onClick={() => setCurrent('appearance')}>Appearance</button>
|
|
||||||
</nav>
|
|
||||||
</aside>
|
|
||||||
<div className="content">
|
|
||||||
<div className="topbar">
|
|
||||||
<h2 style={{ margin: 0 }}>{current === 'home' ? 'Dashboard' : current === 'account' ? 'Account' : 'Appearance'}</h2>
|
|
||||||
<div className="actions">
|
|
||||||
<span className="user muted">Signed in</span>
|
|
||||||
<button className="btn" onClick={onLogout}>Logout</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<main className="page space-y">
|
|
||||||
{current === 'home' && (
|
|
||||||
<>
|
|
||||||
<section className="card">
|
|
||||||
<h3>Bank connections</h3>
|
|
||||||
<p className="muted">Connect your CSAS (George) account.</p>
|
|
||||||
<button className="btn" onClick={startOauthCsas}>Connect CSAS (George)</button>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section className="card">
|
|
||||||
<h3>Add Transaction</h3>
|
|
||||||
<form onSubmit={handleCreate} className="form-row">
|
|
||||||
<input className="input" type="number" step="0.01" placeholder="Amount" value={amount} onChange={(e) => setAmount(e.target.value)} required />
|
|
||||||
<input className="input" type="text" placeholder="Description (optional)" value={description} onChange={(e) => setDescription(e.target.value)} />
|
|
||||||
<select className="input" value={selectedCategoryId} onChange={(e) => setSelectedCategoryId(e.target.value ? Number(e.target.value) : '')}>
|
|
||||||
<option value="">No category</option>
|
|
||||||
{categories.map(c => (<option key={c.id} value={c.id}>{c.name}</option>))}
|
|
||||||
</select>
|
|
||||||
<button className="btn primary" type="submit">Add</button>
|
|
||||||
</form>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section className="card">
|
|
||||||
<h3>Filters</h3>
|
|
||||||
<div className="form-row">
|
|
||||||
<input className="input" type="number" step="0.01" placeholder="Min amount" value={minAmount} onChange={(e) => setMinAmount(e.target.value)} />
|
|
||||||
<input className="input" type="number" step="0.01" placeholder="Max amount" value={maxAmount} onChange={(e) => setMaxAmount(e.target.value)} />
|
|
||||||
<select className="input" value={filterCategoryId} onChange={(e) => setFilterCategoryId(e.target.value ? Number(e.target.value) : '')}>
|
|
||||||
<option value="">All categories</option>
|
|
||||||
{categories.map(c => (<option key={c.id} value={c.id}>{c.name}</option>))}
|
|
||||||
</select>
|
|
||||||
<input className="input" type="text" placeholder="Search in description" value={searchText} onChange={(e) => setSearchText(e.target.value)} />
|
|
||||||
</div>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section className="card">
|
|
||||||
<h3>Latest Transactions (last 10)</h3>
|
|
||||||
{loading ? (
|
|
||||||
<div>Loading…</div>
|
|
||||||
) : error ? (
|
|
||||||
<div style={{ color: 'crimson' }}>{error}</div>
|
|
||||||
) : filtered.length === 0 ? (
|
|
||||||
<div>No transactions</div>
|
|
||||||
) : (
|
|
||||||
<table className="table">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th>ID</th>
|
|
||||||
<th style={{ textAlign: 'right' }}>Amount</th>
|
|
||||||
<th>Description</th>
|
|
||||||
<th>Categories</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody>
|
|
||||||
{filtered.map(t => (
|
|
||||||
<tr key={t.id}>
|
|
||||||
<td>{t.id}</td>
|
|
||||||
<td className="amount">{formatAmount(t.amount)}</td>
|
|
||||||
<td>{t.description || ''}</td>
|
|
||||||
<td>{t.category_ids.map(id => categoryNameById(id)).join(', ')}</td>
|
|
||||||
</tr>
|
|
||||||
))}
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
)}
|
|
||||||
</section>
|
|
||||||
</>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{current === 'account' && (
|
|
||||||
// lazy import avoided for simplicity
|
|
||||||
<AccountPage onDeleted={onLogout} />
|
|
||||||
)}
|
|
||||||
|
|
||||||
{current === 'appearance' && (
|
|
||||||
<AppearancePage />
|
|
||||||
)}
|
|
||||||
</main>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
@@ -1,85 +0,0 @@
|
|||||||
:root {
|
|
||||||
--bg: #f7f7fb;
|
|
||||||
--panel: #ffffff;
|
|
||||||
--text: #9aa3b2;
|
|
||||||
--muted: #6b7280;
|
|
||||||
--primary: #6f49fe;
|
|
||||||
--primary-600: #5a37fb;
|
|
||||||
--border: #e5e7eb;
|
|
||||||
--radius: 12px;
|
|
||||||
--shadow: 0 1px 2px rgba(0,0,0,0.04), 0 8px 24px rgba(0,0,0,0.08);
|
|
||||||
|
|
||||||
font-family: Inter, ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial, "Apple Color Emoji", "Segoe UI Emoji";
|
|
||||||
color: var(--text);
|
|
||||||
}
|
|
||||||
|
|
||||||
* { box-sizing: border-box; }
|
|
||||||
|
|
||||||
html, body, #root { height: 100%; }
|
|
||||||
|
|
||||||
body { background: var(--bg); margin: 0; display: block; }
|
|
||||||
|
|
||||||
/* Dark theme variables */
|
|
||||||
body[data-theme="dark"] {
|
|
||||||
--bg: #161a2b;
|
|
||||||
--panel: #283046;
|
|
||||||
--text: #283046;
|
|
||||||
--muted: #cbd5e1;
|
|
||||||
--primary: #8b7bff;
|
|
||||||
--primary-600: #7b69ff;
|
|
||||||
--border: #283046;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Layout */
|
|
||||||
.app-layout { display: grid; grid-template-columns: 260px 1fr; height: 100%; }
|
|
||||||
.sidebar { background: #15172a; color: #e5e7eb; display: flex; flex-direction: column; padding: 20px 12px; }
|
|
||||||
.sidebar .logo { color: #fff; font-weight: 700; font-size: 18px; padding: 12px 14px; display: flex; align-items: center; gap: 10px; }
|
|
||||||
.nav { margin-top: 12px; display: grid; gap: 4px; }
|
|
||||||
.nav a, .nav button { color: #cbd5e1; text-align: left; background: transparent; border: 0; padding: 10px 12px; border-radius: 8px; cursor: pointer; }
|
|
||||||
.nav a.active, .nav a:hover, .nav button:hover { background: rgba(255,255,255,0.08); color: #fff; }
|
|
||||||
|
|
||||||
.content { display: flex; flex-direction: column; height: 100%; }
|
|
||||||
.topbar { height: 64px; display: flex; align-items: center; justify-content: space-between; padding: 0 24px; background: var(--panel); border-bottom: 1px solid var(--border); }
|
|
||||||
.topbar .user { color: var(--muted); }
|
|
||||||
.page { padding: 24px; max-width: 1100px; margin: auto; }
|
|
||||||
|
|
||||||
/* Cards */
|
|
||||||
.card { background: var(--panel); border: 1px solid var(--border); border-radius: var(--radius); box-shadow: var(--shadow); padding: 16px; }
|
|
||||||
.card h3 { margin: 0 0 12px; }
|
|
||||||
|
|
||||||
/* Forms */
|
|
||||||
.input, select, textarea { width: 100%; padding: 10px 12px; border-radius: 10px; border: 1px solid var(--border); background: #fff; color: var(--text); }
|
|
||||||
.input:focus, select:focus, textarea:focus { outline: 2px solid var(--primary); border-color: var(--primary); }
|
|
||||||
.form-row { display: grid; gap: 8px; grid-template-columns: repeat(4, minmax(0,1fr)); }
|
|
||||||
.form-row > * { min-width: 140px; }
|
|
||||||
.actions { display: flex; align-items: center; gap: 8px; }
|
|
||||||
|
|
||||||
/* Buttons */
|
|
||||||
.btn { border: 1px solid var(--border); background: #fff; color: var(--text); padding: 10px 14px; border-radius: 10px; cursor: pointer; }
|
|
||||||
.btn.primary { background: var(--primary); border-color: var(--primary); color: #fff; }
|
|
||||||
.btn.primary:hover { background: var(--primary-600); }
|
|
||||||
.btn.ghost { background: transparent; color: var(--muted); }
|
|
||||||
|
|
||||||
/* Tables */
|
|
||||||
.table { width: 100%; border-collapse: collapse; }
|
|
||||||
.table th, .table td { padding: 10px; border-bottom: 1px solid var(--border); }
|
|
||||||
.table th { text-align: left; color: var(--muted); font-weight: 600; }
|
|
||||||
.table td.amount { text-align: right; font-variant-numeric: tabular-nums; }
|
|
||||||
|
|
||||||
/* Segmented control */
|
|
||||||
.segmented { display: inline-flex; background: #f1f5f9; border-radius: 10px; padding: 4px; border: 1px solid var(--border); }
|
|
||||||
.segmented button { border: 0; background: transparent; padding: 8px 12px; border-radius: 8px; color: var(--muted); cursor: pointer; }
|
|
||||||
.segmented button.active { background: #fff; color: var(--text); box-shadow: var(--shadow); }
|
|
||||||
|
|
||||||
/* Auth layout */
|
|
||||||
body.auth-page #root {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
min-height: 100vh;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Utility */
|
|
||||||
.muted { color: var(--muted); }
|
|
||||||
.space-y > * + * { margin-top: 12px; }
|
|
||||||
@@ -8,7 +8,7 @@ Just copy the template below for each weekly meeting and fill in the details.
|
|||||||
|
|
||||||
## Administrative Info
|
## Administrative Info
|
||||||
|
|
||||||
- Date: 2025-10-08
|
- Date: 2025-10-16
|
||||||
- Attendees: Dejan Ribarovski, Lukas Trkan
|
- Attendees: Dejan Ribarovski, Lukas Trkan
|
||||||
- Notetaker: Dejan Ribarovski
|
- Notetaker: Dejan Ribarovski
|
||||||
|
|
||||||
@@ -43,8 +43,8 @@ Prepare 3-5 questions and topics you want to discuss with your mentor.
|
|||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
- [ ] OAuth
|
- [x] OAuth
|
||||||
- [ ] CI/CD fix
|
- [x] CI/CD fix
|
||||||
- [ ] Database local (multiple bank accounts)
|
- [ ] Database local (multiple bank accounts)
|
||||||
- [ ] Add tests and set up github pipeline
|
- [ ] Add tests and set up github pipeline
|
||||||
- [ ] Frontend imporvment - user experience
|
- [ ] Frontend imporvment - user experience
|
||||||
|
|||||||
54
7project/meetings/2025-10-23-meeting.md
Normal file
54
7project/meetings/2025-10-23-meeting.md
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
# Weekly Meeting Notes
|
||||||
|
|
||||||
|
- Group 8 - Personal finance tracker
|
||||||
|
- Mentor: Jaychander
|
||||||
|
|
||||||
|
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||||
|
Just copy the template below for each weekly meeting and fill in the details.
|
||||||
|
|
||||||
|
## Administrative Info
|
||||||
|
|
||||||
|
- Date: 2025-10-23
|
||||||
|
- Attendees: Dejan
|
||||||
|
- Notetaker: Dejan
|
||||||
|
|
||||||
|
## Progress Update (Before Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [x] OAuth (BankID)
|
||||||
|
- [x] CI/CD fix
|
||||||
|
- [X] Database local (multiple bank accounts)
|
||||||
|
- [X] Add tests and set up github pipeline
|
||||||
|
- [X] Frontend imporvment - user experience
|
||||||
|
- [ ] make the report more clear - partly
|
||||||
|
|
||||||
|
Summary of what has been accomplished since the last meeting in the following categories.
|
||||||
|
|
||||||
|
### Coding
|
||||||
|
Improved Frontend, added Mock Bank, fixed deployment, fixed OAuth(BankID) on production, added basic tests
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
Not much - just updated the work done
|
||||||
|
|
||||||
|
## Questions and Topics for Discussion (Before Meeting)
|
||||||
|
|
||||||
|
This was not prepared, I planned to do it right before meeting, but Jaychander needed to go somewhere earlier.
|
||||||
|
|
||||||
|
1. Question 1
|
||||||
|
2. Question 2
|
||||||
|
3. Question 3
|
||||||
|
|
||||||
|
## Discussion Notes (During Meeting)
|
||||||
|
The tracker should not store the transactions in the database - security vulnerability.
|
||||||
|
|
||||||
|
## Action Items for Next Week (During Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [ ] Dont store data in database (security) - Load it on login (from CSAS API and local database), load automatically with email
|
||||||
|
- [ ] Go through the checklist
|
||||||
|
- [ ] Look for possible APIs (like stocks or financial details whatever)
|
||||||
|
- [ ] Report
|
||||||
|
|
||||||
|
---
|
||||||
51
7project/meetings/2025-10-30-meeting.md
Normal file
51
7project/meetings/2025-10-30-meeting.md
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
# Weekly Meeting Notes
|
||||||
|
|
||||||
|
- Group 8 - Personal finance tracker
|
||||||
|
- Mentor: Jaychander
|
||||||
|
|
||||||
|
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||||
|
Just copy the template below for each weekly meeting and fill in the details.
|
||||||
|
|
||||||
|
## Administrative Info
|
||||||
|
|
||||||
|
- Date: 2025-10-30
|
||||||
|
- Attendees: Dejan, Lukas
|
||||||
|
- Notetaker: Dejan
|
||||||
|
|
||||||
|
## Progress Update (Before Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [ ] Dont store data in database (security) - Load it on login (from CSAS API and local database), load automatically with email
|
||||||
|
- [X] Go through the checklist
|
||||||
|
- [X] Look for possible APIs (like stocks or financial details whatever)
|
||||||
|
- [ ] Report - partly
|
||||||
|
|
||||||
|
Summary of what has been accomplished since the last meeting in the following categories.
|
||||||
|
|
||||||
|
### Coding
|
||||||
|
Implemented CSAS API transactions fetch, Added tests with testing database on github actions, redone UI,
|
||||||
|
added currency exchange rate with CNB API
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
Not much - just updated the work done
|
||||||
|
|
||||||
|
## Questions and Topics for Discussion (Before Meeting)
|
||||||
|
|
||||||
|
1. Security regarding storing transactions - possibility of encryption
|
||||||
|
2. Realisticaly what needs to be done for us to be done
|
||||||
|
3. Question 3
|
||||||
|
|
||||||
|
## Discussion Notes (During Meeting)
|
||||||
|
The tracker should not store the transactions in the database - security vulnerability.
|
||||||
|
|
||||||
|
## Action Items for Next Week (During Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [x] Change the name on frontend from 7project
|
||||||
|
- [x] Finalize the funcionality and everyting in the code part
|
||||||
|
- [ ] Try to finalize report with focus on reproducibility
|
||||||
|
- [ ] More high level explanation of the workflow in the report
|
||||||
|
|
||||||
|
---
|
||||||
47
7project/meetings/2025-11-6-meeting.md
Normal file
47
7project/meetings/2025-11-6-meeting.md
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
# Weekly Meeting Notes
|
||||||
|
|
||||||
|
- Group 8 - Personal finance tracker
|
||||||
|
- Mentor: Jaychander
|
||||||
|
|
||||||
|
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||||
|
Just copy the template below for each weekly meeting and fill in the details.
|
||||||
|
|
||||||
|
## Administrative Info
|
||||||
|
|
||||||
|
- Date: 2025-10-30
|
||||||
|
- Attendees: Dejan, Lukas
|
||||||
|
- Notetaker: Dejan
|
||||||
|
|
||||||
|
## Progress Update (Before Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [x] Change the name on frontend from 7project
|
||||||
|
- [x] Finalize the funcionality and everyting in the code part
|
||||||
|
- [x] Try to finalize report with focus on reproducibility
|
||||||
|
- [x] More high level explanation of the workflow in the report
|
||||||
|
|
||||||
|
Summary of what has been accomplished since the last meeting in the following categories.
|
||||||
|
|
||||||
|
### Coding
|
||||||
|
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
|
||||||
|
## Questions and Topics for Discussion (Before Meeting)
|
||||||
|
|
||||||
|
|
||||||
|
## Discussion Notes (During Meeting)
|
||||||
|
The tracker should not store the transactions in the database - security vulnerability.
|
||||||
|
|
||||||
|
## Action Items for Next Week (During Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [ ] video
|
||||||
|
- [ ] highlight the optional stuff in the report
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
---
|
||||||
@@ -1,242 +1,472 @@
|
|||||||
# Personal finance tracker
|
# Personal finance tracker
|
||||||
|
|
||||||
> **Instructions**:
|
<!--- **Instructions**:
|
||||||
> This template provides the structure for your project report.
|
> This template provides the structure for your project report.
|
||||||
> Replace the placeholder text with your actual content.
|
> Replace the placeholder text with your actual content.
|
||||||
> Remove instructions that are not relevant for your project, but leave the headings along with a (NA) label.
|
> Remove instructions that are not relevant for your project, but leave the headings along with a (NA) label. -->
|
||||||
|
|
||||||
## Project Overview
|
## Project Overview
|
||||||
|
|
||||||
**Project Name**: Personal Finance Tracker
|
**Project Name**: Personal Finance Tracker
|
||||||
|
|
||||||
|
**Deployment URL**: https://finance.ltrk.cz/
|
||||||
|
|
||||||
**Group Members**:
|
**Group Members**:
|
||||||
|
|
||||||
- 289229, Lukáš Trkan, lukastrkan
|
- 289229, Lukáš Trkan, lukastrkan
|
||||||
- 289258, Dejan Ribarovski, derib2613, ribardej
|
- 289258, Dejan Ribarovski, ribardej (derib2613)
|
||||||
|
|
||||||
**Brief Description**:
|
**Brief Description**:
|
||||||
Our application is a finance tracker, so a person can easily track his cash flow
|
Our application allows users to easily track their cash flow
|
||||||
through multiple bank accounts. Person can label transactions with custom categories
|
through multiple bank accounts. Users can label their transactions with custom categories that can be later used for
|
||||||
and later filter by them.
|
filtering and visualization. New transactions are automatically fetched in the background.
|
||||||
|
|
||||||
## Architecture Overview
|
## Architecture Overview
|
||||||
Our system is a full‑stack web application composed of a React frontend, a FastAPI backend, a PostgreSQL database, and asynchronous background workers powered by Celery with RabbitMQ. Redis is available for caching/kv and may be used by Celery as a result backend. The backend exposes REST endpoints for authentication (email/password and OAuth), users, categories, and transactions. A thin controller layer (FastAPI routers) lives under app/api. Infrastructure for Kubernetes is provided via OpenTofu (Terraform‑compatible) modules and the application is packaged via a Helm chart.
|
|
||||||
|
Our system is a full‑stack web application composed of a React frontend, a FastAPI backend,
|
||||||
|
a asynchronousMariaDB database with Maxscale, and background workers powered by Celery with RabbitMQ.
|
||||||
|
The backend exposes REST endpoints for authentication (email/password and OAuth), users, categories,
|
||||||
|
transactions, exchange rates and bank APIs. Infrastructure for Kubernetes is managed via Terraform/OpenTofu and
|
||||||
|
the application is packaged via a Helm chart. This all is deployed on private TalosOS cluster running on Proxmox VE with
|
||||||
|
CI/CD and with public access over Cloudflare tunnels. Static files for frontend are served via Cloudflare pages.
|
||||||
|
Other services deployed in the cluster includes Longhorn for persistent storage, Prometheus with Grafana for monitoring.
|
||||||
|
|
||||||
### High-Level Architecture
|
### High-Level Architecture
|
||||||
|
|
||||||
```mermaid
|
```mermaid
|
||||||
flowchart LR
|
flowchart TB
|
||||||
proc_queue[Message Queue] --> proc_queue_worker[Worker Service]
|
n3(("User")) <--> client["Frontend"]
|
||||||
proc_queue_worker --> ext_mail[(Email Service)]
|
proc_queue["Message Queue"] --> proc_queue_worker["Worker Service"]
|
||||||
proc_cron[Task planner] --> proc_queue
|
proc_queue_worker -- SMTP --> ext_mail[("Email Service")]
|
||||||
proc_queue_worker --> ext_bank[(Bank API)]
|
proc_queue_worker <-- HTTP request/response --> ext_bank[("Bank API")]
|
||||||
proc_queue_worker --> db
|
proc_queue_worker <--> db[("Database")]
|
||||||
client[Client/Frontend] <--> svc[Backend API]
|
proc_cron["Cron"] <-- HTTP request/response --> svc["Backend API"]
|
||||||
svc --> proc_queue
|
svc --> proc_queue
|
||||||
svc <--> db[(Database)]
|
n2["Cloudflare tunnel"] <-- HTTP request/response --> svc
|
||||||
svc <--> cache[(Cache)]
|
svc <--> db
|
||||||
|
svc <-- HTTP request/response --> api[("UniRate API")]
|
||||||
|
client <-- HTTP request/response --> n2
|
||||||
```
|
```
|
||||||
|
|
||||||
|
The workflow works in the following way:
|
||||||
|
|
||||||
|
- Client connects to the frontend. After login, frontend automatically fetches the stored transactions from
|
||||||
|
the database via the backend API and currency rates from UniRate API.
|
||||||
|
- When the client opts for fetching new transactions via the Bank API, cron will trigger periodic fetching
|
||||||
|
using background worker.
|
||||||
|
- After successful load, these transactions are stored to the database and displayed to the client
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- The stored transactions are encrypted in the DB for security reasons.
|
||||||
|
- For every pull request the full APP is deployed on a separate URL and the tests are run by github CI/CD
|
||||||
|
- On every push to main, the production app is automatically updated
|
||||||
|
- UI is responsive for mobile devices
|
||||||
|
- Slow operations (emails, transactions fetching) are handled
|
||||||
|
in the background by Celery workers.
|
||||||
|
- App is monitored using prometheus metrics endpoint and metrics are shown in Grafana dashboard.
|
||||||
|
|
||||||
### Components
|
### Components
|
||||||
|
|
||||||
- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles login/registration, shows latest transactions, filtering, and allows adding transactions.
|
- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles
|
||||||
- Backend API (backend/app): FastAPI app with routers under app/api for auth, categories, and transactions. Uses FastAPI Users for auth (JWT + OAuth), SQLAlchemy ORM, and Pydantic v2 schemas.
|
login/registration, shows latest transactions, filtering, and allows adding transactions.
|
||||||
- Worker service (backend/app/workers): Celery worker handling asynchronous tasks (e.g., sending verification emails, future background processing).
|
- Backend API (backend/app): FastAPI app with routers under app/api for auth, users, categories, transactions, exchange
|
||||||
- Database (PostgreSQL): Persists users, categories, transactions; schema managed by Alembic migrations.
|
rates and bankAPI. Uses FastAPI Users for auth (JWT + OAuth), SQLAlchemy ORM, and Pydantic v2 schemas.
|
||||||
- Message Queue (RabbitMQ): Transports background jobs from the API to the worker.
|
- Worker service (backend/app/workers): Celery worker handling background tasks (emails, transactions fetching).
|
||||||
- Cache/Result Store (Redis): Available for caching or Celery result backend.
|
- Database (MariaDB with Maxscale): Persists users, categories, transactions; schema managed by Alembic migrations.
|
||||||
- Infrastructure as Code (tofu/): OpenTofu modules provisioning cluster services (RabbitMQ, Redis, Argo CD, cert-manager, Cloudflare tunnel, etc.).
|
- Message Queue (RabbitMQ): Queues background tasks for Celery workers.
|
||||||
|
- Infrastructure as Code (tofu/): OpenTofu modules provisioning cluster services (RabbitMQ, Redis, Cloudflare tunnel,
|
||||||
|
etc.).
|
||||||
- Deployment Chart (charts/myapp-chart/): Helm chart to deploy the application to Kubernetes.
|
- Deployment Chart (charts/myapp-chart/): Helm chart to deploy the application to Kubernetes.
|
||||||
|
|
||||||
### Technologies Used
|
### Technologies Used
|
||||||
|
|
||||||
- Backend: Python, FastAPI, FastAPI Users, SQLAlchemy, Pydantic, Alembic, Celery
|
- Backend: Python, FastAPI, FastAPI Users, SQLAlchemy, Pydantic, Alembic, Celery
|
||||||
- Frontend: React, TypeScript, Vite
|
- Frontend: React, TypeScript, Vite
|
||||||
- Database: PostgreSQL
|
- Database: MariaDB with Maxscale
|
||||||
- Messaging: RabbitMQ
|
- Background jobs: RabbitMQ, Celery
|
||||||
- Cache: Redis
|
|
||||||
- Containerization/Orchestration: Docker, Docker Compose (dev), Kubernetes, Helm
|
- Containerization/Orchestration: Docker, Docker Compose (dev), Kubernetes, Helm
|
||||||
- IaC/Platform: OpenTofu (Terraform), Argo CD, cert-manager, MetalLB, Cloudflare Tunnel, Prometheus
|
- IaC/Platform: Proxmox, Talos, Cloudflare pages, OpenTofu (Terraform), cert-manager, MetalLB, Cloudflare Tunnel,
|
||||||
|
Prometheus, Loki
|
||||||
|
|
||||||
## Prerequisites
|
## Prerequisites
|
||||||
|
|
||||||
|
Here are software and hardware prerequisites for the development and production environments. This section also
|
||||||
|
describes
|
||||||
|
necessary environment variables and key dependencies used in the project.
|
||||||
|
|
||||||
### System Requirements
|
### System Requirements
|
||||||
|
|
||||||
- Operating System: Linux, macOS, or Windows
|
#### Development
|
||||||
- Minimum RAM: 4 GB (8 GB recommended for running backend, frontend, and database together)
|
|
||||||
- Storage: 2 GB free (Docker images may require additional space)
|
- OS: Tested on macOS; Linux and Windows should work as well
|
||||||
|
- Minimum RAM: 8 GB
|
||||||
|
- Storage: 10 GB+ free
|
||||||
|
|
||||||
|
#### Production
|
||||||
|
|
||||||
|
- 1 + 4 nodes
|
||||||
|
- CPU: 4 cores
|
||||||
|
- RAM: 8 GB
|
||||||
|
- Storage: 200 GB
|
||||||
|
|
||||||
### Required Software
|
### Required Software
|
||||||
|
|
||||||
- Docker Desktop or Docker Engine 24+
|
#### Development
|
||||||
- Docker Compose v2+
|
|
||||||
- Node.js 20+ and npm 10+ (for local frontend dev/build)
|
|
||||||
- Python 3.12+ (for local backend dev outside Docker)
|
|
||||||
- PostgreSQL 15+ (optional if running DB outside Docker)
|
|
||||||
- Helm 3.12+ and kubectl 1.29+ (for Kubernetes deployment)
|
|
||||||
- OpenTofu 1.7+ (for infrastructure provisioning)
|
|
||||||
|
|
||||||
### Environment Variables (common)
|
- Docker
|
||||||
|
- Docker Compose
|
||||||
|
- Node.js and npm
|
||||||
|
- Python 3.12
|
||||||
|
- MariaDB 11
|
||||||
|
|
||||||
- Backend: SECRET, FRONTEND_URL, BACKEND_URL, DATABASE_URL, RABBITMQ_URL, REDIS_URL
|
#### Production
|
||||||
- OAuth vars (Backend): MOJEID_CLIENT_ID/SECRET, BANKID_CLIENT_ID/SECRET (optional)
|
|
||||||
- Frontend: VITE_BACKEND_URL
|
##### Minimal:
|
||||||
|
|
||||||
|
- domain name with Cloudflare's nameservers - tunnel, pages
|
||||||
|
- Kubernetes cluster
|
||||||
|
- kubectl
|
||||||
|
- Helm
|
||||||
|
- OpenTofu
|
||||||
|
|
||||||
|
##### Our setup specifics:
|
||||||
|
|
||||||
|
- Proxmox VE
|
||||||
|
- TalosOS cluster
|
||||||
|
- talosctl
|
||||||
|
- GitHub self-hosted runner with access to the cluster
|
||||||
|
- TailScale for remote access to cluster
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
#### Backend
|
||||||
|
|
||||||
|
- `MOJEID_CLIENT_ID`, `MOJEID_CLIENT_SECRET` \- OAuth client ID and secret for
|
||||||
|
[MojeID](https://www.mojeid.cz/en/provider/)
|
||||||
|
- `BANKID_CLIENT_ID`, `BANKID_CLIENT_SECRET` \- OAuth client ID and secret for [BankID](https://developer.bankid.cz/)
|
||||||
|
- `CSAS_CLIENT_ID`, `CSAS_CLIENT_SECRET` \- OAuth client ID and secret for [Česká
|
||||||
|
spořitelna](https://developers.erstegroup.com/docs/apis/bank.csas)
|
||||||
|
- `DATABASE_URL`(or `MARIADB_HOST`, `MARIADB_PORT`, `MARIADB_DB`, `MARIADB_USER`, `MARIADB_PASSWORD`) \- MariaDB
|
||||||
|
connection details
|
||||||
|
- `RABBITMQ_USERNAME`, `RABBITMQ_PASSWORD` \- credentials for RabbitMQ
|
||||||
|
- `SENTRY_DSN` \- Sentry DSN for error reporting
|
||||||
|
- `DB_ENCRYPTION_KEY` \- symmetric key for encrypting sensitive data in the database
|
||||||
|
- `SMTP_HOST`, `SMTP_PORT`, `SMTP_USERNAME`, `SMTP_PASSWORD`, `SMTP_USE_TLS`, `SMTP_USE_SSL`, `SMTP_FROM` \- SMTP
|
||||||
|
configuration (host, port, auth credentials, TLS/SSL options, sender).
|
||||||
|
- `UNIRATE_API_KEY` \- API key for UniRate.
|
||||||
|
|
||||||
|
#### Frontend
|
||||||
|
|
||||||
|
- `VITE_BACKEND_URL` \- URL of the backend API
|
||||||
|
|
||||||
### Dependencies (key libraries)
|
### Dependencies (key libraries)
|
||||||
I am not sure what is meant by "key libraries"
|
|
||||||
|
|
||||||
Backend: FastAPI, fastapi-users, SQLAlchemy, pydantic v2, Alembic, Celery
|
Backend: FastAPI, fastapi-users, SQLAlchemy, pydantic v2, Alembic, Celery, uvicorn, pytest
|
||||||
Frontend: React, TypeScript, Vite
|
Frontend: React, TypeScript, Vite
|
||||||
Services: PostgreSQL, RabbitMQ, Redis
|
|
||||||
|
|
||||||
## Build Instructions
|
## Local development
|
||||||
|
|
||||||
You can run the project with Docker Compose (recommended for local development) or run services manually.
|
You can run the project with Docker Compose and Python virtual environment for testing and development purposes
|
||||||
|
|
||||||
### 1) Clone the Repository
|
### 1) Clone the Repository
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone https://github.com/dat515-2025/Group-8.git
|
git clone https://github.com/dat515-2025/Group-8.git
|
||||||
cd 7project
|
cd Group-8/7project
|
||||||
```
|
```
|
||||||
|
|
||||||
### 2) Install dependencies
|
### 2) Install dependencies
|
||||||
|
|
||||||
Backend
|
Backend
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# In 7project/backend
|
cd backend
|
||||||
python3.12 -m venv .venv
|
python3 -m venv .venv
|
||||||
source .venv/bin/activate # Windows: .venv\Scripts\activate
|
source .venv/bin/activate
|
||||||
pip install -r requirements.txt
|
pip install -r requirements.txt
|
||||||
```
|
```
|
||||||
Frontend
|
|
||||||
|
### 3) Run Docker containers
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# In 7project/frontend
|
cd ..
|
||||||
npm install
|
docker compose up -d
|
||||||
```
|
```
|
||||||
|
|
||||||
### 3) Manual Local Run
|
### 4) Prepare the database
|
||||||
|
|
||||||
Backend
|
|
||||||
```bash
|
```bash
|
||||||
# From the 7project/ directory
|
bash upgrade_database.sh
|
||||||
docker compose up --build
|
```
|
||||||
# This starts: PostgreSQL, RabbitMQ/Redis (if defined)
|
|
||||||
|
|
||||||
# Set environment variables (or create .env file)
|
### 5) Run backend
|
||||||
export SECRET=CHANGE_ME_SECRET
|
|
||||||
export BACKEND_URL=http://127.0.0.1:8000
|
|
||||||
export FRONTEND_URL=http://localhost:5173
|
|
||||||
export DATABASE_URL=postgresql+asyncpg://user:password@127.0.0.1:5432/app
|
|
||||||
export RABBITMQ_URL=amqp://guest:guest@127.0.0.1:5672/
|
|
||||||
export REDIS_URL=redis://127.0.0.1:6379/0
|
|
||||||
|
|
||||||
# Apply DB migrations (Alembic)
|
```bash
|
||||||
# From 7project/backend
|
cd backend
|
||||||
alembic upgrade head
|
|
||||||
|
|
||||||
# Run API
|
#TODO: set env variables
|
||||||
uvicorn app.app:fastApi --reload --host 0.0.0.0 --port 8000
|
uvicorn app.app:fastApi --reload --host 0.0.0.0 --port 8000
|
||||||
|
```
|
||||||
|
|
||||||
# Run Celery worker (optional, for emails/background tasks)
|
### 6) Run Celery worker (optional, in another terminal)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd Group-8/7project/src/backend
|
||||||
|
source .venv/bin/activate
|
||||||
celery -A app.celery_app.celery_app worker -l info
|
celery -A app.celery_app.celery_app worker -l info
|
||||||
```
|
```
|
||||||
|
|
||||||
Frontend
|
### 7) Install frontend dependencies and run
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Configure backend URL for dev
|
cd ../frontend
|
||||||
echo 'VITE_BACKEND_URL=http://127.0.0.1:8000' > .env
|
npm i
|
||||||
npm run dev
|
npm run dev
|
||||||
# Open http://localhost:5173
|
|
||||||
```
|
```
|
||||||
|
|
||||||
- Backend default: http://127.0.0.1:8000 (OpenAPI at /docs)
|
- Backend available at: http://127.0.0.1:8000 (OpenAPI at /docs)
|
||||||
- Frontend default: http://localhost:5173
|
- Frontend available at: http://localhost:5173
|
||||||
|
|
||||||
If needed, adjust compose services/ports in compose.yml.
|
## Build Instructions
|
||||||
|
|
||||||
|
### Backend
|
||||||
|
|
||||||
|
App is separated into backend and frontend so it also needs to be built separately. Backend is built into a Docker image
|
||||||
|
and frontend is deployed as static files.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd 7project/backend
|
||||||
|
# Dont forget to set correct image tag with your registry and name
|
||||||
|
# For example lukastrkan/cc-app-demo or gitea.ltrk.dev/lukas/cc-app-demo
|
||||||
|
docker buildx build --platform linux/amd64,linux/arm64 -t CHANGE_ME --push .
|
||||||
|
```
|
||||||
|
|
||||||
|
### Frontend
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd 7project/src/frontend
|
||||||
|
npm ci
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
## Deployment Instructions
|
## Deployment Instructions
|
||||||
|
|
||||||
### Local (Docker Compose)
|
Deployment is tested on TalosOS cluster with 1 control plane and 4 workers, cluster needs to be setup and configured
|
||||||
|
manually. Terraform/OpenTofu is then used to deploy base services to the cluster. App itself is deployed automatically
|
||||||
|
via GitHub actions and Helm chart. Frontend files are deployed to Cloudflare pages.
|
||||||
|
|
||||||
Described in the previous section (Manual Local Run)
|
### Setup Cluster
|
||||||
|
|
||||||
### Kubernetes (via OpenTofu + Helm)
|
Deployment should work on any Kubernetes cluster. However, we are using 5 TalosOS virtual machines (1 control plane, 4
|
||||||
|
workers)
|
||||||
|
running on top of Proxmox VE.
|
||||||
|
|
||||||
|
1) Create at least 5 VMs with TalosOS (4 cores, 8 GB RAM, 200 GB disk)
|
||||||
|
2) Install talosctl for your OS: https://docs.siderolabs.com/talos/v1.10/getting-started/talosctl
|
||||||
|
3) Generate Talos config
|
||||||
|
4) Navigate to tofu directory
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd 7project/src/tofu
|
||||||
|
```
|
||||||
|
|
||||||
|
5) Set IP addresses in environment variables
|
||||||
|
|
||||||
|
```bash
|
||||||
|
CONTROL_PLANE_IP=<control-plane-ip>
|
||||||
|
WORKER1_IP=<worker1-ip>
|
||||||
|
WORKER2_IP=<worker2-ip>
|
||||||
|
WORKER3_IP=<worker3-ip>
|
||||||
|
WORKER4_IP=<worker4-ip>
|
||||||
|
....
|
||||||
|
```
|
||||||
|
|
||||||
|
6) Create config files
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# change my-cluster to your desired cluster name
|
||||||
|
talosctl gen config my-cluster https://$CONTROL_PLANE_IP:6443
|
||||||
|
```
|
||||||
|
|
||||||
|
7) Edit the generated configs
|
||||||
|
|
||||||
|
Apply the following changes to `worker.yaml`:
|
||||||
|
|
||||||
|
1) Add mounts for persistent storage to `machine.kubelet.extraMounts` section:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
extraMounts:
|
||||||
|
- destination: /var/lib/longhorn
|
||||||
|
type: bind
|
||||||
|
source: /var/lib/longhorn
|
||||||
|
options:
|
||||||
|
- bind
|
||||||
|
- rshared
|
||||||
|
- rw
|
||||||
|
```
|
||||||
|
|
||||||
|
2) Change `machine.install.image` to image with extra modules:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
image: factory.talos.dev/metal-installer/88d1f7a5c4f1d3aba7df787c448c1d3d008ed29cfb34af53fa0df4336a56040b:v1.11.1
|
||||||
|
```
|
||||||
|
|
||||||
|
or you can use latest image generated at https://factory.talos.dev with following options:
|
||||||
|
|
||||||
|
- Bare-metal machine
|
||||||
|
- your Talos OS version
|
||||||
|
- amd64 architecture
|
||||||
|
- siderolabs/iscsi-tools
|
||||||
|
- siderolabs/util-linux-tools
|
||||||
|
- (Optionally) siderolabs/qemu-guest-agent
|
||||||
|
|
||||||
|
Then copy "Initial Installation" value and paste it to the image field.
|
||||||
|
|
||||||
|
3) Add docker registry mirror to `machine.registries.mirrors` section:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
registries:
|
||||||
|
mirrors:
|
||||||
|
docker.io:
|
||||||
|
endpoints:
|
||||||
|
- https://mirror.gcr.io
|
||||||
|
- https://registry-1.docker.io
|
||||||
|
```
|
||||||
|
|
||||||
|
8) Apply configs to the VMs
|
||||||
|
|
||||||
|
```bash
|
||||||
|
talosctl apply-config --insecure --nodes $CONTROL_PLANE_IP --file controlplane.yaml
|
||||||
|
talosctl apply-config --insecure --nodes $WORKER1_IP --file worker.yaml
|
||||||
|
talosctl apply-config --insecure --nodes $WORKER2_IP --file worker.yaml
|
||||||
|
talosctl apply-config --insecure --nodes $WORKER3_IP --file worker.yaml
|
||||||
|
talosctl apply-config --insecure --nodes $WORKER4_IP --file worker.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
9) Bootstrap the cluster and retrieve kubeconfig
|
||||||
|
|
||||||
|
```bash
|
||||||
|
export TALOSCONFIG=$(pwd)/talosconfig
|
||||||
|
talosctl config endpoint $CONTROL_PLANE_IP
|
||||||
|
talosctl config node $CONTROL_PLANE_IP
|
||||||
|
|
||||||
|
talosctl bootstrap
|
||||||
|
|
||||||
|
talosctl kubeconfig .
|
||||||
|
```
|
||||||
|
|
||||||
|
You can now use k8s client like https://headlamp.dev/ with the generated kubeconfig file.
|
||||||
|
|
||||||
|
### Install base services to the cluster
|
||||||
|
|
||||||
|
1) Copy and edit variables
|
||||||
|
|
||||||
1) Provision platform services (RabbitMQ/Redis/ingress/tunnel/etc.) with OpenTofu
|
|
||||||
```bash
|
```bash
|
||||||
cd tofu
|
|
||||||
# copy and edit variables
|
|
||||||
cp terraform.tfvars.example terraform.tfvars
|
cp terraform.tfvars.example terraform.tfvars
|
||||||
# authenticate to your cluster/cloud as needed, then:
|
```
|
||||||
|
|
||||||
|
- `metallb_ip_range` - set to range available in your network for load balancer services
|
||||||
|
- `mariadb_password` - password for internal mariadb user
|
||||||
|
- `mariadb_root_password` - password for root user
|
||||||
|
- `mariadb_user_name` - username for admin user
|
||||||
|
- `mariadb_user_host` - allowed hosts for admin user
|
||||||
|
- `mariadb_user_password` - password for admin user
|
||||||
|
- `metallb_maxscale_ip`, `metallb_service_ip`, `metallb_primary_ip`, `metallb_secondary_ip` - IPs for database
|
||||||
|
cluster,
|
||||||
|
set them to static IPs from the `metallb_ip_range`
|
||||||
|
- `s3_enabled`, `s3_bucket`, `s3_region`, `s3_endpoint`, `s3_key_id`, `s3_key_secret` - S3 compatible storage for
|
||||||
|
backups (optional)
|
||||||
|
- `phpmyadmin_enabled` - set to false if you want to disable phpmyadmin
|
||||||
|
- `rabbitmq-password` - password for RabbitMQ
|
||||||
|
|
||||||
|
- `cloudflare_account_id` - your Cloudflare account ID
|
||||||
|
- `cloudflare_api_token` - your Cloudflare API token with permissions to manage tunnels and DNS
|
||||||
|
- `cloudflare_email` - your Cloudflare account email
|
||||||
|
- `cloudflare_tunnel_name` - name for the tunnel
|
||||||
|
- `cloudflare_domain` - your domain name managed in Cloudflare
|
||||||
|
|
||||||
|
2) Deploy without Cloudflare module first
|
||||||
|
|
||||||
|
```bash
|
||||||
tofu init
|
tofu init
|
||||||
tofu plan
|
tofu apply -exclude module.cloudflare
|
||||||
|
```
|
||||||
|
|
||||||
|
3) Deploy rest of the modules
|
||||||
|
|
||||||
|
```bash
|
||||||
tofu apply
|
tofu apply
|
||||||
```
|
```
|
||||||
|
|
||||||
2) Deploy the app using Helm
|
### Configure deployment
|
||||||
```bash
|
|
||||||
# Set the namespace
|
|
||||||
kubectl create namespace myapp || true
|
|
||||||
|
|
||||||
# Install/upgrade the chart with required values
|
1) Create a self-hosted runner with access to the cluster or make the cluster publicly accessible
|
||||||
helm upgrade --install myapp charts/myapp-chart \
|
2) Change `jobs.deploy.runs-on` in `.github/workflows/deploy-prod.yml` and in `.github/workflows/deploy-pr.yaml` to your
|
||||||
-n myapp \
|
runner label
|
||||||
-f charts/myapp-chart/values.yaml \
|
3) Add variables to GitHub in repository settings:
|
||||||
--set image.backend.repository=myorg/myapp-backend \
|
- `PROD_DOMAIN` - base domain for deployments (e.g. ltrk.cz)
|
||||||
--set image.backend.tag=latest \
|
- `DEV_FRONTEND_BASE_DOMAIN` - base domain for your cloudflare pages
|
||||||
--set env.BACKEND_URL="https://myapp.example.com" \
|
4) Add secrets to GitHub in repository settings:
|
||||||
--set env.FRONTEND_URL="https://myapp.example.com" \
|
- CLOUDFLARE_ACCOUNT_ID - same as in tofu/terraform.tfvars
|
||||||
--set env.SECRET="CHANGE_ME_SECRET"
|
- CLOUDFLARE_API_TOKEN - same as in tofu/terraform.tfvars
|
||||||
```
|
- DOCKER_USER - your docker registry username
|
||||||
Adjust values to your registry and domain. The chart’s NOTES.txt includes additional examples.
|
- DOCKER_PASSWORD - your docker registry password
|
||||||
|
- KUBE_CONFIG - content of your kubeconfig file for the cluster
|
||||||
3) Expose and access
|
- PROD_DB_PASSWORD - same as MARIADB_PASSWORD
|
||||||
- If using Cloudflare Tunnel or an ingress, configure DNS accordingly (see tofu/modules/cloudflare and deployment/tunnel.yaml).
|
- PROD_RABBITMQ_PASSWORD - same as the `rabbitmq-password` value in tofu/terraform.tfvars
|
||||||
- For quick testing without ingress:
|
- PROD_DB_ENCRYPTION_KEY - same as DB_ENCRYPTION_KEY
|
||||||
```bash
|
- MOJEID_CLIENT_ID
|
||||||
kubectl -n myapp port-forward deploy/myapp-backend 8000:8000
|
- MOJEID_CLIENT_SECRET
|
||||||
kubectl -n myapp port-forward deploy/myapp-frontend 5173:80
|
- BANKID_CLIENT_ID
|
||||||
```
|
- BANKID_CLIENT_SECRET
|
||||||
|
- CSAS_CLIENT_ID
|
||||||
### Verification
|
- CSAS_CLIENT_SECRET
|
||||||
|
- SENTRY_DSN
|
||||||
```bash
|
- SMTP_HOST
|
||||||
# Check pods
|
- SMTP_PORT
|
||||||
kubectl -n myapp get pods
|
- SMTP_USERNAME
|
||||||
|
- SMTP_PASSWORD
|
||||||
# Backend health
|
- SMTP_FROM
|
||||||
curl -i http://127.0.0.1:8000/
|
- UNIRATE_API_KEY
|
||||||
# OpenAPI
|
5) On GitHub, open the Actions tab, select "Deploy Prod" and run the workflow manually
|
||||||
open http://127.0.0.1:8000/docs
|
|
||||||
|
|
||||||
# Frontend (if port-forwarded)
|
|
||||||
open http://localhost:5173
|
|
||||||
```
|
|
||||||
|
|
||||||
## Testing Instructions
|
## Testing Instructions
|
||||||
|
|
||||||
### Unit Tests
|
The tests are located in 7project/backend/tests directory. All tests are run by GitHub actions on every pull request and
|
||||||
|
push to main.
|
||||||
|
See the workflow [here](../.github/workflows/run-tests.yml).
|
||||||
|
|
||||||
|
If you want to run the tests locally, the preferred way is to use a [bash script](backend/test_locally.sh)
|
||||||
|
that will start a test DB container with [docker compose](backend/docker-compose.test.yml) and remove it afterwards.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Commands to run unit tests
|
cd 7project/src/backend
|
||||||
# For example:
|
bash test_locally.sh
|
||||||
# go test ./...
|
```
|
||||||
# npm test
|
|
||||||
|
### Unit Tests
|
||||||
|
|
||||||
|
There are only 5 basic unit tests, since our service logic is very simple
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bash test_locally.sh --only-unit
|
||||||
```
|
```
|
||||||
|
|
||||||
### Integration Tests
|
### Integration Tests
|
||||||
|
|
||||||
|
There are 9 basic integration tests, testing the individual backend API logic
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Commands to run integration tests
|
bash test_locally.sh --only-integration
|
||||||
# Any setup required for integration tests
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### End-to-End Tests
|
### End-to-End Tests
|
||||||
|
|
||||||
|
There are 7 e2e tests, testing more complex app logic
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Commands to run e2e tests
|
bash test_locally.sh --only-e2e
|
||||||
# How to set up test environment
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Usage Examples
|
## Usage Examples
|
||||||
@@ -269,7 +499,12 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
|||||||
|
|
||||||
### Frontend
|
### Frontend
|
||||||
|
|
||||||
- Start with: npm run dev in 7project/frontend
|
- Start with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run dev in 7project/src/frontend
|
||||||
|
```
|
||||||
|
|
||||||
- Ensure VITE_BACKEND_URL is set to the backend URL (e.g., http://127.0.0.1:8000)
|
- Ensure VITE_BACKEND_URL is set to the backend URL (e.g., http://127.0.0.1:8000)
|
||||||
- Open http://localhost:5173
|
- Open http://localhost:5173
|
||||||
- Login, view latest transactions, filter, and add new transactions from the UI.
|
- Login, view latest transactions, filter, and add new transactions from the UI.
|
||||||
@@ -293,15 +528,25 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
|||||||
|
|
||||||
### Common Issues
|
### Common Issues
|
||||||
|
|
||||||
#### Issue 1: [Common problem]
|
#### Issue 1: Unable to apply Cloudflare terraform module
|
||||||
|
|
||||||
**Symptoms**: [What the user sees]
|
**Symptoms**: Terraform/OpenTofu apply fails during Cloudflare module deployment.
|
||||||
**Solution**: [Step-by-step fix]
|
This is caused by unknown variable not known beforehand.
|
||||||
|
|
||||||
#### Issue 2: [Another common problem]
|
**Solution**: Apply first without Cloudflare module and then apply again.
|
||||||
|
|
||||||
**Symptoms**: [What the user sees]
|
```bash
|
||||||
**Solution**: [Step-by-step fix]
|
tofu apply -exclude module.cloudflare
|
||||||
|
tofu apply
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Issue 2: Pods are unable to start
|
||||||
|
|
||||||
|
**Symptoms**: Pods are unable to start with ImagePullBackOff error. This could be caused
|
||||||
|
by either hitting docker hub rate limits or by docker hub being down.
|
||||||
|
|
||||||
|
**Solution**: Make sure you updated the cluster config to use registry mirror as described in
|
||||||
|
"Setup Cluster" section.
|
||||||
|
|
||||||
### Debug Commands
|
### Debug Commands
|
||||||
|
|
||||||
@@ -313,24 +558,24 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Self-Assessment Table
|
## Progress Table
|
||||||
|
|
||||||
> Be honest and detailed in your assessments.
|
> Be honest and detailed in your assessments.
|
||||||
> This information is used for individual grading.
|
> This information is used for individual grading.
|
||||||
> Link to the specific commit on GitHub for each contribution.
|
> Link to the specific commit on GitHub for each contribution.
|
||||||
|
|
||||||
| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|
| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|
||||||
|-----------------------------------------------------------------------|-------------| ------------- |----------------|------------| ----------- |
|
|-------------------------------------------------------------------------------------------------------------------|-------------|----------------|------------|------------|-----------------------------------------------------------------------------------------------------|
|
||||||
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8#) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8#) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
||||||
| [Design Document](https://github.com/dat515-2025/Group-8/blob/main/6design/design.md) | Both | ✅ Complete | 2 Hours | Easy | [Any notes] |
|
| [Design Document](https://github.com/dat515-2025/Group-8/blob/main/6design/design.md) | Both | ✅ Complete | 4 Hours | Easy | [Any notes] |
|
||||||
| [Backend API Development](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/api) | Dejan | ✅ Complete | 10 hours | Medium | [Any notes] |
|
| [Backend API Development](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/api) | Dejan | ✅ Complete | 12 hours | Medium | [Any notes] |
|
||||||
| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
||||||
| [Frontend Development](https://github.com/dat515-2025/Group-8/tree/main/7project/frontend) | Dejan | 🔄 In Progress | 7 hours so far | Medium | [Any notes] |
|
| [Frontend Development](https://github.com/dat515-2025/Group-8/tree/main/7project/frontend) | Dejan | ✅ Complete | 17 hours | Medium | [Any notes] |
|
||||||
| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | [X hours] | Easy | [Any notes] |
|
| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | 3 hours | Easy | [Any notes] |
|
||||||
| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | [Any notes] |
|
| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | Using Talos cluster running in proxmox - easy snapshots etc. Frontend deployed at Cloudflare pages. |
|
||||||
| [Testing Implementation](https://github.com/dat515-2025/group-name) | Dejan | ❌ Not Started | [X hours] | Medium | [Any notes] |
|
| [Testing Implementation](https://github.com/dat515-2025/group-name) | Dejan | ✅ Complete | 16 hours | Medium | [Any notes] |
|
||||||
| [Documentation](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Easy | [Any notes] |
|
| [Documentation](https://github.com/dat515-2025/group-name) | Both | 🔄 In Progress | [X hours] | Easy | [Any notes] |
|
||||||
| [Presentation Video](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Medium | [Any notes] |
|
| [Presentation Video](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Medium | [Any notes] |
|
||||||
|
|
||||||
**Legend**: ✅ Complete | 🔄 In Progress | ⏳ Pending | ❌ Not Started
|
**Legend**: ✅ Complete | 🔄 In Progress | ⏳ Pending | ❌ Not Started
|
||||||
|
|
||||||
@@ -338,26 +583,48 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
|||||||
|
|
||||||
> Link to the specific commit on GitHub for each contribution.
|
> Link to the specific commit on GitHub for each contribution.
|
||||||
|
|
||||||
### [Team Member 1 Name]
|
### [Lukáš]
|
||||||
|
|
||||||
| Date | Activity | Hours | Description |
|
## Hour Sheet
|
||||||
| --------- | ------------------- | ---------- | ----------------------------------- |
|
|
||||||
| [Date] | Initial Setup | [X.X] | Repository setup, project structure |
|
**Name:** Lukáš Trkan
|
||||||
| [Date] | Backend Development | [X.X] | Implemented user authentication |
|
|
||||||
| [Date] | Testing | [X.X] | Unit tests for API endpoints |
|
| Date | Activity | Hours | Description | Representative Commit / PR |
|
||||||
| [Date] | Documentation | [X.X] | Updated README and design doc |
|
|:----------------|:----------------------------|:--------|:------------------------------------------------------------------------------------|:------------------------------------------------------|
|
||||||
| **Total** | | **[XX.X]** | |
|
| 18.9. - 19.9. | Initial Setup & Design | 40 | Repository init, system design diagrams, basic Terraform setup | `feat(infrastructure): add basic terraform resources` |
|
||||||
|
| 20.9. - 5.10. | Core Infrastructure & CI/CD | 12 | K8s setup (ArgoCD), CI/CD workflows, RabbitMQ, Redis, Celery workers, DB migrations | `PR #2`, `feat(infrastructure): add rabbitmq cluster` |
|
||||||
|
| 6.10. - 9.10. | Frontend Infra & DB | 5 | Deployed frontend to Cloudflare, setup metrics, created database models | `PR #16` (Cloudflare), `PR #19` (DB structure) |
|
||||||
|
| 10.10. - 11.10. | Backend | 5 | Implemented OAuth support (MojeID, BankID) | `feat(auth): add support for OAuth and MojeID` |
|
||||||
|
| 12.10. | Infrastructure | 2 | Added database backups | `feat(infrastructure): add backups` |
|
||||||
|
| 16.10. | Infrastructure | 4 | Implemented secrets management, fixed deployment/env variables | `PR #29` (Deployment envs) |
|
||||||
|
| 17.10. | Monitoring | 1 | Added Sentry logging | `feat(app): add sentry loging` |
|
||||||
|
| 21.10. - 22.10. | Backend | 8 | Added ČSAS bank connection | `PR #32` (Fix React OAuth) |
|
||||||
|
| 29.10. - 30.10. | Backend | 5 | Implemented transaction encryption, add bank scraping | `PR #39` (CSAS Scraping) |
|
||||||
|
| 30.10. | Monitoring | 6 | Implemented Loki logging and basic Prometheus metrics | `PR #42` (Prometheus metrics) |
|
||||||
|
| 9.11. | Monitoring | 2 | Added custom Prometheus metrics | `PR #46` (Prometheus custom metrics) |
|
||||||
|
| 11.11. | Tests | 1 | Investigated and fixed broken Pytest environment | `fix(tests): set pytest env` |
|
||||||
|
| 11.11. - 12.11. | Features & Deployment | 6 | Added cron support, email sender service, updated workers & image | `PR #49` (Email), `PR #50` (Update workers) |
|
||||||
|
| 18.9 - 14.11 | Documentation | 8 | Updated report.md, design docs, and tfvars.example | `Create design.md`, `update report` |
|
||||||
|
| **Total** | | **105** | | |
|
||||||
|
|
||||||
### Dejan
|
### Dejan
|
||||||
|
|
||||||
| Date | Activity | Hours | Description |
|
| Date | Activity | Hours | Description | Representative Commit / PR |
|
||||||
|-------------|----------------------|--------|--------------------------------|
|
|:----------------|:---------------------|:-------|:--------------------------------------------------------------|:---------------------------------------------------------|
|
||||||
| 25.9. | Design | 1.5 | 6design |
|
| 25.9. | Design | 2 | 6design | |
|
||||||
| 9-11.10. | Backend APIs | 10 | Implemented Backend APIs |
|
| 9.10 to 11.10. | Backend APIs | 14 | Implemented Backend APIs | `PR #26`, `20-create-a-controller-layer-on-backend-side` |
|
||||||
| 13-15.10. | Frontend Development | 6.5 | Created user interface mockups |
|
| 13.10 to 15.10. | Frontend Development | 8 | Created user interface mockups | `PR #28`, `frontend basics` |
|
||||||
| Continually | Documantation | 3 | Documenting the dev process |
|
| Continually | Documentation | 7 | Documenting the dev process | |
|
||||||
| **Total** | | **21** | |
|
| 21.10 to 23.10 | Tests, frontend | 10 | Test basics, balance charts, and frontend improvement | `PR #31`, `30 create tests and set up a GitHub pipeline` |
|
||||||
|
| 28.10 to 30.10 | CI | 6 | Integrated tests with test database setup on github workflows | `PR #28`, `frontend basics` |
|
||||||
|
| 28.10 to 30.10 | Frontend | 8 | UI improvements and exchange rate API integration | `PR #28`, `frontend basics` |
|
||||||
|
| 4.11 to 6.11 | Tests | 6 | Test fixes improvement, more integration and e2e | `PR #28`, `frontend basics` |
|
||||||
|
| 4.11 to 6.11 | Frontend | 6 | Fixes, Improved UI, added support for mobile devices | `PR #28`, `frontend basics` |
|
||||||
|
| 11.11 | Backend APIs | 4 | Moved rates API, mock bank to Backend, few fixes | `PR #28`, `frontend basics` |
|
||||||
|
| 11.11 to 12.11 | Tests | 3 | Local testing DB container, few fixes | `PR #28`, `frontend basics` |
|
||||||
|
| 12.11 | Frontend | 3 | Enabled multiple transaction edits at once, CSAS button state | `PR #28`, `frontend basics` |
|
||||||
|
| 13.11 | Video | 3 | Video | |
|
||||||
|
| **Total** | | **80** | | |
|
||||||
|
|
||||||
### Group Total: [XXX.X] hours
|
### Group Total: [XXX.X] hours
|
||||||
|
|
||||||
@@ -371,24 +638,52 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
|||||||
|
|
||||||
### Challenges Faced
|
### Challenges Faced
|
||||||
|
|
||||||
[Describe the main challenges and how you overcame them]
|
#### Slow cluster performance
|
||||||
|
|
||||||
|
This was caused by single SATA SSD disk running all VMs. This was solved by adding second NVMe disk just for Talos VMs.
|
||||||
|
|
||||||
|
#### Stucked IaC deployment
|
||||||
|
|
||||||
|
If the deployed module (helm chart for example) was not configured properly, it would get stuck and timeout resulting in
|
||||||
|
namespace that cannot be deleted.
|
||||||
|
This was solved by using snapshots in Proxmox and restoring if this happened.
|
||||||
|
|
||||||
### If We Did This Again
|
### If We Did This Again
|
||||||
|
|
||||||
|
#### Different framework
|
||||||
|
|
||||||
|
FastAPI lacks usable build in support for database migrations and implementing Alembic was a bit tricky.
|
||||||
|
Tricky was also integrating FastAPI auth system with React frontend, since there is no official project template.
|
||||||
|
Using .NET (which we considered initially) would probably solve these issues.
|
||||||
|
|
||||||
|
#### Private container registry
|
||||||
|
|
||||||
|
Using private container registry would allow us to include environment variables directly in the image during build.
|
||||||
|
This would simplify deployment and CI/CD setup.
|
||||||
|
|
||||||
[What would you do differently? What worked well that you'd keep?]
|
[What would you do differently? What worked well that you'd keep?]
|
||||||
|
|
||||||
### Individual Growth
|
### Individual Growth
|
||||||
|
|
||||||
#### [Team Member 1 Name]
|
#### [Lukas]
|
||||||
|
|
||||||
[Personal reflection on growth, challenges, and learning]
|
This course finally forced me to learn kubernetes (been on by TODO list for at least 3 years).
|
||||||
|
I had some prior experience with terraform/opentofu from work but this improved by understanding of it.
|
||||||
|
|
||||||
#### [Team Member 2 Name]
|
The biggest challenge for me was time tracking since I am used to tracking to projects, not to tasks.
|
||||||
|
(I am bad even at that :) ).
|
||||||
|
|
||||||
|
It was also interesting experience to be the one responsible for the initial project structure/design/setup
|
||||||
|
used not only by myself.
|
||||||
|
|
||||||
|
#### [Dejan]
|
||||||
|
|
||||||
|
Since I do not have a job, this project was probably the most complex one I have ever worked on.
|
||||||
|
It was also the first school project where I was encouraged to use AI.
|
||||||
|
|
||||||
[Personal reflection on growth, challenges, and learning]
|
|
||||||
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
**Report Completion Date**: [Date]
|
**Report Completion Date**: [Date]
|
||||||
**Last Updated**: 15.10.2025
|
**Last Updated**: 13.11.2025
|
||||||
23
7project/src/README.md
Normal file
23
7project/src/README.md
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
## Folder structure
|
||||||
|
|
||||||
|
- `src/`
|
||||||
|
- `backend/`
|
||||||
|
- `alembic/` - database migrations
|
||||||
|
- `app/` - main application code
|
||||||
|
- `tests/` - tests
|
||||||
|
- `docker-compose.test.yml` - docker compose for testing database
|
||||||
|
- `Dockerfile` - production Dockerfile
|
||||||
|
- `main.py` - App entrypoint
|
||||||
|
- `requirements.txt` - Python dependencies
|
||||||
|
- `test_locally.sh` - script to run tests with temporary database
|
||||||
|
- `charts/`
|
||||||
|
- `myapp-chart/` - Helm chart for deploying the application, supports prod and dev environments
|
||||||
|
- `frontend/` - React frontend application
|
||||||
|
- `tofu/` - Terraform/OpenTofu services deployment configurations
|
||||||
|
- `modules/` - separated modules for different services
|
||||||
|
- `main.tf` - main deployment configuration
|
||||||
|
- `variables.tf` - deployment variables
|
||||||
|
- `terraform.tfvars.example` - example variables file
|
||||||
|
- `compose.yaml` - Docker Compose file for local development
|
||||||
|
- `create_migration.sh` - script to create new Alembic database migration
|
||||||
|
- `upgrade_database.sh` - script to upgrade database to latest Alembic revision
|
||||||
8
7project/src/backend/.idea/.gitignore
generated
vendored
Normal file
8
7project/src/backend/.idea/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
# Default ignored files
|
||||||
|
/shelf/
|
||||||
|
/workspace.xml
|
||||||
|
# Editor-based HTTP Client requests
|
||||||
|
/httpRequests/
|
||||||
|
# Datasource local storage ignored files
|
||||||
|
/dataSources/
|
||||||
|
/dataSources.local.xml
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM python:3.11-slim
|
FROM python:3.11-trixie
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
@@ -25,7 +25,8 @@ if not DATABASE_URL:
|
|||||||
|
|
||||||
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
||||||
|
|
||||||
ssl_enabled = os.getenv("MARIADB_HOST", "localhost") != "localhost"
|
host_env = os.getenv("MARIADB_HOST", "localhost")
|
||||||
|
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
|
||||||
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
||||||
|
|
||||||
def run_migrations_offline() -> None:
|
def run_migrations_offline() -> None:
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
"""add date to transaction
|
||||||
|
|
||||||
|
Revision ID: 1f2a3c4d5e6f
|
||||||
|
Revises: eabec90a94fe
|
||||||
|
Create Date: 2025-10-22 16:18:00
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.sql import func
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '1f2a3c4d5e6f'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = 'eabec90a94fe'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema by adding date column with server default current_date."""
|
||||||
|
op.add_column(
|
||||||
|
'transaction',
|
||||||
|
sa.Column('date', sa.Date(), nullable=False, server_default=sa.text('CURRENT_DATE'))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema by removing date column."""
|
||||||
|
op.drop_column('transaction', 'date')
|
||||||
@@ -0,0 +1,47 @@
|
|||||||
|
"""Add encrypted type
|
||||||
|
|
||||||
|
Revision ID: 46b9e702e83f
|
||||||
|
Revises: 1f2a3c4d5e6f
|
||||||
|
Create Date: 2025-10-29 13:26:24.568523
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
import sqlalchemy_utils
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import mysql
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '46b9e702e83f'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '1f2a3c4d5e6f'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.alter_column('transaction', 'amount',
|
||||||
|
existing_type=mysql.FLOAT(),
|
||||||
|
type_=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
|
||||||
|
existing_nullable=False)
|
||||||
|
op.alter_column('transaction', 'description',
|
||||||
|
existing_type=mysql.VARCHAR(length=255),
|
||||||
|
type_=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
|
||||||
|
existing_nullable=True)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.alter_column('transaction', 'description',
|
||||||
|
existing_type=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
|
||||||
|
type_=mysql.VARCHAR(length=255),
|
||||||
|
existing_nullable=True)
|
||||||
|
op.alter_column('transaction', 'amount',
|
||||||
|
existing_type=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
|
||||||
|
type_=mysql.FLOAT(),
|
||||||
|
existing_nullable=False)
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,46 @@
|
|||||||
|
"""Cascade categories
|
||||||
|
|
||||||
|
Revision ID: 59cebf320c4a
|
||||||
|
Revises: 46b9e702e83f
|
||||||
|
Create Date: 2025-10-30 13:42:44.555284
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import mysql
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = '59cebf320c4a'
|
||||||
|
down_revision: Union[str, Sequence[str], None] = '46b9e702e83f'
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
"""Upgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column('category_transaction', sa.Column('category_id', sa.Integer(), nullable=False))
|
||||||
|
op.add_column('category_transaction', sa.Column('transaction_id', sa.Integer(), nullable=False))
|
||||||
|
op.drop_constraint(op.f('category_transaction_ibfk_2'), 'category_transaction', type_='foreignkey')
|
||||||
|
op.drop_constraint(op.f('category_transaction_ibfk_1'), 'category_transaction', type_='foreignkey')
|
||||||
|
op.create_foreign_key(None, 'category_transaction', 'transaction', ['transaction_id'], ['id'], ondelete='CASCADE')
|
||||||
|
op.create_foreign_key(None, 'category_transaction', 'categories', ['category_id'], ['id'], ondelete='CASCADE')
|
||||||
|
op.drop_column('category_transaction', 'id_category')
|
||||||
|
op.drop_column('category_transaction', 'id_transaction')
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
"""Downgrade schema."""
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column('category_transaction', sa.Column('id_transaction', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
|
||||||
|
op.add_column('category_transaction', sa.Column('id_category', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
|
||||||
|
op.drop_constraint(None, 'category_transaction', type_='foreignkey')
|
||||||
|
op.drop_constraint(None, 'category_transaction', type_='foreignkey')
|
||||||
|
op.create_foreign_key(op.f('category_transaction_ibfk_1'), 'category_transaction', 'categories', ['id_category'], ['id'])
|
||||||
|
op.create_foreign_key(op.f('category_transaction_ibfk_2'), 'category_transaction', 'transaction', ['id_transaction'], ['id'])
|
||||||
|
op.drop_column('category_transaction', 'transaction_id')
|
||||||
|
op.drop_column('category_transaction', 'category_id')
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -24,6 +24,23 @@ async def delete_me(
|
|||||||
await user_manager.delete(user)
|
await user_manager.delete(user)
|
||||||
|
|
||||||
# Keep existing paths as-is under /auth/* and /users/*
|
# Keep existing paths as-is under /auth/* and /users/*
|
||||||
|
from fastapi import Request, Response
|
||||||
|
from app.core.security import revoke_token, extract_bearer_token
|
||||||
|
|
||||||
|
|
||||||
|
@router.post(
|
||||||
|
"/auth/jwt/logout",
|
||||||
|
status_code=status.HTTP_204_NO_CONTENT,
|
||||||
|
tags=["auth"],
|
||||||
|
summary="Log out and revoke current token",
|
||||||
|
)
|
||||||
|
async def custom_logout(request: Request) -> Response:
|
||||||
|
"""Revoke the current bearer token so it cannot be used anymore."""
|
||||||
|
token = extract_bearer_token(request)
|
||||||
|
if token:
|
||||||
|
revoke_token(token)
|
||||||
|
return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
router.include_router(
|
router.include_router(
|
||||||
fastapi_users.get_auth_router(auth_backend), prefix="/auth/jwt", tags=["auth"]
|
fastapi_users.get_auth_router(auth_backend), prefix="/auth/jwt", tags=["auth"]
|
||||||
)
|
)
|
||||||
@@ -5,7 +5,7 @@ from sqlalchemy import select, delete
|
|||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
from app.models.categories import Category
|
from app.models.categories import Category
|
||||||
from app.schemas.category import CategoryCreate, CategoryRead
|
from app.schemas.category import CategoryCreate, CategoryRead, CategoryUpdate
|
||||||
from app.services.db import get_async_session
|
from app.services.db import get_async_session
|
||||||
from app.services.user_service import current_active_user
|
from app.services.user_service import current_active_user
|
||||||
from app.models.user import User
|
from app.models.user import User
|
||||||
@@ -43,6 +43,37 @@ async def list_categories(
|
|||||||
return list(res.scalars())
|
return list(res.scalars())
|
||||||
|
|
||||||
|
|
||||||
|
@router.patch("/{category_id}", response_model=CategoryRead)
|
||||||
|
async def update_category(
|
||||||
|
category_id: int,
|
||||||
|
payload: CategoryUpdate,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
res = await session.execute(
|
||||||
|
select(Category).where(Category.id == category_id, Category.user_id == user.id)
|
||||||
|
)
|
||||||
|
category = res.scalar_one_or_none()
|
||||||
|
if not category:
|
||||||
|
raise HTTPException(status_code=404, detail="Category not found")
|
||||||
|
|
||||||
|
# If name changed, check uniqueness per user
|
||||||
|
if payload.name is not None and payload.name != category.name:
|
||||||
|
dup = await session.execute(
|
||||||
|
select(Category.id).where(Category.user_id == user.id, Category.name == payload.name)
|
||||||
|
)
|
||||||
|
if dup.scalar_one_or_none() is not None:
|
||||||
|
raise HTTPException(status_code=409, detail="Category with this name already exists")
|
||||||
|
category.name = payload.name
|
||||||
|
|
||||||
|
if payload.description is not None:
|
||||||
|
category.description = payload.description
|
||||||
|
|
||||||
|
await session.commit()
|
||||||
|
await session.refresh(category)
|
||||||
|
return category
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{category_id}", response_model=CategoryRead)
|
@router.get("/{category_id}", response_model=CategoryRead)
|
||||||
async def get_category(
|
async def get_category(
|
||||||
category_id: int,
|
category_id: int,
|
||||||
66
7project/src/backend/app/api/exchange_rates.py
Normal file
66
7project/src/backend/app/api/exchange_rates.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import os
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from fastapi import APIRouter, HTTPException, Query, status
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/exchange-rates", tags=["exchange-rates"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("", status_code=status.HTTP_200_OK)
|
||||||
|
async def get_exchange_rates(symbols: str = Query("EUR,USD,NOK", description="Comma-separated currency codes to fetch vs CZK")):
|
||||||
|
"""
|
||||||
|
Fetch exchange rates from UniRate API on the backend and return CZK-per-target rates.
|
||||||
|
- Always requests CZK in addition to requested symbols to compute conversion from USD-base.
|
||||||
|
- Returns a list of {currencyCode, rate} where rate is CZK per 1 unit of the target currency.
|
||||||
|
"""
|
||||||
|
api_key = os.getenv("UNIRATE_API_KEY")
|
||||||
|
if not api_key:
|
||||||
|
raise HTTPException(status_code=500, detail="Server is not configured with UNIRATE_API_KEY")
|
||||||
|
|
||||||
|
# Ensure CZK is included for conversion
|
||||||
|
requested = [s.strip().upper() for s in symbols.split(",") if s.strip()]
|
||||||
|
if "CZK" not in requested:
|
||||||
|
requested.append("CZK")
|
||||||
|
query_symbols = ",".join(sorted(set(requested)))
|
||||||
|
|
||||||
|
url = f"https://unirateapi.com/api/rates?api_key={api_key}&symbols={query_symbols}"
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with httpx.AsyncClient(timeout=httpx.Timeout(15.0)) as client:
|
||||||
|
resp = await client.get(url)
|
||||||
|
if resp.status_code != httpx.codes.OK:
|
||||||
|
raise HTTPException(status_code=502, detail=f"Upstream UniRate error: HTTP {resp.status_code}")
|
||||||
|
data = resp.json()
|
||||||
|
except httpx.HTTPError as e:
|
||||||
|
raise HTTPException(status_code=502, detail=f"Failed to contact UniRate: {str(e)}")
|
||||||
|
|
||||||
|
# Validate response structure
|
||||||
|
rates = data.get("rates") if isinstance(data, dict) else None
|
||||||
|
base = data.get("base") if isinstance(data, dict) else None
|
||||||
|
if not rates or base != "USD" or "CZK" not in rates:
|
||||||
|
# Prefer upstream message when available
|
||||||
|
detail = data.get("message") if isinstance(data, dict) else None
|
||||||
|
if not detail and isinstance(data, dict):
|
||||||
|
err = data.get("error")
|
||||||
|
if isinstance(err, dict):
|
||||||
|
detail = err.get("info")
|
||||||
|
raise HTTPException(status_code=502, detail=detail or "Invalid response from UniRate API")
|
||||||
|
|
||||||
|
czk_per_usd = rates["CZK"]
|
||||||
|
|
||||||
|
# Build result excluding CZK itself
|
||||||
|
result = []
|
||||||
|
for code in requested:
|
||||||
|
if code == "CZK":
|
||||||
|
continue
|
||||||
|
target_per_usd = rates.get(code)
|
||||||
|
if target_per_usd in (None, 0):
|
||||||
|
# Skip unavailable or invalid
|
||||||
|
continue
|
||||||
|
czk_per_target = czk_per_usd / target_per_usd
|
||||||
|
result.append({"currencyCode": code, "rate": czk_per_target})
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
116
7project/src/backend/app/api/mock_bank.py
Normal file
116
7project/src/backend/app/api/mock_bank.py
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import List, Optional
|
||||||
|
import random
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends
|
||||||
|
from pydantic import BaseModel, Field, conint, confloat, validator
|
||||||
|
from sqlalchemy import select
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from app.services.db import get_async_session
|
||||||
|
from app.services.user_service import current_active_user
|
||||||
|
from app.models.user import User
|
||||||
|
from app.models.transaction import Transaction
|
||||||
|
from app.models.categories import Category
|
||||||
|
from app.schemas.transaction import TransactionRead
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/mock-bank", tags=["mock-bank"])
|
||||||
|
|
||||||
|
|
||||||
|
class GenerateOptions(BaseModel):
|
||||||
|
count: conint(strict=True, gt=0) = Field(default=10, description="Number of transactions to generate")
|
||||||
|
minAmount: confloat(strict=True) = Field(default=-200.0, description="Minimum transaction amount")
|
||||||
|
maxAmount: confloat(strict=True) = Field(default=200.0, description="Maximum transaction amount")
|
||||||
|
startDate: Optional[str] = Field(None, description="Earliest date (YYYY-MM-DD)")
|
||||||
|
endDate: Optional[str] = Field(None, description="Latest date (YYYY-MM-DD)")
|
||||||
|
categoryIds: List[int] = Field(default_factory=list, description="Optional category IDs to assign randomly")
|
||||||
|
|
||||||
|
@validator("maxAmount")
|
||||||
|
def _validate_amounts(cls, v, values):
|
||||||
|
min_amt = values.get("minAmount")
|
||||||
|
if min_amt is not None and v < min_amt:
|
||||||
|
raise ValueError("maxAmount must be greater than or equal to minAmount")
|
||||||
|
return v
|
||||||
|
|
||||||
|
@validator("endDate")
|
||||||
|
def _validate_dates(cls, v, values):
|
||||||
|
sd = values.get("startDate")
|
||||||
|
if v and sd:
|
||||||
|
try:
|
||||||
|
ed = datetime.strptime(v, "%Y-%m-%d").date()
|
||||||
|
st = datetime.strptime(sd, "%Y-%m-%d").date()
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError("Invalid date format, expected YYYY-MM-DD")
|
||||||
|
if ed < st:
|
||||||
|
raise ValueError("endDate must be greater than or equal to startDate")
|
||||||
|
return v
|
||||||
|
|
||||||
|
|
||||||
|
class GeneratedTransaction(BaseModel):
|
||||||
|
amount: float
|
||||||
|
date: str # YYYY-MM-DD
|
||||||
|
category_ids: List[int] = []
|
||||||
|
description: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/generate", response_model=List[GeneratedTransaction])
|
||||||
|
async def generate_mock_transactions(
|
||||||
|
options: GenerateOptions,
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
# Seed randomness per user to make results less erratic across multiple calls in quick succession
|
||||||
|
seed = int(datetime.utcnow().timestamp()) ^ int(user.id)
|
||||||
|
rnd = random.Random(seed)
|
||||||
|
|
||||||
|
# Determine date range
|
||||||
|
if options.startDate:
|
||||||
|
start_date = datetime.strptime(options.startDate, "%Y-%m-%d").date()
|
||||||
|
else:
|
||||||
|
start_date = (datetime.utcnow() - timedelta(days=365)).date()
|
||||||
|
if options.endDate:
|
||||||
|
end_date = datetime.strptime(options.endDate, "%Y-%m-%d").date()
|
||||||
|
else:
|
||||||
|
end_date = datetime.utcnow().date()
|
||||||
|
|
||||||
|
span_days = max(0, (end_date - start_date).days)
|
||||||
|
|
||||||
|
results: List[GeneratedTransaction] = []
|
||||||
|
for _ in range(options.count):
|
||||||
|
amount = round(rnd.uniform(options.minAmount, options.maxAmount), 2)
|
||||||
|
# Pick a random date in the inclusive range
|
||||||
|
rand_day = rnd.randint(0, span_days) if span_days > 0 else 0
|
||||||
|
tx_date = start_date + timedelta(days=rand_day)
|
||||||
|
# Pick category randomly from provided list, or empty
|
||||||
|
if options.categoryIds:
|
||||||
|
cat = [rnd.choice(options.categoryIds)]
|
||||||
|
else:
|
||||||
|
cat = []
|
||||||
|
# Optional simple description for flavor
|
||||||
|
desc = None
|
||||||
|
# Assemble
|
||||||
|
results.append(GeneratedTransaction(
|
||||||
|
amount=amount,
|
||||||
|
date=tx_date.isoformat(),
|
||||||
|
category_ids=cat,
|
||||||
|
description=desc,
|
||||||
|
))
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/scrape")
|
||||||
|
async def scrape_mock_bank():
|
||||||
|
# 80% of the time: nothing to scrape
|
||||||
|
if random.random() < 0.8:
|
||||||
|
return []
|
||||||
|
|
||||||
|
transactions = []
|
||||||
|
count = random.randint(1, 10)
|
||||||
|
for _ in range(count):
|
||||||
|
transactions.append({
|
||||||
|
"amount": round(random.uniform(-200.0, 200.0), 2),
|
||||||
|
"date": (datetime.utcnow().date() - timedelta(days=random.randint(0, 30))).isoformat(),
|
||||||
|
"description": "Mock transaction",
|
||||||
|
})
|
||||||
|
|
||||||
|
return transactions
|
||||||
@@ -1,7 +1,8 @@
|
|||||||
from typing import List, Optional
|
from typing import List, Optional
|
||||||
|
from datetime import date
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select, and_, func
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
from app.models.transaction import Transaction
|
from app.models.transaction import Transaction
|
||||||
@@ -23,6 +24,7 @@ def _to_read_model(tx: Transaction) -> TransactionRead:
|
|||||||
id=tx.id,
|
id=tx.id,
|
||||||
amount=tx.amount,
|
amount=tx.amount,
|
||||||
description=tx.description,
|
description=tx.description,
|
||||||
|
date=tx.date,
|
||||||
category_ids=[c.id for c in (tx.categories or [])],
|
category_ids=[c.id for c in (tx.categories or [])],
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -33,7 +35,21 @@ async def create_transaction(
|
|||||||
session: AsyncSession = Depends(get_async_session),
|
session: AsyncSession = Depends(get_async_session),
|
||||||
user: User = Depends(current_active_user),
|
user: User = Depends(current_active_user),
|
||||||
):
|
):
|
||||||
tx = Transaction(amount=payload.amount, description=payload.description, user_id=user.id)
|
# Build transaction; set `date` only if provided to let DB default apply otherwise
|
||||||
|
tx_kwargs = dict(
|
||||||
|
amount=payload.amount,
|
||||||
|
description=payload.description,
|
||||||
|
user_id=user.id,
|
||||||
|
)
|
||||||
|
if payload.date is not None:
|
||||||
|
parsed_date = payload.date
|
||||||
|
if isinstance(parsed_date, str):
|
||||||
|
try:
|
||||||
|
parsed_date = date.fromisoformat(parsed_date)
|
||||||
|
except ValueError:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid date format, expected YYYY-MM-DD")
|
||||||
|
tx_kwargs["date"] = parsed_date
|
||||||
|
tx = Transaction(**tx_kwargs)
|
||||||
|
|
||||||
# Attach categories if provided (and owned by user)
|
# Attach categories if provided (and owned by user)
|
||||||
if payload.category_ids:
|
if payload.category_ids:
|
||||||
@@ -60,11 +76,18 @@ async def create_transaction(
|
|||||||
|
|
||||||
@router.get("/", response_model=List[TransactionRead])
|
@router.get("/", response_model=List[TransactionRead])
|
||||||
async def list_transactions(
|
async def list_transactions(
|
||||||
|
start_date: Optional[date] = None,
|
||||||
|
end_date: Optional[date] = None,
|
||||||
session: AsyncSession = Depends(get_async_session),
|
session: AsyncSession = Depends(get_async_session),
|
||||||
user: User = Depends(current_active_user),
|
user: User = Depends(current_active_user),
|
||||||
):
|
):
|
||||||
|
cond = [Transaction.user_id == user.id]
|
||||||
|
if start_date is not None:
|
||||||
|
cond.append(Transaction.date >= start_date)
|
||||||
|
if end_date is not None:
|
||||||
|
cond.append(Transaction.date <= end_date)
|
||||||
res = await session.execute(
|
res = await session.execute(
|
||||||
select(Transaction).where(Transaction.user_id == user.id).order_by(Transaction.id)
|
select(Transaction).where(and_(*cond)).order_by(Transaction.date, Transaction.id)
|
||||||
)
|
)
|
||||||
txs = list(res.scalars())
|
txs = list(res.scalars())
|
||||||
# Eagerly load categories for each transaction
|
# Eagerly load categories for each transaction
|
||||||
@@ -73,6 +96,36 @@ async def list_transactions(
|
|||||||
return [_to_read_model(tx) for tx in txs]
|
return [_to_read_model(tx) for tx in txs]
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/balance_series")
|
||||||
|
async def get_balance_series(
|
||||||
|
start_date: Optional[date] = None,
|
||||||
|
end_date: Optional[date] = None,
|
||||||
|
session: AsyncSession = Depends(get_async_session),
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
cond = [Transaction.user_id == user.id]
|
||||||
|
if start_date is not None:
|
||||||
|
cond.append(Transaction.date >= start_date)
|
||||||
|
if end_date is not None:
|
||||||
|
cond.append(Transaction.date <= end_date)
|
||||||
|
res = await session.execute(
|
||||||
|
select(Transaction).where(and_(*cond)).order_by(Transaction.date, Transaction.id)
|
||||||
|
)
|
||||||
|
txs = list(res.scalars())
|
||||||
|
# Group by date and accumulate
|
||||||
|
daily = {}
|
||||||
|
for tx in txs:
|
||||||
|
key = tx.date.isoformat() if hasattr(tx.date, 'isoformat') else str(tx.date)
|
||||||
|
daily[key] = daily.get(key, 0.0) + float(tx.amount)
|
||||||
|
# Build cumulative series sorted by date
|
||||||
|
series = []
|
||||||
|
running = 0.0
|
||||||
|
for d in sorted(daily.keys()):
|
||||||
|
running += daily[d]
|
||||||
|
series.append({"date": d, "balance": running})
|
||||||
|
return series
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{transaction_id}", response_model=TransactionRead)
|
@router.get("/{transaction_id}", response_model=TransactionRead)
|
||||||
async def get_transaction(
|
async def get_transaction(
|
||||||
transaction_id: int,
|
transaction_id: int,
|
||||||
@@ -111,6 +164,14 @@ async def update_transaction(
|
|||||||
tx.amount = payload.amount
|
tx.amount = payload.amount
|
||||||
if payload.description is not None:
|
if payload.description is not None:
|
||||||
tx.description = payload.description
|
tx.description = payload.description
|
||||||
|
if payload.date is not None:
|
||||||
|
new_date = payload.date
|
||||||
|
if isinstance(new_date, str):
|
||||||
|
try:
|
||||||
|
new_date = date.fromisoformat(new_date)
|
||||||
|
except ValueError:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid date format, expected YYYY-MM-DD")
|
||||||
|
tx.date = new_date
|
||||||
|
|
||||||
if payload.category_ids is not None:
|
if payload.category_ids is not None:
|
||||||
# Preload categories to avoid async lazy-load during assignment
|
# Preload categories to avoid async lazy-load during assignment
|
||||||
@@ -1,11 +1,17 @@
|
|||||||
|
import json
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
import sys
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from pythonjsonlogger import jsonlogger
|
||||||
|
|
||||||
from fastapi import Depends, FastAPI
|
from fastapi import Depends, FastAPI, HTTPException
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from prometheus_fastapi_instrumentator import Instrumentator, metrics
|
||||||
from starlette.requests import Request
|
from starlette.requests import Request
|
||||||
|
|
||||||
|
from app.services.prometheus import number_of_users, number_of_transactions
|
||||||
|
|
||||||
from app.services import bank_scraper
|
from app.services import bank_scraper
|
||||||
from app.workers.celery_tasks import load_transactions, load_all_transactions
|
from app.workers.celery_tasks import load_transactions, load_all_transactions
|
||||||
from app.models.user import User, OAuthAccount
|
from app.models.user import User, OAuthAccount
|
||||||
@@ -15,18 +21,23 @@ from app.api.auth import router as auth_router
|
|||||||
from app.api.csas import router as csas_router
|
from app.api.csas import router as csas_router
|
||||||
from app.api.categories import router as categories_router
|
from app.api.categories import router as categories_router
|
||||||
from app.api.transactions import router as transactions_router
|
from app.api.transactions import router as transactions_router
|
||||||
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users, get_oauth_provider, UserManager, get_jwt_strategy
|
from app.api.exchange_rates import router as exchange_rates_router
|
||||||
|
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users, get_oauth_provider, \
|
||||||
|
UserManager, get_jwt_strategy
|
||||||
|
from app.core.security import extract_bearer_token, is_token_revoked, decode_and_verify_jwt
|
||||||
|
from app.services.user_service import SECRET
|
||||||
|
|
||||||
from fastapi import FastAPI
|
from fastapi import FastAPI
|
||||||
import sentry_sdk
|
import sentry_sdk
|
||||||
from fastapi_users.db import SQLAlchemyUserDatabase
|
from fastapi_users.db import SQLAlchemyUserDatabase
|
||||||
from app.core.db import async_session_maker
|
from app.core.db import async_session_maker, engine
|
||||||
|
from app.core.base import Base
|
||||||
|
|
||||||
sentry_sdk.init(
|
sentry_sdk.init(
|
||||||
dsn=os.getenv("SENTRY_DSN"),
|
dsn=os.getenv("SENTRY_DSN"),
|
||||||
send_default_pii=True,
|
send_default_pii=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
fastApi = FastAPI()
|
fastApi = FastAPI()
|
||||||
|
|
||||||
# CORS for frontend dev server
|
# CORS for frontend dev server
|
||||||
@@ -42,11 +53,59 @@ fastApi.add_middleware(
|
|||||||
allow_headers=["*"],
|
allow_headers=["*"],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
if not os.getenv("PYTEST_RUN_CONFIG"):
|
||||||
|
prometheus = Instrumentator().instrument(fastApi)
|
||||||
|
# Register custom metrics
|
||||||
|
prometheus.add(number_of_users()).add(number_of_transactions())
|
||||||
|
prometheus.expose(
|
||||||
|
fastApi,
|
||||||
|
endpoint="/metrics",
|
||||||
|
include_in_schema=True,
|
||||||
|
)
|
||||||
|
|
||||||
fastApi.include_router(auth_router)
|
fastApi.include_router(auth_router)
|
||||||
fastApi.include_router(categories_router)
|
fastApi.include_router(categories_router)
|
||||||
fastApi.include_router(transactions_router)
|
fastApi.include_router(transactions_router)
|
||||||
|
fastApi.include_router(exchange_rates_router)
|
||||||
|
from app.api.mock_bank import router as mock_bank_router
|
||||||
|
fastApi.include_router(mock_bank_router)
|
||||||
|
|
||||||
|
for h in list(logging.root.handlers):
|
||||||
|
logging.root.removeHandler(h)
|
||||||
|
|
||||||
|
_log_handler = logging.StreamHandler(sys.stdout)
|
||||||
|
_formatter = jsonlogger.JsonFormatter(
|
||||||
|
fmt='%(asctime)s %(levelname)s %(name)s %(message)s %(pathname)s %(lineno)d %(process)d %(thread)d'
|
||||||
|
)
|
||||||
|
_log_handler.setFormatter(_formatter)
|
||||||
|
|
||||||
|
logging.root.setLevel(logging.INFO)
|
||||||
|
logging.root.addHandler(_log_handler)
|
||||||
|
|
||||||
|
for _name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
|
||||||
|
_logger = logging.getLogger(_name)
|
||||||
|
_logger.handlers = [_log_handler]
|
||||||
|
_logger.propagate = True
|
||||||
|
|
||||||
|
|
||||||
|
@fastApi.middleware("http")
|
||||||
|
async def auth_guard(request: Request, call_next):
|
||||||
|
# Enforce revoked/expired JWTs are rejected globally
|
||||||
|
token = extract_bearer_token(request)
|
||||||
|
if token:
|
||||||
|
from fastapi import Response, status as _status
|
||||||
|
# Deny if token is revoked
|
||||||
|
if is_token_revoked(token):
|
||||||
|
return Response(status_code=_status.HTTP_401_UNAUTHORIZED)
|
||||||
|
# Deny if token is expired or invalid
|
||||||
|
try:
|
||||||
|
decode_and_verify_jwt(token, SECRET)
|
||||||
|
except Exception:
|
||||||
|
return Response(status_code=_status.HTTP_401_UNAUTHORIZED)
|
||||||
|
return await call_next(request)
|
||||||
|
|
||||||
|
|
||||||
logging.basicConfig(filename='app.log', level=logging.INFO, format='%(asctime)s %(message)s')
|
|
||||||
@fastApi.middleware("http")
|
@fastApi.middleware("http")
|
||||||
async def log_traffic(request: Request, call_next):
|
async def log_traffic(request: Request, call_next):
|
||||||
start_time = datetime.now()
|
start_time = datetime.now()
|
||||||
@@ -64,16 +123,17 @@ async def log_traffic(request: Request, call_next):
|
|||||||
"process_time": process_time,
|
"process_time": process_time,
|
||||||
"client_host": client_host
|
"client_host": client_host
|
||||||
}
|
}
|
||||||
logging.info(str(log_params))
|
logging.getLogger(__name__).info("http_request", extra=log_params)
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
|
||||||
fastApi.include_router(
|
fastApi.include_router(
|
||||||
fastapi_users.get_oauth_router(
|
fastapi_users.get_oauth_router(
|
||||||
get_oauth_provider("MojeID"),
|
get_oauth_provider("MojeID"),
|
||||||
auth_backend,
|
auth_backend,
|
||||||
"SECRET",
|
"SECRET",
|
||||||
associate_by_email=True,
|
associate_by_email=True,
|
||||||
redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME") + "/auth/mojeid/callback",
|
redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000") + "/auth/mojeid/callback",
|
||||||
),
|
),
|
||||||
prefix="/auth/mojeid",
|
prefix="/auth/mojeid",
|
||||||
tags=["auth"],
|
tags=["auth"],
|
||||||
@@ -85,7 +145,7 @@ fastApi.include_router(
|
|||||||
auth_backend,
|
auth_backend,
|
||||||
"SECRET",
|
"SECRET",
|
||||||
associate_by_email=True,
|
associate_by_email=True,
|
||||||
redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME") + "/auth/bankid/callback",
|
redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000") + "/auth/bankid/callback",
|
||||||
),
|
),
|
||||||
prefix="/auth/bankid",
|
prefix="/auth/bankid",
|
||||||
tags=["auth"],
|
tags=["auth"],
|
||||||
@@ -93,6 +153,7 @@ fastApi.include_router(
|
|||||||
|
|
||||||
fastApi.include_router(csas_router)
|
fastApi.include_router(csas_router)
|
||||||
|
|
||||||
|
|
||||||
# Liveness/root endpoint
|
# Liveness/root endpoint
|
||||||
@fastApi.get("/", include_in_schema=False)
|
@fastApi.get("/", include_in_schema=False)
|
||||||
async def root():
|
async def root():
|
||||||
@@ -103,20 +164,13 @@ async def root():
|
|||||||
async def authenticated_route(user: User = Depends(current_active_verified_user)):
|
async def authenticated_route(user: User = Depends(current_active_verified_user)):
|
||||||
return {"message": f"Hello {user.email}!"}
|
return {"message": f"Hello {user.email}!"}
|
||||||
|
|
||||||
@fastApi.get("/sentry-debug")
|
|
||||||
async def trigger_error():
|
|
||||||
division_by_zero = 1 / 0
|
|
||||||
|
|
||||||
|
@fastApi.get("/_cron", include_in_schema=False)
|
||||||
|
async def handle_cron(request: Request):
|
||||||
|
# endpoint accessed by Clodflare => return 404
|
||||||
|
if request.headers.get("cf-connecting-ip"):
|
||||||
|
raise HTTPException(status_code=404)
|
||||||
|
|
||||||
@fastApi.get("/debug/scrape/csas/all", tags=["debug"])
|
logging.info("[Cron] Triggering scheduled tasks via HTTP endpoint")
|
||||||
async def debug_scrape_csas_all():
|
|
||||||
logging.info("[Debug] Queueing CSAS scrape for all users via HTTP endpoint (Celery)")
|
|
||||||
task = load_all_transactions.delay()
|
task = load_all_transactions.delay()
|
||||||
return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
|
return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
|
||||||
|
|
||||||
|
|
||||||
@fastApi.post("/debug/scrape/csas/{user_id}", tags=["debug"])
|
|
||||||
async def debug_scrape_csas_user(user_id: str, user: User = Depends(current_active_verified_user)):
|
|
||||||
logging.info("[Debug] Queueing CSAS scrape for single user via HTTP endpoint (Celery) | user_id=%s", user_id)
|
|
||||||
task = load_transactions.delay(user_id)
|
|
||||||
return {"status": "queued", "action": "csas_scrape_single", "user_id": user_id, "task_id": getattr(task, 'id', None)}
|
|
||||||
@@ -1,5 +1,7 @@
|
|||||||
import os
|
import os
|
||||||
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
|
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
|
||||||
|
from sqlalchemy import create_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
from app.core.base import Base
|
from app.core.base import Base
|
||||||
|
|
||||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||||
@@ -19,9 +21,11 @@ from app.models.user import User
|
|||||||
from app.models.transaction import Transaction
|
from app.models.transaction import Transaction
|
||||||
from app.models.categories import Category
|
from app.models.categories import Category
|
||||||
|
|
||||||
ssl_enabled = os.getenv("MARIADB_HOST", "localhost") != "localhost"
|
host_env = os.getenv("MARIADB_HOST", "localhost")
|
||||||
|
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
|
||||||
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
||||||
|
|
||||||
|
# Async engine/session for the async parts of the app
|
||||||
engine = create_async_engine(
|
engine = create_async_engine(
|
||||||
DATABASE_URL,
|
DATABASE_URL,
|
||||||
pool_pre_ping=True,
|
pool_pre_ping=True,
|
||||||
@@ -29,3 +33,13 @@ engine = create_async_engine(
|
|||||||
connect_args=connect_args,
|
connect_args=connect_args,
|
||||||
)
|
)
|
||||||
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
|
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
|
||||||
|
|
||||||
|
# Synchronous engine/session for sync utilities (e.g., bank_scraper)
|
||||||
|
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
||||||
|
engine_sync = create_engine(
|
||||||
|
SYNC_DATABASE_URL,
|
||||||
|
pool_pre_ping=True,
|
||||||
|
echo=os.getenv("SQL_ECHO", "0") == "1",
|
||||||
|
connect_args=connect_args,
|
||||||
|
)
|
||||||
|
sync_session_maker = sessionmaker(bind=engine_sync, expire_on_commit=False)
|
||||||
52
7project/src/backend/app/core/security.py
Normal file
52
7project/src/backend/app/core/security.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
from typing import Optional
|
||||||
|
import re
|
||||||
|
import jwt
|
||||||
|
from fastapi import Request
|
||||||
|
|
||||||
|
# Simple in-memory revocation store for revoked JWT tokens.
|
||||||
|
#
|
||||||
|
# Limitations:
|
||||||
|
# - All revoked tokens will be lost if the process restarts (data loss on restart).
|
||||||
|
# - Not suitable for multi-instance deployments: the revocation list is not shared between instances.
|
||||||
|
# A token revoked in one instance will not be recognized as revoked in others.
|
||||||
|
#
|
||||||
|
# For production, use a persistent and shared store (e.g., Redis or a database).
|
||||||
|
_REVOKED_TOKENS: set[str] = set()
|
||||||
|
|
||||||
|
# Bearer token regex
|
||||||
|
_BEARER_RE = re.compile(r"^[Bb]earer\s+(.+)$")
|
||||||
|
|
||||||
|
|
||||||
|
def extract_bearer_token(request: Request) -> Optional[str]:
|
||||||
|
auth = request.headers.get("authorization")
|
||||||
|
if not auth:
|
||||||
|
return None
|
||||||
|
m = _BEARER_RE.match(auth)
|
||||||
|
if not m:
|
||||||
|
return None
|
||||||
|
return m.group(1).strip()
|
||||||
|
|
||||||
|
|
||||||
|
def revoke_token(token: str) -> None:
|
||||||
|
if token:
|
||||||
|
_REVOKED_TOKENS.add(token)
|
||||||
|
|
||||||
|
|
||||||
|
def is_token_revoked(token: str) -> bool:
|
||||||
|
return token in _REVOKED_TOKENS
|
||||||
|
|
||||||
|
|
||||||
|
def decode_and_verify_jwt(token: str, secret: str) -> dict:
|
||||||
|
"""
|
||||||
|
Decode the JWT using the shared secret, verifying expiration and signature.
|
||||||
|
Audience is not verified here to be compatible with fastapi-users default tokens.
|
||||||
|
Raises jwt.ExpiredSignatureError if expired.
|
||||||
|
Raises jwt.InvalidTokenError for other issues.
|
||||||
|
Returns the decoded payload dict on success.
|
||||||
|
"""
|
||||||
|
return jwt.decode(
|
||||||
|
token,
|
||||||
|
secret,
|
||||||
|
algorithms=["HS256"],
|
||||||
|
options={"verify_aud": False},
|
||||||
|
) # verify_exp is True by default
|
||||||
@@ -7,8 +7,8 @@ from app.core.base import Base
|
|||||||
association_table = Table(
|
association_table = Table(
|
||||||
"category_transaction",
|
"category_transaction",
|
||||||
Base.metadata,
|
Base.metadata,
|
||||||
Column("id_category", Integer, ForeignKey("categories.id")),
|
Column("category_id", Integer, ForeignKey("categories.id", ondelete="CASCADE"), primary_key=True),
|
||||||
Column("id_transaction", Integer, ForeignKey("transaction.id"))
|
Column("transaction_id", Integer, ForeignKey("transaction.id", ondelete="CASCADE"), primary_key=True)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -1,17 +1,24 @@
|
|||||||
|
import os
|
||||||
from fastapi_users_db_sqlalchemy import GUID
|
from fastapi_users_db_sqlalchemy import GUID
|
||||||
from sqlalchemy import Column, Integer, String, Float, ForeignKey
|
from sqlalchemy import Column, Integer, String, Float, ForeignKey, Date, func
|
||||||
from sqlalchemy.orm import relationship
|
from sqlalchemy.orm import relationship
|
||||||
|
from sqlalchemy_utils import EncryptedType
|
||||||
|
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
|
||||||
|
|
||||||
from app.core.base import Base
|
from app.core.base import Base
|
||||||
from app.models.categories import association_table
|
from app.models.categories import association_table
|
||||||
|
|
||||||
|
SECRET_KEY = os.environ.get("DB_ENCRYPTION_KEY", "localdev")
|
||||||
|
|
||||||
|
|
||||||
class Transaction(Base):
|
class Transaction(Base):
|
||||||
__tablename__ = "transaction"
|
__tablename__ = "transaction"
|
||||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||||
amount = Column(Float, nullable=False)
|
amount = Column(EncryptedType(Float, SECRET_KEY, engine=FernetEngine), nullable=False)
|
||||||
description = Column(String(length=255), nullable=True)
|
description = Column(EncryptedType(String(length=255), SECRET_KEY, engine=FernetEngine), nullable=True)
|
||||||
|
date = Column(Date, nullable=False, server_default=func.current_date())
|
||||||
user_id = Column(GUID, ForeignKey("user.id"), nullable=False)
|
user_id = Column(GUID, ForeignKey("user.id"), nullable=False)
|
||||||
|
|
||||||
# Relationship
|
# Relationship
|
||||||
user = relationship("User", back_populates="transactions")
|
user = relationship("User", back_populates="transactions")
|
||||||
categories = relationship("Category", secondary=association_table, back_populates="transactions")
|
categories = relationship("Category", secondary=association_table, back_populates="transactions", passive_deletes=True)
|
||||||
@@ -11,6 +11,11 @@ class CategoryCreate(CategoryBase):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class CategoryUpdate(BaseModel):
|
||||||
|
name: Optional[str] = None
|
||||||
|
description: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
class CategoryRead(CategoryBase):
|
class CategoryRead(CategoryBase):
|
||||||
id: int
|
id: int
|
||||||
model_config = ConfigDict(from_attributes=True)
|
model_config = ConfigDict(from_attributes=True)
|
||||||
@@ -1,10 +1,13 @@
|
|||||||
from typing import List, Optional
|
from typing import List, Optional, Union
|
||||||
|
from datetime import date
|
||||||
from pydantic import BaseModel, Field, ConfigDict
|
from pydantic import BaseModel, Field, ConfigDict
|
||||||
|
|
||||||
|
|
||||||
class TransactionBase(BaseModel):
|
class TransactionBase(BaseModel):
|
||||||
amount: float = Field(..., gt=-1e18, lt=1e18)
|
amount: float = Field(..., gt=-1e18, lt=1e18)
|
||||||
description: Optional[str] = None
|
description: Optional[str] = None
|
||||||
|
# accept either ISO date string or date object
|
||||||
|
date: Optional[Union[date, str]] = None
|
||||||
|
|
||||||
class TransactionCreate(TransactionBase):
|
class TransactionCreate(TransactionBase):
|
||||||
category_ids: Optional[List[int]] = None
|
category_ids: Optional[List[int]] = None
|
||||||
@@ -12,10 +15,12 @@ class TransactionCreate(TransactionBase):
|
|||||||
class TransactionUpdate(BaseModel):
|
class TransactionUpdate(BaseModel):
|
||||||
amount: Optional[float] = Field(None, gt=-1e18, lt=1e18)
|
amount: Optional[float] = Field(None, gt=-1e18, lt=1e18)
|
||||||
description: Optional[str] = None
|
description: Optional[str] = None
|
||||||
|
# accept either ISO date string or date object
|
||||||
|
date: Optional[Union[date, str]] = None
|
||||||
category_ids: Optional[List[int]] = None
|
category_ids: Optional[List[int]] = None
|
||||||
|
|
||||||
class TransactionRead(TransactionBase):
|
class TransactionRead(TransactionBase):
|
||||||
id: int
|
id: int
|
||||||
category_ids: List[int] = []
|
category_ids: List[int] = []
|
||||||
|
date: Optional[Union[date, str]]
|
||||||
model_config = ConfigDict(from_attributes=True)
|
model_config = ConfigDict(from_attributes=True)
|
||||||
@@ -1,10 +1,11 @@
|
|||||||
import uuid
|
import uuid
|
||||||
from typing import Optional
|
from typing import Optional, Dict, Any
|
||||||
from fastapi_users import schemas
|
from fastapi_users import schemas
|
||||||
|
|
||||||
class UserRead(schemas.BaseUser[uuid.UUID]):
|
class UserRead(schemas.BaseUser[uuid.UUID]):
|
||||||
first_name: Optional[str] = None
|
first_name: Optional[str] = None
|
||||||
last_name: Optional[str] = None
|
last_name: Optional[str] = None
|
||||||
|
config: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
class UserCreate(schemas.BaseUserCreate):
|
class UserCreate(schemas.BaseUserCreate):
|
||||||
first_name: Optional[str] = None
|
first_name: Optional[str] = None
|
||||||
178
7project/src/backend/app/services/bank_scraper.py
Normal file
178
7project/src/backend/app/services/bank_scraper.py
Normal file
@@ -0,0 +1,178 @@
|
|||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from os.path import dirname, join
|
||||||
|
from time import strptime
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
from app.core.db import sync_session_maker
|
||||||
|
from app.models.transaction import Transaction
|
||||||
|
from app.models.user import User
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
OAUTH_DIR = join(dirname(__file__), "..", "oauth")
|
||||||
|
CERTS = (
|
||||||
|
join(OAUTH_DIR, "public_key.pem"),
|
||||||
|
join(OAUTH_DIR, "private_key.key"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def load_mock_bank_transactions(user_id: str) -> None:
|
||||||
|
try:
|
||||||
|
uid = UUID(str(user_id))
|
||||||
|
except Exception:
|
||||||
|
logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
|
||||||
|
return
|
||||||
|
|
||||||
|
_load_mock_bank_transactions(uid)
|
||||||
|
|
||||||
|
|
||||||
|
def load_all_mock_bank_transactions() -> None:
|
||||||
|
with sync_session_maker() as session:
|
||||||
|
users = session.execute(select(User)).unique().scalars().all()
|
||||||
|
logger.info("[BankScraper] Starting Mock Bank scrape for all users | count=%d", len(users))
|
||||||
|
|
||||||
|
processed = 0
|
||||||
|
for user in users:
|
||||||
|
try:
|
||||||
|
_load_mock_bank_transactions(user.id)
|
||||||
|
processed += 1
|
||||||
|
except Exception:
|
||||||
|
logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
|
||||||
|
getattr(user, 'email', None))
|
||||||
|
logger.info("[BankScraper] Finished Mock Bank scrape for all users | processed=%d", processed)
|
||||||
|
|
||||||
|
|
||||||
|
def _load_mock_bank_transactions(user_id: UUID) -> None:
|
||||||
|
with sync_session_maker() as session:
|
||||||
|
user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
|
||||||
|
if user is None:
|
||||||
|
logger.warning("User not found for id=%s", user_id)
|
||||||
|
return
|
||||||
|
|
||||||
|
transactions = []
|
||||||
|
with httpx.Client() as client:
|
||||||
|
response = client.get(f"{os.getenv('APP_POD_URL')}/mock-bank/scrape")
|
||||||
|
if response.status_code != httpx.codes.OK:
|
||||||
|
return
|
||||||
|
for transaction in response.json():
|
||||||
|
transactions.append(
|
||||||
|
Transaction(
|
||||||
|
amount=transaction["amount"],
|
||||||
|
description=transaction.get("description"),
|
||||||
|
date=strptime(transaction["date"], "%Y-%m-%d"),
|
||||||
|
user_id=user_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
for transaction in transactions:
|
||||||
|
session.add(transaction)
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def load_ceska_sporitelna_transactions(user_id: str) -> None:
|
||||||
|
try:
|
||||||
|
uid = UUID(str(user_id))
|
||||||
|
except Exception:
|
||||||
|
logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
|
||||||
|
return
|
||||||
|
|
||||||
|
_load_ceska_sporitelna_transactions(uid)
|
||||||
|
|
||||||
|
|
||||||
|
def load_all_ceska_sporitelna_transactions() -> None:
|
||||||
|
with sync_session_maker() as session:
|
||||||
|
users = session.execute(select(User)).unique().scalars().all()
|
||||||
|
logger.info("[BankScraper] Starting CSAS scrape for all users | count=%d", len(users))
|
||||||
|
|
||||||
|
processed = 0
|
||||||
|
for user in users:
|
||||||
|
try:
|
||||||
|
_load_ceska_sporitelna_transactions(user.id)
|
||||||
|
processed += 1
|
||||||
|
except Exception:
|
||||||
|
logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
|
||||||
|
getattr(user, 'email', None))
|
||||||
|
logger.info("[BankScraper] Finished CSAS scrape for all users | processed=%d", processed)
|
||||||
|
|
||||||
|
|
||||||
|
def _load_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||||
|
with sync_session_maker() as session:
|
||||||
|
user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
|
||||||
|
if user is None:
|
||||||
|
logger.warning("User not found for id=%s", user_id)
|
||||||
|
return
|
||||||
|
|
||||||
|
cfg = user.config or {}
|
||||||
|
if "csas" not in cfg:
|
||||||
|
return
|
||||||
|
|
||||||
|
cfg = json.loads(cfg["csas"])
|
||||||
|
if "access_token" not in cfg:
|
||||||
|
return
|
||||||
|
|
||||||
|
accounts = []
|
||||||
|
try:
|
||||||
|
with httpx.Client(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
|
||||||
|
response = client.get(
|
||||||
|
"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts?size=10&page=0&sort=iban&order=desc",
|
||||||
|
headers={
|
||||||
|
"Authorization": f"Bearer {cfg['access_token']}",
|
||||||
|
"WEB-API-key": "09fdc637-3c57-4242-95f2-c2205a2438f3",
|
||||||
|
"user-involved": "false",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if response.status_code != httpx.codes.OK:
|
||||||
|
return
|
||||||
|
|
||||||
|
for account in response.json().get("accounts", []):
|
||||||
|
accounts.append(account)
|
||||||
|
|
||||||
|
except (httpx.HTTPError,) as e:
|
||||||
|
logger.exception("[BankScraper] HTTP error during CSAS request | user_id=%s", user_id)
|
||||||
|
return
|
||||||
|
|
||||||
|
for account in accounts:
|
||||||
|
acc_id = account.get("id")
|
||||||
|
if not acc_id:
|
||||||
|
continue
|
||||||
|
|
||||||
|
url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{acc_id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
|
||||||
|
with httpx.Client(cert=CERTS) as client:
|
||||||
|
response = client.get(
|
||||||
|
url,
|
||||||
|
headers={
|
||||||
|
"Authorization": f"Bearer {cfg['access_token']}",
|
||||||
|
"WEB-API-key": "09fdc637-3c57-4242-95f2-c2205a2438f3",
|
||||||
|
"user-involved": "false",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if response.status_code != httpx.codes.OK:
|
||||||
|
continue
|
||||||
|
|
||||||
|
transactions = response.json().get("transactions", [])
|
||||||
|
|
||||||
|
for transaction in transactions:
|
||||||
|
description = transaction.get("entryDetails", {}).get("transactionDetails", {}).get(
|
||||||
|
"additionalRemittanceInformation")
|
||||||
|
date_str = transaction.get("bookingDate", {}).get("date")
|
||||||
|
date = strptime(date_str, "%Y-%m-%d") if date_str else None
|
||||||
|
amount = transaction.get("amount", {}).get("value")
|
||||||
|
if transaction.get("creditDebitIndicator") == "DBIT" and amount is not None:
|
||||||
|
amount = -abs(amount)
|
||||||
|
|
||||||
|
if amount is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
obj = Transaction(
|
||||||
|
amount=amount,
|
||||||
|
description=description,
|
||||||
|
date=date,
|
||||||
|
user_id=user_id,
|
||||||
|
)
|
||||||
|
session.add(obj)
|
||||||
|
session.commit()
|
||||||
48
7project/src/backend/app/services/prometheus.py
Normal file
48
7project/src/backend/app/services/prometheus.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
from typing import Callable
|
||||||
|
from prometheus_fastapi_instrumentator.metrics import Info
|
||||||
|
from prometheus_client import Gauge
|
||||||
|
from sqlalchemy import select, func
|
||||||
|
|
||||||
|
from app.core.db import async_session_maker
|
||||||
|
from app.models.transaction import Transaction
|
||||||
|
from app.models.user import User
|
||||||
|
|
||||||
|
|
||||||
|
def number_of_users() -> Callable[[Info], None]:
|
||||||
|
METRIC = Gauge(
|
||||||
|
"number_of_users_total",
|
||||||
|
"Number of registered users.",
|
||||||
|
labelnames=("users",)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def instrumentation(info: Info) -> None:
|
||||||
|
try:
|
||||||
|
async with async_session_maker() as session:
|
||||||
|
result = await session.execute(select(func.count(User.id)))
|
||||||
|
user_count = result.scalar_one() or 0
|
||||||
|
except Exception:
|
||||||
|
# In case of DB errors, avoid crashing metrics endpoint
|
||||||
|
user_count = 0
|
||||||
|
METRIC.labels(users="total").set(user_count)
|
||||||
|
|
||||||
|
return instrumentation
|
||||||
|
|
||||||
|
|
||||||
|
def number_of_transactions() -> Callable[[Info], None]:
|
||||||
|
METRIC = Gauge(
|
||||||
|
"number_of_transactions_total",
|
||||||
|
"Number of transactions stored.",
|
||||||
|
labelnames=("transactions",)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def instrumentation(info: Info) -> None:
|
||||||
|
try:
|
||||||
|
async with async_session_maker() as session:
|
||||||
|
result = await session.execute(select(func.count()).select_from(Transaction))
|
||||||
|
transaction_count = result.scalar_one() or 0
|
||||||
|
except Exception:
|
||||||
|
# In case of DB errors, avoid crashing metrics endpoint
|
||||||
|
transaction_count = 0
|
||||||
|
METRIC.labels(transactions="total").set(transaction_count)
|
||||||
|
|
||||||
|
return instrumentation
|
||||||
@@ -14,11 +14,10 @@ from httpx_oauth.oauth2 import BaseOAuth2
|
|||||||
|
|
||||||
from app.models.user import User
|
from app.models.user import User
|
||||||
from app.oauth.bank_id import BankID
|
from app.oauth.bank_id import BankID
|
||||||
from app.oauth.csas import CSASOAuth
|
from app.workers.celery_tasks import send_email
|
||||||
from app.oauth.custom_openid import CustomOpenID
|
from app.oauth.custom_openid import CustomOpenID
|
||||||
from app.oauth.moje_id import MojeIDOAuth
|
from app.oauth.moje_id import MojeIDOAuth
|
||||||
from app.services.db import get_user_db
|
from app.services.db import get_user_db
|
||||||
from app.core.queue import enqueue_email
|
|
||||||
|
|
||||||
SECRET = os.getenv("SECRET", "CHANGE_ME_SECRET")
|
SECRET = os.getenv("SECRET", "CHANGE_ME_SECRET")
|
||||||
|
|
||||||
@@ -87,7 +86,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
|
|||||||
"Pokud jsi registraci neprováděl(a), tento email ignoruj.\n"
|
"Pokud jsi registraci neprováděl(a), tento email ignoruj.\n"
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
enqueue_email(to=user.email, subject=subject, body=body)
|
send_email.delay(user.email, subject, body)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("[Email Fallback] To:", user.email)
|
print("[Email Fallback] To:", user.email)
|
||||||
print("[Email Fallback] Subject:", subject)
|
print("[Email Fallback] Subject:", subject)
|
||||||
86
7project/src/backend/app/workers/celery_tasks.py
Normal file
86
7project/src/backend/app/workers/celery_tasks.py
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import smtplib
|
||||||
|
from email.message import EmailMessage
|
||||||
|
|
||||||
|
import app.services.bank_scraper
|
||||||
|
from app.celery_app import celery_app
|
||||||
|
|
||||||
|
logger = logging.getLogger("celery_tasks")
|
||||||
|
if not logger.handlers:
|
||||||
|
_h = logging.StreamHandler()
|
||||||
|
logger.addHandler(_h)
|
||||||
|
logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(name="workers.send_email")
|
||||||
|
def send_email(to: str, subject: str, body: str) -> None:
|
||||||
|
if not (to and subject and body):
|
||||||
|
logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
|
||||||
|
return
|
||||||
|
|
||||||
|
host = os.getenv("SMTP_HOST")
|
||||||
|
if not host:
|
||||||
|
logger.error("SMTP_HOST is not configured; cannot send email")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
port = int(os.getenv("SMTP_PORT", "25"))
|
||||||
|
username = os.getenv("SMTP_USERNAME")
|
||||||
|
password = os.getenv("SMTP_PASSWORD")
|
||||||
|
use_tls = os.getenv("SMTP_USE_TLS", "0").lower() in {"1", "true", "yes"}
|
||||||
|
use_ssl = os.getenv("SMTP_USE_SSL", "0").lower() in {"1", "true", "yes"}
|
||||||
|
timeout = int(os.getenv("SMTP_TIMEOUT", "10"))
|
||||||
|
mail_from = os.getenv("SMTP_FROM") or username or "noreply@localhost"
|
||||||
|
|
||||||
|
# Build message
|
||||||
|
msg = EmailMessage()
|
||||||
|
msg["To"] = to
|
||||||
|
msg["From"] = mail_from
|
||||||
|
msg["Subject"] = subject
|
||||||
|
msg.set_content(body)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if use_ssl:
|
||||||
|
with smtplib.SMTP_SSL(host=host, port=port, timeout=timeout) as smtp:
|
||||||
|
if username and password:
|
||||||
|
smtp.login(username, password)
|
||||||
|
smtp.send_message(msg)
|
||||||
|
else:
|
||||||
|
with smtplib.SMTP(host=host, port=port, timeout=timeout) as smtp:
|
||||||
|
# STARTTLS if requested
|
||||||
|
if use_tls:
|
||||||
|
smtp.starttls()
|
||||||
|
if username and password:
|
||||||
|
smtp.login(username, password)
|
||||||
|
smtp.send_message(msg)
|
||||||
|
logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
|
||||||
|
except Exception:
|
||||||
|
logger.exception("Failed to send email via SMTP to=%s subject=%s host=%s port=%s tls=%s ssl=%s", to, subject,
|
||||||
|
host, port, use_tls, use_ssl)
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(name="workers.load_transactions")
|
||||||
|
def load_transactions(user_id: str) -> None:
|
||||||
|
if not user_id:
|
||||||
|
logger.error("Load transactions task missing user_id.")
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info("[Celery] Starting load_transactions | user_id=%s", user_id)
|
||||||
|
try:
|
||||||
|
# Use synchronous bank scraper functions directly, mirroring load_all_transactions
|
||||||
|
app.services.bank_scraper.load_mock_bank_transactions(user_id)
|
||||||
|
app.services.bank_scraper.load_ceska_sporitelna_transactions(user_id)
|
||||||
|
except Exception:
|
||||||
|
logger.exception("Failed to load transactions for user_id=%s", user_id)
|
||||||
|
else:
|
||||||
|
logger.info("[Celery] Finished load_transactions | user_id=%s", user_id)
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(name="workers.load_all_transactions")
|
||||||
|
def load_all_transactions() -> None:
|
||||||
|
logger.info("[Celery] Starting load_all_transactions")
|
||||||
|
# Now use synchronous bank scraper functions directly
|
||||||
|
app.services.bank_scraper.load_all_mock_bank_transactions()
|
||||||
|
app.services.bank_scraper.load_all_ceska_sporitelna_transactions()
|
||||||
|
logger.info("[Celery] Finished load_all_transactions")
|
||||||
20
7project/src/backend/docker-compose.test.yml
Normal file
20
7project/src/backend/docker-compose.test.yml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
version: "3.9"
|
||||||
|
services:
|
||||||
|
mariadb:
|
||||||
|
image: mariadb:11.4
|
||||||
|
container_name: test-mariadb
|
||||||
|
environment:
|
||||||
|
MARIADB_ROOT_PASSWORD: rootpw
|
||||||
|
MARIADB_DATABASE: group_project
|
||||||
|
MARIADB_USER: appuser
|
||||||
|
MARIADB_PASSWORD: apppass
|
||||||
|
ports:
|
||||||
|
- "3307:3306" # host:container (use 3307 on host to avoid conflicts)
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "mariadb-admin", "ping", "-h", "127.0.0.1", "-u", "root", "-prootpw", "--silent"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 2s
|
||||||
|
retries: 20
|
||||||
|
# Truly ephemeral, fast storage (removed when container stops)
|
||||||
|
tmpfs:
|
||||||
|
- /var/lib/mysql
|
||||||
4
7project/src/backend/main.py
Normal file
4
7project/src/backend/main.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
import uvicorn
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
uvicorn.run("app.app:fastApi", host="0.0.0.0", log_level="info")
|
||||||
5
7project/src/backend/pyproject.toml
Normal file
5
7project/src/backend/pyproject.toml
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
[tool.pytest.ini_options]
|
||||||
|
pythonpath = "."
|
||||||
|
asyncio_mode = "auto"
|
||||||
|
asyncio_default_fixture_loop_scope = "session"
|
||||||
|
asyncio_default_test_loop_scope = "session"
|
||||||
@@ -38,6 +38,8 @@ MarkupSafe==3.0.2
|
|||||||
multidict==6.6.4
|
multidict==6.6.4
|
||||||
packaging==25.0
|
packaging==25.0
|
||||||
pamqp==3.3.0
|
pamqp==3.3.0
|
||||||
|
prometheus-fastapi-instrumentator==7.1.0
|
||||||
|
prometheus_client==0.23.1
|
||||||
prompt_toolkit==3.0.52
|
prompt_toolkit==3.0.52
|
||||||
propcache==0.3.2
|
propcache==0.3.2
|
||||||
pwdlib==0.2.1
|
pwdlib==0.2.1
|
||||||
@@ -54,6 +56,7 @@ sentry-sdk==2.42.0
|
|||||||
six==1.17.0
|
six==1.17.0
|
||||||
sniffio==1.3.1
|
sniffio==1.3.1
|
||||||
SQLAlchemy==2.0.43
|
SQLAlchemy==2.0.43
|
||||||
|
SQLAlchemy-Utils==0.42.0
|
||||||
starlette==0.48.0
|
starlette==0.48.0
|
||||||
tomli==2.2.1
|
tomli==2.2.1
|
||||||
typing-inspection==0.4.1
|
typing-inspection==0.4.1
|
||||||
@@ -67,3 +70,4 @@ watchfiles==1.1.0
|
|||||||
wcwidth==0.2.14
|
wcwidth==0.2.14
|
||||||
websockets==15.0.1
|
websockets==15.0.1
|
||||||
yarl==1.20.1
|
yarl==1.20.1
|
||||||
|
python-json-logger==2.0.7
|
||||||
113
7project/src/backend/test_locally.sh
Executable file
113
7project/src/backend/test_locally.sh
Executable file
@@ -0,0 +1,113 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# Run tests against a disposable local MariaDB on host port 3307 using Docker Compose.
|
||||||
|
# Requirements: Docker, docker compose plugin, Python, Alembic, pytest.
|
||||||
|
# Usage:
|
||||||
|
# chmod +x ./test_locally.sh
|
||||||
|
# # From 7project/backend directory
|
||||||
|
# ./test_locally.sh [--only-unit|--only-integration|--only-e2e] [pytest-args...]
|
||||||
|
# # Examples:
|
||||||
|
# ./test_locally.sh --only-unit -q
|
||||||
|
# ./test_locally.sh --only-integration -k "login"
|
||||||
|
# ./test_locally.sh --only-e2e -vv
|
||||||
|
#
|
||||||
|
# This script will:
|
||||||
|
# 1) Start a MariaDB 11.4 container (ephemeral storage, port 3307)
|
||||||
|
# 2) Wait until it's healthy
|
||||||
|
# 3) Export env vars expected by the app (DATABASE_URL etc.)
|
||||||
|
# 4) Run Alembic migrations
|
||||||
|
# 5) Run pytest
|
||||||
|
# 6) Tear everything down (containers and tmpfs data)
|
||||||
|
|
||||||
|
COMPOSE_FILE="docker-compose.test.yml"
|
||||||
|
SERVICE_NAME="mariadb"
|
||||||
|
CONTAINER_NAME="test-mariadb"
|
||||||
|
|
||||||
|
if ! command -v docker >/dev/null 2>&1; then
|
||||||
|
echo "Docker is required but not found in PATH" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
if ! docker compose version >/dev/null 2>&1; then
|
||||||
|
echo "Docker Compose V2 plugin is required (docker compose)" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Bring up the DB
|
||||||
|
echo "Starting MariaDB (port 3307) with docker compose..."
|
||||||
|
docker compose -f "$COMPOSE_FILE" up -d
|
||||||
|
|
||||||
|
# Ensure we clean up on exit
|
||||||
|
cleanup() {
|
||||||
|
echo "\nTearing down docker compose stack..."
|
||||||
|
docker compose -f "$COMPOSE_FILE" down -v || true
|
||||||
|
}
|
||||||
|
trap cleanup EXIT
|
||||||
|
|
||||||
|
# Wait for healthy container
|
||||||
|
echo -n "Waiting for MariaDB to become healthy"
|
||||||
|
for i in {1..60}; do
|
||||||
|
status=$(docker inspect -f '{{.State.Health.Status}}' "$CONTAINER_NAME" 2>/dev/null || echo "")
|
||||||
|
if [ "$status" = "healthy" ]; then
|
||||||
|
echo " -> healthy"
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
echo -n "."
|
||||||
|
sleep 1
|
||||||
|
if [ $i -eq 60 ]; then
|
||||||
|
echo "\nMariaDB did not become healthy in time" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
# Export env vars for the app/tests (match app/core/db.py expectations)
|
||||||
|
export MARIADB_HOST=127.0.0.1
|
||||||
|
export MARIADB_PORT=3307
|
||||||
|
export MARIADB_DB=group_project
|
||||||
|
export MARIADB_USER=appuser
|
||||||
|
export MARIADB_PASSWORD=apppass
|
||||||
|
export DATABASE_URL="mysql+asyncmy://$MARIADB_USER:$MARIADB_PASSWORD@$MARIADB_HOST:$MARIADB_PORT/$MARIADB_DB"
|
||||||
|
export PYTEST_RUN_CONFIG="True"
|
||||||
|
|
||||||
|
# Determine which tests to run based on flags
|
||||||
|
UNIT_TESTS="tests/test_unit_user_service.py"
|
||||||
|
INTEGRATION_TESTS="tests/test_integration_app.py"
|
||||||
|
E2E_TESTS="tests/test_e2e.py"
|
||||||
|
|
||||||
|
FLAG_COUNT=0
|
||||||
|
TEST_TARGET=""
|
||||||
|
declare -a PYTEST_ARGS=()
|
||||||
|
for arg in "$@"; do
|
||||||
|
case "$arg" in
|
||||||
|
--only-unit)
|
||||||
|
TEST_TARGET="$UNIT_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
|
||||||
|
--only-integration)
|
||||||
|
TEST_TARGET="$INTEGRATION_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
|
||||||
|
--only-e2e)
|
||||||
|
TEST_TARGET="$E2E_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
|
||||||
|
*)
|
||||||
|
PYTEST_ARGS+=("$arg");;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ "$FLAG_COUNT" -gt 1 ]; then
|
||||||
|
echo "Error: Use only one of --only-unit, --only-integration, or --only-e2e" >&2
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Run Alembic migrations then tests
|
||||||
|
pushd . >/dev/null
|
||||||
|
echo "Running Alembic migrations..."
|
||||||
|
alembic upgrade head
|
||||||
|
|
||||||
|
echo "Running pytest..."
|
||||||
|
if [ -n "$TEST_TARGET" ]; then
|
||||||
|
# Use "${PYTEST_ARGS[@]:-}" to safely expand empty array with 'set -u'
|
||||||
|
pytest "$TEST_TARGET" "${PYTEST_ARGS[@]:-}"
|
||||||
|
else
|
||||||
|
# Use "${PYTEST_ARGS[@]:-}" to safely expand empty array with 'set -u'
|
||||||
|
pytest "${PYTEST_ARGS[@]:-}"
|
||||||
|
fi
|
||||||
|
popd >/dev/null
|
||||||
|
|
||||||
|
# Cleanup handled by trap
|
||||||
44
7project/src/backend/tests/conftest.py
Normal file
44
7project/src/backend/tests/conftest.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import sys
|
||||||
|
import uuid
|
||||||
|
import types
|
||||||
|
import pytest
|
||||||
|
from fastapi.testclient import TestClient
|
||||||
|
from httpx import AsyncClient, ASGITransport
|
||||||
|
|
||||||
|
# Stub sentry_sdk to avoid optional dependency issues during import of app
|
||||||
|
stub = types.ModuleType("sentry_sdk")
|
||||||
|
stub.init = lambda *args, **kwargs: None
|
||||||
|
sys.modules.setdefault("sentry_sdk", stub)
|
||||||
|
|
||||||
|
# Import the FastAPI application
|
||||||
|
from app.app import fastApi as app # noqa: E402
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def fastapi_app():
|
||||||
|
return app
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def client(fastapi_app):
|
||||||
|
return TestClient(fastapi_app, raise_server_exceptions=True)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="function")
|
||||||
|
async def test_user(fastapi_app):
|
||||||
|
"""
|
||||||
|
Creates a new user asynchronously and returns their credentials.
|
||||||
|
Does NOT log them in.
|
||||||
|
Using AsyncClient with ASGITransport avoids event loop conflicts with DB connections.
|
||||||
|
"""
|
||||||
|
unique_email = f"testuser_{uuid.uuid4()}@example.com"
|
||||||
|
password = "a_strong_password"
|
||||||
|
user_payload = {"email": unique_email, "password": password}
|
||||||
|
|
||||||
|
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
response = await ac.post("/auth/register", json=user_payload)
|
||||||
|
assert response.status_code == 201
|
||||||
|
|
||||||
|
return {"username": unique_email, "password": password}
|
||||||
|
|
||||||
210
7project/src/backend/tests/test_e2e.py
Normal file
210
7project/src/backend/tests/test_e2e.py
Normal file
@@ -0,0 +1,210 @@
|
|||||||
|
import pytest
|
||||||
|
import uuid
|
||||||
|
from httpx import AsyncClient, ASGITransport
|
||||||
|
from fastapi import status
|
||||||
|
|
||||||
|
|
||||||
|
def test_e2e(client):
|
||||||
|
# 1) Service is alive
|
||||||
|
alive = client.get("/")
|
||||||
|
assert alive.status_code == status.HTTP_200_OK
|
||||||
|
|
||||||
|
# 2) Attempt to login without payload should fail fast (validation error)
|
||||||
|
login = client.post("/auth/jwt/login")
|
||||||
|
assert login.status_code in (status.HTTP_400_BAD_REQUEST, status.HTTP_422_UNPROCESSABLE_CONTENT)
|
||||||
|
|
||||||
|
# 3) Protected endpoint should not be accessible without token
|
||||||
|
me = client.get("/users/me")
|
||||||
|
assert me.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_e2e_full_user_lifecycle(fastapi_app, test_user):
|
||||||
|
# Use an AsyncClient with ASGITransport for async tests
|
||||||
|
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
login_payload = test_user
|
||||||
|
|
||||||
|
# 1. Log in with the new credentials
|
||||||
|
login_resp = await ac.post("/auth/jwt/login", data=login_payload)
|
||||||
|
assert login_resp.status_code == status.HTTP_200_OK
|
||||||
|
token = login_resp.json()["access_token"]
|
||||||
|
headers = {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
# 2. Access a protected endpoint
|
||||||
|
me_resp = await ac.get("/users/me", headers=headers)
|
||||||
|
assert me_resp.status_code == status.HTTP_200_OK
|
||||||
|
assert me_resp.json()["email"] == test_user["username"]
|
||||||
|
|
||||||
|
# 3. Update the user's profile
|
||||||
|
update_payload = {"first_name": "Test"}
|
||||||
|
patch_resp = await ac.patch("/users/me", json=update_payload, headers=headers)
|
||||||
|
assert patch_resp.status_code == status.HTTP_200_OK
|
||||||
|
assert patch_resp.json()["first_name"] == "Test"
|
||||||
|
|
||||||
|
# 4. Log out
|
||||||
|
logout_resp = await ac.post("/auth/jwt/logout", headers=headers)
|
||||||
|
assert logout_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
# 5. Verify token is invalid
|
||||||
|
me_again_resp = await ac.get("/users/me", headers=headers)
|
||||||
|
assert me_again_resp.status_code == status.HTTP_401_UNAUTHORIZED
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_e2e_transaction_workflow(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
# 1. Log in to get the token
|
||||||
|
login_resp = await ac.post("/auth/jwt/login", data=test_user)
|
||||||
|
token = login_resp.json()["access_token"]
|
||||||
|
headers = {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
# NEW STEP: Create a category first to get a valid ID
|
||||||
|
category_payload = {"name": "Test Category for E2E"}
|
||||||
|
create_category_resp = await ac.post("/categories/create", json=category_payload, headers=headers)
|
||||||
|
assert create_category_resp.status_code == status.HTTP_201_CREATED
|
||||||
|
category_id = create_category_resp.json()["id"]
|
||||||
|
|
||||||
|
# 2. Create a new transaction
|
||||||
|
tx_payload = {"amount": -55.40, "description": "Milk and eggs"}
|
||||||
|
tx_resp = await ac.post("/transactions/create", json=tx_payload, headers=headers)
|
||||||
|
assert tx_resp.status_code == status.HTTP_201_CREATED
|
||||||
|
tx_id = tx_resp.json()["id"]
|
||||||
|
|
||||||
|
# 3. Assign the category
|
||||||
|
assign_resp = await ac.post(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
|
||||||
|
assert assign_resp.status_code == status.HTTP_200_OK
|
||||||
|
|
||||||
|
# 4. Verify assignment
|
||||||
|
get_tx_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
|
||||||
|
assert category_id in get_tx_resp.json()["category_ids"]
|
||||||
|
|
||||||
|
# 5. Unassign the category
|
||||||
|
unassign_resp = await ac.delete(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
|
||||||
|
assert unassign_resp.status_code == status.HTTP_200_OK
|
||||||
|
|
||||||
|
# 6. Get the transaction again and verify the category is gone
|
||||||
|
get_tx_again_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
|
||||||
|
final_tx_data = get_tx_again_resp.json()
|
||||||
|
assert category_id not in final_tx_data["category_ids"]
|
||||||
|
|
||||||
|
# 7. Delete the transaction for cleanup
|
||||||
|
delete_resp = await ac.delete(f"/transactions/{tx_id}/delete", headers=headers)
|
||||||
|
assert delete_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
# NEW STEP: Clean up the created category
|
||||||
|
delete_category_resp = await ac.delete(f"/categories/{category_id}", headers=headers)
|
||||||
|
assert delete_category_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_register_then_login_and_fetch_me(fastapi_app):
|
||||||
|
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
# Use unique email to avoid duplicates across runs
|
||||||
|
suffix = uuid.uuid4().hex[:8]
|
||||||
|
email = f"newuser_{suffix}@example.com"
|
||||||
|
password = "StrongPassw0rd!"
|
||||||
|
|
||||||
|
reg = await ac.post("/auth/register", json={"email": email, "password": password})
|
||||||
|
assert reg.status_code in (status.HTTP_201_CREATED, status.HTTP_200_OK)
|
||||||
|
|
||||||
|
login = await ac.post("/auth/jwt/login", data={"username": email, "password": password})
|
||||||
|
assert login.status_code == status.HTTP_200_OK
|
||||||
|
token = login.json()["access_token"]
|
||||||
|
headers = {"Authorization": f"Bearer {token}"}
|
||||||
|
try:
|
||||||
|
me = await ac.get("/users/me", headers=headers)
|
||||||
|
assert me.status_code == status.HTTP_200_OK
|
||||||
|
assert me.json()["email"] == email
|
||||||
|
finally:
|
||||||
|
# Cleanup: delete the created user so future runs won’t conflict
|
||||||
|
d = await ac.delete("/users/me", headers=headers)
|
||||||
|
assert d.status_code == status.HTTP_204_NO_CONTENT
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_delete_current_user_revokes_access(fastapi_app):
|
||||||
|
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
email = "todelete@example.com"
|
||||||
|
password = "Passw0rd!"
|
||||||
|
reg = await ac.post("/auth/register", json={"email": email, "password": password})
|
||||||
|
assert reg.status_code in (status.HTTP_200_OK, status.HTTP_201_CREATED)
|
||||||
|
|
||||||
|
login = await ac.post("/auth/jwt/login", data={"username": email, "password": password})
|
||||||
|
token = login.json()["access_token"]
|
||||||
|
headers = {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
# Delete self
|
||||||
|
d = await ac.delete("/users/me", headers=headers)
|
||||||
|
assert d.status_code == status.HTTP_204_NO_CONTENT
|
||||||
|
|
||||||
|
# Access should now fail
|
||||||
|
me = await ac.get("/users/me", headers=headers)
|
||||||
|
assert me.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_update_category_conflict_and_404(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||||
|
h = {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
a = (await ac.post("/categories/create", json={"name": "A"}, headers=h)).json()
|
||||||
|
b = (await ac.post("/categories/create", json={"name": "B"}, headers=h)).json()
|
||||||
|
|
||||||
|
# Attempt to rename A -> B should conflict
|
||||||
|
conflict = await ac.patch(f"/categories/{a['id']}", json={"name": "B"}, headers=h)
|
||||||
|
assert conflict.status_code == status.HTTP_409_CONFLICT
|
||||||
|
|
||||||
|
# Update non-existent
|
||||||
|
missing = await ac.patch("/categories/999999", json={"name": "Z"}, headers=h)
|
||||||
|
assert missing.status_code == status.HTTP_404_NOT_FOUND
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_category_cross_user_isolation(fastapi_app):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
# Generate unique emails for both users
|
||||||
|
sfx = uuid.uuid4().hex[:8]
|
||||||
|
u1 = {"email": f"u1_{sfx}@example.com", "password": "Aaaaaa1!"}
|
||||||
|
u2 = {"email": f"u2_{sfx}@example.com", "password": "Aaaaaa1!"}
|
||||||
|
|
||||||
|
# user1
|
||||||
|
assert (await ac.post("/auth/register", json=u1)).status_code in (200, 201)
|
||||||
|
t1 = (await ac.post("/auth/jwt/login", data={"username": u1["email"], "password": u1["password"]})).json()["access_token"]
|
||||||
|
h1 = {"Authorization": f"Bearer {t1}"}
|
||||||
|
|
||||||
|
# user1 creates a category
|
||||||
|
c = (await ac.post("/categories/create", json={"name": "Private"}, headers=h1)).json()
|
||||||
|
cat_id = c["id"]
|
||||||
|
|
||||||
|
# user2
|
||||||
|
assert (await ac.post("/auth/register", json=u2)).status_code in (200, 201)
|
||||||
|
t2 = (await ac.post("/auth/jwt/login", data={"username": u2["email"], "password": u2["password"]})).json()["access_token"]
|
||||||
|
h2 = {"Authorization": f"Bearer {t2}"}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# user2 cannot read/delete user1's category
|
||||||
|
g = await ac.get(f"/categories/{cat_id}", headers=h2)
|
||||||
|
assert g.status_code == status.HTTP_404_NOT_FOUND
|
||||||
|
d = await ac.delete(f"/categories/{cat_id}", headers=h2)
|
||||||
|
assert d.status_code == status.HTTP_404_NOT_FOUND
|
||||||
|
finally:
|
||||||
|
# Cleanup: remove the created category as its owner
|
||||||
|
try:
|
||||||
|
_ = await ac.delete(f"/categories/{cat_id}", headers=h1)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
# Cleanup: delete both users to avoid email conflicts later
|
||||||
|
try:
|
||||||
|
_ = await ac.delete("/users/me", headers=h1)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
_ = await ac.delete("/users/me", headers=h2)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
159
7project/src/backend/tests/test_integration_app.py
Normal file
159
7project/src/backend/tests/test_integration_app.py
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
from fastapi import status
|
||||||
|
import pytest
|
||||||
|
from httpx import AsyncClient, ASGITransport
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_create_and_get_category(fastapi_app, test_user):
|
||||||
|
# Use AsyncClient for async tests
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
# 1. Log in to get an auth token
|
||||||
|
login_resp = await ac.post("/auth/jwt/login", data=test_user)
|
||||||
|
token = login_resp.json()["access_token"]
|
||||||
|
headers = {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
# 2. Define and create the new category
|
||||||
|
category_name = "Async Integration Test"
|
||||||
|
category_payload = {"name": category_name}
|
||||||
|
create_resp = await ac.post("/categories/create", json=category_payload, headers=headers)
|
||||||
|
|
||||||
|
# 3. Assert creation was successful
|
||||||
|
assert create_resp.status_code == status.HTTP_201_CREATED
|
||||||
|
created_data = create_resp.json()
|
||||||
|
category_id = created_data["id"]
|
||||||
|
assert created_data["name"] == category_name
|
||||||
|
|
||||||
|
# 4. GET the list of categories to verify
|
||||||
|
list_resp = await ac.get("/categories/", headers=headers)
|
||||||
|
assert list_resp.status_code == status.HTTP_200_OK
|
||||||
|
|
||||||
|
# 5. Check that our new category is in the list
|
||||||
|
categories_list = list_resp.json()
|
||||||
|
assert any(cat["name"] == category_name for cat in categories_list)
|
||||||
|
|
||||||
|
delete_resp = await ac.delete(f"/categories/{category_id}", headers=headers)
|
||||||
|
assert delete_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_create_transaction_missing_amount_fails(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
# 1. Log in to get an auth token
|
||||||
|
login_resp = await ac.post("/auth/jwt/login", data=test_user)
|
||||||
|
token = login_resp.json()["access_token"]
|
||||||
|
headers = {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
# 2. Define an invalid payload
|
||||||
|
invalid_payload = {"description": "This should fail"}
|
||||||
|
|
||||||
|
# 3. Attempt to create the transaction
|
||||||
|
resp = await ac.post("/transactions/create", json=invalid_payload, headers=headers)
|
||||||
|
|
||||||
|
# 4. Assert the expected validation error
|
||||||
|
assert resp.status_code == status.HTTP_422_UNPROCESSABLE_CONTENT
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_login_invalid_credentials(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
bad = await ac.post("/auth/jwt/login", data={"username": test_user["username"], "password": "nope"})
|
||||||
|
assert bad.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_400_BAD_REQUEST)
|
||||||
|
unknown = await ac.post("/auth/jwt/login", data={"username": "nouser@example.com", "password": "x"})
|
||||||
|
assert unknown.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_category_duplicate_name_conflict(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||||
|
h = {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
p = {"name": "Food"}
|
||||||
|
r1 = await ac.post("/categories/create", json=p, headers=h)
|
||||||
|
assert r1.status_code == status.HTTP_201_CREATED
|
||||||
|
r2 = await ac.post("/categories/create", json=p, headers=h)
|
||||||
|
assert r2.status_code == status.HTTP_409_CONFLICT
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_create_transaction_invalid_date_format(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||||
|
h = {"Authorization": f"Bearer {token}"}
|
||||||
|
bad = await ac.post("/transactions/create", json={"amount": 10, "description": "x", "date": "31-12-2024"}, headers=h)
|
||||||
|
assert bad.status_code == status.HTTP_400_BAD_REQUEST
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_update_transaction_rejects_duplicate_category_ids(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||||
|
h = {"Authorization": f"Bearer {token}"}
|
||||||
|
tx = (await ac.post("/transactions/create", json={"amount": 5, "description": "x"}, headers=h)).json()
|
||||||
|
dup = await ac.patch(f"/transactions/{tx['id']}/edit", json={"category_ids": [1, 1]}, headers=h)
|
||||||
|
assert dup.status_code == status.HTTP_400_BAD_REQUEST
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_assign_unassign_category_not_found_cases(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||||
|
h = {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
# Create tx and category
|
||||||
|
tx = (await ac.post("/transactions/create", json={"amount": 1, "description": "a"}, headers=h)).json()
|
||||||
|
cat = (await ac.post("/categories/create", json={"name": "X"}, headers=h)).json()
|
||||||
|
|
||||||
|
# Missing transaction
|
||||||
|
r1 = await ac.post(f"/transactions/999999/categories/{cat['id']}", headers=h)
|
||||||
|
assert r1.status_code == status.HTTP_404_NOT_FOUND
|
||||||
|
|
||||||
|
# Missing category
|
||||||
|
r2 = await ac.post(f"/transactions/{tx['id']}/categories/999999", headers=h)
|
||||||
|
assert r2.status_code == status.HTTP_404_NOT_FOUND
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_transactions_date_filter_and_balance_series(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||||
|
h = {"Authorization": f"Bearer {token}"}
|
||||||
|
|
||||||
|
# Seed transactions spanning days
|
||||||
|
data = [
|
||||||
|
{"amount": 100, "description": "day1", "date": "2024-01-01"},
|
||||||
|
{"amount": -25, "description": "day2", "date": "2024-01-02"},
|
||||||
|
{"amount": 50, "description": "day3", "date": "2024-01-03"},
|
||||||
|
]
|
||||||
|
for p in data:
|
||||||
|
r = await ac.post("/transactions/create", json=p, headers=h)
|
||||||
|
assert r.status_code == status.HTTP_201_CREATED
|
||||||
|
|
||||||
|
# Filtered list (2nd and 3rd only)
|
||||||
|
lst = await ac.get("/transactions/", params={"start_date": "2024-01-02", "end_date": "2024-01-03"}, headers=h)
|
||||||
|
assert lst.status_code == status.HTTP_200_OK
|
||||||
|
assert len(lst.json()) == 2
|
||||||
|
|
||||||
|
# Balance series should be cumulative per date
|
||||||
|
series = await ac.get("/transactions/balance_series", headers=h)
|
||||||
|
assert series.status_code == status.HTTP_200_OK
|
||||||
|
s = series.json()
|
||||||
|
assert s == [
|
||||||
|
{"date": "2024-01-01", "balance": 100.0},
|
||||||
|
{"date": "2024-01-02", "balance": 75.0},
|
||||||
|
{"date": "2024-01-03", "balance": 125.0},
|
||||||
|
]
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_delete_transaction_not_found(fastapi_app, test_user):
|
||||||
|
transport = ASGITransport(app=fastapi_app)
|
||||||
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||||
|
h = {"Authorization": f"Bearer {token}"}
|
||||||
|
r = await ac.delete("/transactions/9999999/delete", headers=h)
|
||||||
|
assert r.status_code == status.HTTP_404_NOT_FOUND
|
||||||
|
|
||||||
63
7project/src/backend/tests/test_unit_user_service.py
Normal file
63
7project/src/backend/tests/test_unit_user_service.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
import pytest
|
||||||
|
from fastapi import status
|
||||||
|
from app.services import user_service
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_oauth_provider_known_unknown():
|
||||||
|
# Known providers should return a provider instance
|
||||||
|
bankid = user_service.get_oauth_provider("BankID")
|
||||||
|
mojeid = user_service.get_oauth_provider("MojeID")
|
||||||
|
assert bankid is not None
|
||||||
|
assert mojeid is not None
|
||||||
|
|
||||||
|
# Unknown should return None
|
||||||
|
assert user_service.get_oauth_provider("DoesNotExist") is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_jwt_strategy_lifetime():
|
||||||
|
strategy = user_service.get_jwt_strategy()
|
||||||
|
assert strategy is not None
|
||||||
|
# Basic smoke check: strategy has a lifetime set to 604800
|
||||||
|
assert getattr(strategy, "lifetime_seconds", None) in (604800,)
|
||||||
|
|
||||||
|
def test_root_ok(client):
|
||||||
|
resp = client.get("/")
|
||||||
|
assert resp.status_code == status.HTTP_200_OK
|
||||||
|
assert resp.json() == {"status": "ok"}
|
||||||
|
|
||||||
|
|
||||||
|
def test_authenticated_route_requires_auth(client):
|
||||||
|
resp = client.get("/authenticated-route")
|
||||||
|
assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_on_after_request_verify_enqueues_email(monkeypatch):
|
||||||
|
calls = {}
|
||||||
|
|
||||||
|
class FakeCeleryTask:
|
||||||
|
def delay(to: str, subject: str, body: str):
|
||||||
|
calls.setdefault("emails", []).append({
|
||||||
|
"to": to,
|
||||||
|
"subject": subject,
|
||||||
|
"body": body,
|
||||||
|
})
|
||||||
|
|
||||||
|
# Patch the enqueue_email used inside user_service
|
||||||
|
monkeypatch.setattr(user_service, "send_email", FakeCeleryTask)
|
||||||
|
|
||||||
|
class DummyUser:
|
||||||
|
def __init__(self, email):
|
||||||
|
self.email = email
|
||||||
|
|
||||||
|
mgr = user_service.UserManager(user_db=None) # user_db not needed for this method
|
||||||
|
user = DummyUser("test@example.com")
|
||||||
|
|
||||||
|
# Call the hook
|
||||||
|
await mgr.on_after_request_verify(user, token="abc123", request=None)
|
||||||
|
|
||||||
|
# Verify one email has been enqueued with expected content
|
||||||
|
assert len(calls.get("emails", [])) == 1
|
||||||
|
email = calls["emails"][0]
|
||||||
|
assert email["to"] == "test@example.com"
|
||||||
|
assert "ověření účtu" in email["subject"].lower()
|
||||||
|
assert "abc123" in email["body"]
|
||||||
30
7project/src/charts/README.md
Normal file
30
7project/src/charts/README.md
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Helm chart deployment
|
||||||
|
|
||||||
|
This directory contains a Helm chart for deploying the app to a cluster, it support bot production and preview
|
||||||
|
deployment.
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
- `myapp-chart/`
|
||||||
|
- `templates/`
|
||||||
|
- `app-deployment.yaml` - Kubernetes Deployment for the application
|
||||||
|
- `cron.yaml` - cronjob for periodic tasks - periodically calls app endpoint
|
||||||
|
- `database.yaml` - Creates database using MariaDB operator. Production database is kept, but preview/dev
|
||||||
|
database is dropped after uninstalling the chart.
|
||||||
|
- `database-grant.yaml` - Defines rights for the database user
|
||||||
|
- `database-user.yaml` - Creates database user
|
||||||
|
- `monitoring.yaml` - Adds /metrics endpoint to Prometheus scraping
|
||||||
|
- `prod.yaml` - Application secrets
|
||||||
|
- `rabbitmq-cluster.yaml` - Defines RabbitMQ cluster for this deployment
|
||||||
|
- `rabbitmq-permission.yalm` - Defines RabbitMQ user permissions
|
||||||
|
- `rabbitmq-queue.yaml` - Defines RabbitMQ queue
|
||||||
|
- `rabbitmq-user.yaml` - Defines RabbitMQ user
|
||||||
|
- `rabbitmq-user-secret.yaml` - Defines RabbitMQ user secret
|
||||||
|
- `service.yaml` - Kubernetes Service for the application
|
||||||
|
- `tunnel.yaml` - Cloudflare tunnel for accessing the application¨
|
||||||
|
- `worker-deployment.yaml` - Kubernetes Deployment for the Celery worker, uses same image as the app-deployment,
|
||||||
|
but with different entrypoint
|
||||||
|
- `Chart.yaml` - Helm chart metadata
|
||||||
|
- `values.yaml` - list of all configurable values
|
||||||
|
- `values-dev.yaml` - default values for development/preview deployment
|
||||||
|
- `values-prod.yaml` - default values for production deployment
|
||||||
@@ -8,10 +8,12 @@ spec:
|
|||||||
selector:
|
selector:
|
||||||
matchLabels:
|
matchLabels:
|
||||||
app: {{ .Values.app.name }}
|
app: {{ .Values.app.name }}
|
||||||
|
endpoint: metrics
|
||||||
template:
|
template:
|
||||||
metadata:
|
metadata:
|
||||||
labels:
|
labels:
|
||||||
app: {{ .Values.app.name }}
|
app: {{ .Values.app.name }}
|
||||||
|
endpoint: metrics
|
||||||
spec:
|
spec:
|
||||||
containers:
|
containers:
|
||||||
- name: {{ .Values.app.name }}
|
- name: {{ .Values.app.name }}
|
||||||
@@ -88,6 +90,11 @@ spec:
|
|||||||
secretKeyRef:
|
secretKeyRef:
|
||||||
name: prod
|
name: prod
|
||||||
key: CSAS_CLIENT_SECRET
|
key: CSAS_CLIENT_SECRET
|
||||||
|
- name: UNIRATE_API_KEY
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: UNIRATE_API_KEY
|
||||||
- name: DOMAIN
|
- name: DOMAIN
|
||||||
value: {{ required "Set .Values.domain" .Values.domain | quote }}
|
value: {{ required "Set .Values.domain" .Values.domain | quote }}
|
||||||
- name: DOMAIN_SCHEME
|
- name: DOMAIN_SCHEME
|
||||||
@@ -101,6 +108,11 @@ spec:
|
|||||||
secretKeyRef:
|
secretKeyRef:
|
||||||
name: prod
|
name: prod
|
||||||
key: SENTRY_DSN
|
key: SENTRY_DSN
|
||||||
|
- name: DB_ENCRYPTION_KEY
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: DB_ENCRYPTION_KEY
|
||||||
livenessProbe:
|
livenessProbe:
|
||||||
httpGet:
|
httpGet:
|
||||||
path: /
|
path: /
|
||||||
25
7project/src/charts/myapp-chart/templates/cron.yaml
Normal file
25
7project/src/charts/myapp-chart/templates/cron.yaml
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
{{- if .Values.cron.enabled }}
|
||||||
|
apiVersion: batch/v1
|
||||||
|
kind: CronJob
|
||||||
|
metadata:
|
||||||
|
name: cronjob
|
||||||
|
spec:
|
||||||
|
schedule: {{ .Values.cron.schedule | quote }}
|
||||||
|
concurrencyPolicy: {{ .Values.cron.concurrencyPolicy | quote }}
|
||||||
|
jobTemplate:
|
||||||
|
spec:
|
||||||
|
template:
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: cronjob
|
||||||
|
image: curlimages/curl:latest
|
||||||
|
imagePullPolicy: IfNotPresent
|
||||||
|
args:
|
||||||
|
- -sS
|
||||||
|
- -o
|
||||||
|
- /dev/null
|
||||||
|
- -w
|
||||||
|
- "%{http_code}"
|
||||||
|
- {{ printf "%s://%s.%s.svc.cluster.local%s" .Values.cron.scheme .Values.app.name .Release.Namespace .Values.cron.endpoint | quote }}
|
||||||
|
restartPolicy: OnFailure
|
||||||
|
{{- end }}
|
||||||
14
7project/src/charts/myapp-chart/templates/monitoring.yaml
Normal file
14
7project/src/charts/myapp-chart/templates/monitoring.yaml
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
apiVersion: monitoring.coreos.com/v1
|
||||||
|
kind: ServiceMonitor
|
||||||
|
metadata:
|
||||||
|
name: fastapi-servicemonitor
|
||||||
|
labels:
|
||||||
|
release: kube-prometheus-stack
|
||||||
|
spec:
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
app: {{ .Values.app.name }}
|
||||||
|
endpoints:
|
||||||
|
- port: http
|
||||||
|
path: /metrics
|
||||||
|
interval: 15s
|
||||||
@@ -18,3 +18,12 @@ stringData:
|
|||||||
RABBITMQ_PASSWORD: {{ .Values.rabbitmq.password | default "" | quote }}
|
RABBITMQ_PASSWORD: {{ .Values.rabbitmq.password | default "" | quote }}
|
||||||
RABBITMQ_USERNAME: {{ .Values.rabbitmq.username | quote }}
|
RABBITMQ_USERNAME: {{ .Values.rabbitmq.username | quote }}
|
||||||
SENTRY_DSN: {{ .Values.sentry_dsn | quote }}
|
SENTRY_DSN: {{ .Values.sentry_dsn | quote }}
|
||||||
|
DB_ENCRYPTION_KEY: {{ required "Set .Values.database.encryptionSecret" .Values.database.encryptionSecret | quote }}
|
||||||
|
SMTP_HOST: {{ .Values.smtp.host | default "" | quote }}
|
||||||
|
SMTP_PORT: {{ .Values.smtp.port | default 587 | quote }}
|
||||||
|
SMTP_USERNAME: {{ .Values.smtp.username | default "" | quote }}
|
||||||
|
SMTP_PASSWORD: {{ .Values.smtp.password | default "" | quote }}
|
||||||
|
SMTP_USE_TLS: {{ .Values.smtp.tls | default false | quote }}
|
||||||
|
SMTP_USE_SSL: {{ .Values.smtp.ssl | default false | quote }}
|
||||||
|
SMTP_FROM: {{ .Values.smtp.from | default "" | quote }}
|
||||||
|
UNIRATE_API_KEY: {{ .Values.unirate.key | default "" | quote }}
|
||||||
@@ -2,9 +2,12 @@ apiVersion: v1
|
|||||||
kind: Service
|
kind: Service
|
||||||
metadata:
|
metadata:
|
||||||
name: {{ .Values.app.name }}
|
name: {{ .Values.app.name }}
|
||||||
|
labels:
|
||||||
|
app: {{ .Values.app.name }}
|
||||||
spec:
|
spec:
|
||||||
ports:
|
ports:
|
||||||
- port: {{ .Values.service.port }}
|
- name: http
|
||||||
|
port: {{ .Values.service.port }}
|
||||||
targetPort: {{ .Values.app.port }}
|
targetPort: {{ .Values.app.port }}
|
||||||
selector:
|
selector:
|
||||||
app: {{ .Values.app.name }}
|
app: {{ .Values.app.name }}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user