mirror of https://github.com/dat515-2025/Group-8.git
synced 2026-03-22 15:12:08 +01:00

Compare commits: f58083870f ... test_arm_b (70 commits)

Commits in this range (SHA1): 3749aa4525, 94aa64addc, ba1677b2d3, 8ea1ef9eea, 4b614902b2, a152ecbe4d, 7d7dd98d0f, 5aca071ac2, 80991c7390, 1403e0029b, aa63e51e6a, 4aaaba3956, f0c28ba9e1, b560c07d62, f0b1452e30, 6effb2793a, ba7798259c, deb67f421e, 74557eeea8, 2e0619d03f, 31add42d6d, 4de79169a2, 59d53967b0, f3086f8c73, fd437b1caf, 96ebc27001, 922651fdbf, e164b185e0, 186b4fd09a, 280d495335, e73233c90a, aade78bf3f, 50e489a8e0, 1679abb71f, 573404dead, d57dd82a64, 50f37c1161, ae22d2ee5f, 509608f8c9, ed723d1d13, b0dee5e289, 640da2ee04, ab9aefd140, 4eaf46e77e, a30ae4d010, ef26e88713, 2e1dddb4f8, 25e587cea8, 3cdefc33fc, 5954e56956, 8575ef8ff5, c53e314b2a, c0bc44622f, 3d31ff4631, 8b92b9bd18, 3d26ed6a62, 67b44539f2, ff9cc712db, dc7ce9e6a1, 188cdf5727, 4cf0d2a981, 9986cce8f9, b3b5717e9e, 1da927dc07, 537d050080, 1e4f342176, c62e0adcf3, 24d86abfc4, 21305f18e2, e708f7b18b
**`.github/workflows/build-image.yaml`** (vendored, 4 changed lines)

```diff
@@ -15,7 +15,7 @@ on:
       context:
         description: "Docker build context path"
         required: false
-        default: "7project/backend"
+        default: "7project/src/backend"
         type: string
       pr_number:
         description: "PR number (required when mode=pr)"
@@ -94,7 +94,7 @@ jobs:
           tags: |
             ${{ env.IMAGE_REPO }}:${{ env.TAG1 }}
             ${{ env.IMAGE_REPO }}:${{ env.TAG2 }}
-          platforms: linux/amd64
+          platforms: linux/arm64,linux/amd64
 
       - name: Set outputs
         id: set
```
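The branch name (`test_arm_b`) and the `platforms` line above are the point of this change set: backend images are now built for linux/arm64 as well as linux/amd64. A quick way to confirm a pushed tag really carries both architectures is `docker buildx imagetools inspect`; a minimal sketch (the tag below is illustrative, substitute whatever TAG1/TAG2 resolved to in your run):

```bash
# List the per-architecture manifests behind a multi-arch tag.
# The tag is a placeholder, not a guaranteed published image.
docker buildx imagetools inspect lukastrkan/cc-app-demo:latest
# A successful multi-platform build shows one manifest entry for
# linux/amd64 and one for linux/arm64.
```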
**`.github/workflows/deploy-pr.yaml`** (vendored, 14 changed lines)

```diff
@@ -21,7 +21,7 @@ jobs:
     with:
       mode: pr
       image_repo: lukastrkan/cc-app-demo
-      context: 7project/backend
+      context: 7project/src/backend
       pr_number: ${{ github.event.pull_request.number }}
     secrets: inherit
 
@@ -33,7 +33,7 @@ jobs:
       runner: vhs
       mode: pr
       pr_number: ${{ github.event.pull_request.number }}
-      base_domain: ${{ vars.DEV_BASE_DOMAIN }}
+      base_domain: ${{ vars.PROD_DOMAIN }}
     secrets: inherit
 
   frontend:
@@ -77,7 +77,7 @@ jobs:
 
       - name: Helm upgrade/install PR preview
         env:
-          DEV_BASE_DOMAIN: ${{ secrets.BASE_DOMAIN }}
+          DEV_BASE_DOMAIN: ${{ vars.BASE_DOMAIN }}
           RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
           DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
           DIGEST: ${{ needs.build.outputs.digest }}
@@ -85,13 +85,14 @@ jobs:
           DOMAIN_SCHEME: "${{ needs.get_urls.outputs.backend_url_scheme }}"
           FRONTEND_DOMAIN: "${{ needs.get_urls.outputs.frontend_url }}"
           FRONTEND_DOMAIN_SCHEME: "${{ needs.get_urls.outputs.frontend_url_scheme }}"
+          UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
         run: |
           PR=${{ github.event.pull_request.number }}
           RELEASE=myapp-pr-$PR
           NAMESPACE=pr-$PR
-          helm upgrade --install "$RELEASE" ./7project/charts/myapp-chart \
+          helm upgrade --install "$RELEASE" ./7project/src/charts/myapp-chart \
             -n "$NAMESPACE" --create-namespace \
-            -f 7project/charts/myapp-chart/values-dev.yaml \
+            -f 7project/src/charts/myapp-chart/values-dev.yaml \
             --set prNumber="$PR" \
             --set deployment="pr-$PR" \
             --set domain="$DOMAIN" \
@@ -102,7 +103,8 @@ jobs:
             --set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
             --set-string database.password="$DB_PASSWORD" \
             --set-string database.encryptionSecret="$PR" \
-            --set-string app.name="finance-tracker-pr-$PR"
+            --set-string app.name="finance-tracker-pr-$PR" \
+            --set-string unirate.key="$UNIRATE_API_KEY"
 
       - name: Post preview URLs as PR comment
         uses: actions/github-script@v7
```
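Each pull request gets its own Helm release (`myapp-pr-<PR>`) in its own namespace (`pr-<PR>`). A sketch for inspecting or tearing one down by hand, assuming kubeconfig access to the cluster; the PR number is a placeholder:

```bash
PR=123                                    # hypothetical PR number
helm status "myapp-pr-$PR" -n "pr-$PR"    # check the preview release
helm uninstall "myapp-pr-$PR" -n "pr-$PR" # remove the release
kubectl delete namespace "pr-$PR"         # and its namespace
```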
**`.github/workflows/deploy-prod.yaml`** (vendored, 32 changed lines)

```diff
@@ -4,9 +4,9 @@ on:
   push:
     branches: [ "main" ]
     paths:
-      - 7project/backend/**
-      - 7project/frontend/**
-      - 7project/charts/myapp-chart/**
+      - ../../7project/src/backend/**
+      - ../../7project/src/frontend/**
+      - ../../7project/src/charts/myapp-chart/**
       - .github/workflows/deploy-prod.yaml
       - .github/workflows/build-image.yaml
       - .github/workflows/frontend-pages.yml
@@ -27,15 +27,17 @@ jobs:
 
   build:
     name: Build and push image (reusable)
+    needs: [test]
     uses: ./.github/workflows/build-image.yaml
     with:
       mode: prod
       image_repo: lukastrkan/cc-app-demo
-      context: 7project/backend
+      context: 7project/src/backend
     secrets: inherit
 
   get_urls:
     name: Generate Production URLs
+    needs: [test]
     uses: ./.github/workflows/url_generator.yml
     with:
       mode: prod
@@ -92,10 +94,18 @@ jobs:
           CSAS_CLIENT_ID: ${{ secrets.CSAS_CLIENT_ID }}
           CSAS_CLIENT_SECRET: ${{ secrets.CSAS_CLIENT_SECRET }}
           SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
+          SMTP_HOST: ${{ secrets.SMTP_HOST }}
+          SMTP_PORT: ${{ secrets.SMTP_PORT }}
+          SMTP_USERNAME: ${{ secrets.SMTP_USERNAME }}
+          SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
+          SMTP_USE_TLS: ${{ secrets.SMTP_USE_TLS }}
+          SMTP_USE_SSL: ${{ secrets.SMTP_USE_SSL }}
+          SMTP_FROM: ${{ secrets.SMTP_FROM }}
+          UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
         run: |
-          helm upgrade --install myapp ./7project/charts/myapp-chart \
+          helm upgrade --install myapp ./7project/src/charts/myapp-chart \
             -n prod --create-namespace \
-            -f 7project/charts/myapp-chart/values-prod.yaml \
+            -f 7project/src/charts/myapp-chart/values-prod.yaml \
             --set deployment="prod" \
             --set domain="$DOMAIN" \
             --set domain_scheme="$DOMAIN_SCHEME" \
@@ -111,4 +121,12 @@ jobs:
             --set-string oauth.csas.clientId="$CSAS_CLIENT_ID" \
             --set-string oauth.csas.clientSecret="$CSAS_CLIENT_SECRET" \
             --set-string sentry_dsn="$SENTRY_DSN" \
-            --set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}"
+            --set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}" \
+            --set-string smtp.host="$SMTP_HOST" \
+            --set smtp.port="$SMTP_PORT" \
+            --set-string smtp.username="$SMTP_USERNAME" \
+            --set-string smtp.password="$SMTP_PASSWORD" \
+            --set-string smtp.tls="$SMTP_USE_TLS" \
+            --set-string smtp.ssl="$SMTP_USE_SSL" \
+            --set-string smtp.from="$SMTP_FROM" \
+            --set-string unirate.key="$UNIRATE_API_KEY"
```
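Once the new `smtp.*` and `unirate.key` values are wired through, a quick sanity check is to ask Helm what the prod release actually received. A minimal sketch, assuming kubeconfig access to the `prod` namespace (note that `helm get values` prints secrets in plain text, so run it with care):

```bash
# Show the user-supplied values of the deployed release.
helm -n prod get values myapp
# Or re-render the chart locally to inspect the generated manifests:
helm template myapp ./7project/src/charts/myapp-chart \
  -f 7project/src/charts/myapp-chart/values-prod.yaml | less
```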
**`.github/workflows/frontend-pages.yml`** (vendored, 6 changed lines)

```diff
@@ -35,7 +35,7 @@ jobs:
     runs-on: ubuntu-latest
     defaults:
       run:
-        working-directory: 7project/frontend
+        working-directory: 7project/src/frontend
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -45,7 +45,7 @@ jobs:
         with:
           node-version: '20'
           cache: 'npm'
-          cache-dependency-path: 7project/frontend/package-lock.json
+          cache-dependency-path: 7project/src/frontend/package-lock.json
 
       - name: Install dependencies
         run: npm ci
@@ -61,7 +61,7 @@ jobs:
         uses: actions/upload-artifact@v4
         with:
           name: frontend-dist
-          path: 7project/frontend/dist
+          path: 7project/src/frontend/dist
 
   deploy:
     name: Deploy to Cloudflare Pages
```
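The Pages build is just `npm ci && npm run build` run from the new `src/` location, so it can be reproduced locally; a sketch assuming Node 20 to match the workflow:

```bash
cd 7project/src/frontend
npm ci          # clean install from package-lock.json, as in CI
npm run build   # emits dist/, the artifact uploaded as frontend-dist
```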
**`.github/workflows/run-tests.yml`** (vendored, 15 changed lines)

```diff
@@ -31,6 +31,9 @@ jobs:
       MARIADB_DB: group_project
       MARIADB_USER: appuser
       MARIADB_PASSWORD: apppass
+      # Ensure the application uses MariaDB (async) during tests
+      DATABASE_URL: mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project
+      DISABLE_METRICS: "1"
 
     steps:
       - name: Check out repository code
@@ -43,19 +46,21 @@ jobs:
 
       - name: Add test dependencies to requirements
         run: |
-          echo "pytest==8.4.2" >> ./7project/backend/requirements.txt
-          echo "pytest-asyncio==1.2.0" >> ./7project/backend/requirements.txt
+          echo "pytest==8.4.2" >> ./7project/src/backend/requirements.txt
+          echo "pytest-asyncio==1.2.0" >> ./7project/src/backend/requirements.txt
 
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install -r ./7project/backend/requirements.txt
+          pip install -r ./7project/src/backend/requirements.txt
 
       - name: Run Alembic migrations
         run: |
          alembic upgrade head
-        working-directory: ./7project/backend
+        working-directory: ./7project/src/backend
 
       - name: Run tests with pytest
+        env:
+          PYTEST_RUN_CONFIG: "True"
         run: pytest
-        working-directory: ./7project/backend
+        working-directory: ./7project/src/backend
```
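To mimic this job outside CI you mainly need the same environment variables plus a MariaDB reachable on 127.0.0.1:3306. A sketch mirroring the workflow's service-container credentials (these values are for local testing only):

```bash
export DATABASE_URL='mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project'
export DISABLE_METRICS=1
cd 7project/src/backend
alembic upgrade head            # apply migrations, as the workflow does
PYTEST_RUN_CONFIG=True pytest   # run the suite with the CI flag set
```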
**`.idea/.gitignore`** (generated, vendored, new file, 8 lines)

```diff
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
```
**`7project/.gitignore`** (vendored, 16 changed lines)

```diff
@@ -1,8 +1,8 @@
-/tofu/controlplane.yaml
-/tofu/kubeconfig
-/tofu/talosconfig
-/tofu/terraform.tfstate
-/tofu/terraform.tfstate.backup
-/tofu/worker.yaml
-/tofu/.terraform.lock.hcl
-/tofu/.terraform/
+/src/tofu/controlplane.yaml
+/src/tofu/kubeconfig
+/src/tofu/talosconfig
+/src/tofu/terraform.tfstate
+/src/tofu/terraform.tfstate.backup
+/src/tofu/worker.yaml
+/src/tofu/.terraform.lock.hcl
+/src/tofu/.terraform/
```
**`7project/.idea/.gitignore`** (generated, vendored, new file, 8 lines)

```diff
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
```
```diff
@@ -1,43 +1,6 @@
-# Lab 6: Design Document for Course Project
-
-| Lab 6: | Design Document for Course Project |
-| ----------- | ---------------------------------- |
-| Subject: | DAT515 Cloud Computing |
-| Deadline: | **September 19, 2025 23:59** |
-| Grading: | No Grade |
-| Submission: | Group |
-
-## Table of Contents
-
-- [Table of Contents](#table-of-contents)
-- [1. Design Document (design.md)](#1-design-document-designmd)
-
-The design document is the first deliverable for your project.
-We separated this out as a separate deliverable, with its own deadline, to ensure that you have a clear plan before you start coding.
-This part only needs a cursory review by the teaching staff to ensure it is sufficiently comprehensive, while still realistic.
-The teaching staff will assign you to a project mentor who will provide guidance and support throughout the development process.
-
-## 1. Design Document (design.md)
-
-You are required to prepare a design document for your application.
-The design doc should be brief, well-organized and easy to understand.
-The design doc should be prepared in markdown format and named `design.md` and submitted in the project group's repository.
-Remember that you can use [mermaid diagrams](https://github.com/mermaid-js/mermaid#readme) in markdown files.
-
-The design doc **should include** the following sections:
-
-- **Overview**: A brief description of the application and its purpose.
-- **Architecture**: The high-level architecture of the application, including components, interactions, and data flow.
-- **Technologies**: The cloud computing technologies or services used in the application.
-- **Deployment**: The deployment strategy for the application, including any infrastructure requirements.
-
-The design document should be updated throughout the development process and reflect the final implementation of your project.
-
-Optional sections may include:
-
-- Security: The security measures implemented in the application to protect data and resources.
-- Scalability: The scalability considerations for the application, including load balancing and auto-scaling.
-- Monitoring: The monitoring and logging strategy for the application to track performance and detect issues.
-- Disaster Recovery: The disaster recovery plan for the application to ensure business continuity in case of failures.
-- Cost Analysis: The cost analysis of running the application on the cloud, including pricing models and cost-saving strategies.
-- References: Any external sources or references used in the design document.
+# Personal Finance Tracker
+## Folder Structure
+- meetings: Contains note from meetings
+- scr: Source code for the project
+- checklist: Project checklist and self assessment tracking
+- report.md: Detailed report of the project
```
```diff
@@ -1,107 +0,0 @@
-import logging
-import asyncio
-
-from celery import shared_task
-
-import app.services.bank_scraper
-
-logger = logging.getLogger("celery_tasks")
-if not logger.handlers:
-    _h = logging.StreamHandler()
-    logger.addHandler(_h)
-    logger.setLevel(logging.INFO)
-
-
-def run_coro(coro) -> None:
-    """Run an async coroutine in a fresh event loop without using run_until_complete.
-
-    Primary strategy runs in a new loop in the current thread. If that fails due to
-    debugger patches (e.g., Bad file descriptor from pydevd_nest_asyncio), fall back
-    to running in a dedicated thread with its own event loop.
-    """
-    import threading
-
-    def _cleanup_loop(loop):
-        try:
-            pending = [t for t in asyncio.all_tasks(loop) if not t.done()]
-            for t in pending:
-                t.cancel()
-            if pending:
-                loop.run_until_complete(asyncio.gather(*pending, return_exceptions=True))
-        except Exception:
-            pass
-        finally:
-            try:
-                loop.close()
-            finally:
-                asyncio.set_event_loop(None)
-
-    # First attempt: Run in current thread with a fresh event loop
-    try:
-        loop = asyncio.get_event_loop_policy().new_event_loop()
-        try:
-            asyncio.set_event_loop(loop)
-            task = loop.create_task(coro)
-            task.add_done_callback(lambda _t: loop.stop())
-            loop.run_forever()
-            exc = task.exception()
-            if exc:
-                raise exc
-            return
-        finally:
-            _cleanup_loop(loop)
-    except OSError as e:
-        logger.warning("run_coro primary strategy failed (%s). Falling back to thread runner.", e)
-    except Exception:
-        # For any other unexpected errors, try thread fallback as well
-        logger.exception("run_coro primary strategy raised; attempting thread fallback")
-
-    # Fallback: Run in a dedicated thread with its own event loop
-    error = {"exc": None}
-
-    def _thread_target():
-        loop = asyncio.new_event_loop()
-        try:
-            asyncio.set_event_loop(loop)
-            task = loop.create_task(coro)
-            task.add_done_callback(lambda _t: loop.stop())
-            loop.run_forever()
-            exc = task.exception()
-            if exc:
-                error["exc"] = exc
-        finally:
-            _cleanup_loop(loop)
-
-    th = threading.Thread(target=_thread_target, name="celery-async-runner", daemon=True)
-    th.start()
-    th.join()
-    if error["exc"] is not None:
-        raise error["exc"]
-
-
-@shared_task(name="workers.send_email")
-def send_email(to: str, subject: str, body: str) -> None:
-    if not (to and subject and body):
-        logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
-        return
-
-    # Placeholder for real email sending logic
-    logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
-
-
-@shared_task(name="workers.load_transactions")
-def load_transactions(user_id: str) -> None:
-    if not user_id:
-        logger.error("Load transactions task missing user_id.")
-        return
-
-    run_coro(app.services.bank_scraper.aload_ceska_sporitelna_transactions(user_id))
-
-    # Placeholder for real transaction loading logic
-    logger.info("[Celery] Transactions loaded for user_id=%s", user_id)
-
-
-@shared_task(name="workers.load_all_transactions")
-def load_all_transactions() -> None:
-    logger.info("[Celery] Starting load_all_transactions")
-    run_coro(app.services.bank_scraper.aload_all_ceska_sporitelna_transactions())
-    logger.info("[Celery] Finished load_all_transactions")
```
```diff
@@ -1,4 +0,0 @@
-import uvicorn
-
-if __name__ == "__main__":
-    uvicorn.run("app.app:app", host="0.0.0.0", log_level="info")
```
```diff
@@ -7,64 +7,64 @@ Focus on areas that align with your project goals and interests.
 The core deliverables are required.
 This means that you must get at least 2 points for each item in this category.
 
-| **Category** | **Item** | **Max Points** | **Points** |
-|----------------------------------| --------------------------------------- | -------------- |-------------------------------------------------|
-| **Core Deliverables (Required)** | | | |
-| Codebase & Organization | Well-organized project structure | 5 | 5 |
-| | Clean, readable code | 5 | 4 |
-| | Use planning tool (e.g., GitHub issues) | 5 | 4 |
-| | Proper version control usage | 5 | 5 |
-| 23 | Complete source code | 5 | 5 |
-| Documentation | Comprehensive reproducibility report | 10 | 4-5 |
-| | Updated design document | 5 | 2 |
-| | Clear build/deployment instructions | 5 | 2 |
-| | Troubleshooting guide | 5 | 1 |
-| | Completed self-assessment table | 5 | 2 |
-| 14 | Hour sheets for all members | 5 | 3 |
-| Presentation Video | Project demonstration | 5 | 0 |
-| | Code walk-through | 5 | 0 |
-| 0 | Deployment showcase | 5 | 0 |
-| **Technical Implementation** | | | |
-| Application Functionality | Basic functionality works | 10 | 8 |
-| | Advanced features implemented | 10 | 0 |
-| | Error handling & robustness | 10 | 4 |
-| 16 | User-friendly interface | 5 | 4 |
-| Backend & Architecture | Stateless web server | 5 | 5 |
-| | Stateful application | 10 | ? WHAT DOES THIS MEAN |
-| | Database integration | 10 | 10 |
-| | API design | 5 | 5 |
-| 20 | Microservices architecture | 10 | 0 |
-| Cloud Integration | Basic cloud deployment | 10 | 10 |
-| | Cloud APIs usage | 10 | ? WHAT DOES THIS MEAN |
-| | Serverless components | 10 | 0 |
-| 10 | Advanced cloud services | 5 | 0 |
-| **DevOps & Deployment** | | | |
-| Containerization | Basic Dockerfile | 5 | 5 |
-| | Optimized Dockerfile | 5 | 0 |
-| | Docker Compose | 5 | 5 - dev only |
-| 15 | Persistent storage | 5 | 5 |
-| Deployment & Scaling | Manual deployment | 5 | 5 |
-| | Automated deployment | 5 | 5 |
-| | Multiple replicas | 5 | 5 |
-| 20 | Kubernetes deployment | 10 | 10 |
-| **Quality Assurance** | | | |
-| Testing | Unit tests | 5 | 2 |
-| | Integration tests | 5 | 2 |
-| | End-to-end tests | 5 | 5 |
-| 9 | Performance testing | 5 | 0 |
-| Monitoring & Operations | Health checks | 5 | 5 |
-| | Logging | 5 | 2 - only to terminal add logstash |
-| 9 | Metrics/Monitoring | 5 | 2 - only DB, need to create Prometheus endpoint |
-| Security | HTTPS/TLS | 5 | 5 |
-| | Authentication | 5 | 5 |
-| 15 | Authorization | 5 | 5 |
-| **Innovation & Excellence** | | | |
-| Advanced Features and | AI/ML Integration | 10 | 0 |
-| Technical Excellence | Real-time features | 10 | 0 |
-| | Creative problem solving | 10 | ? |
-| | Performance optimization | 5 | 2 |
-| 2 | Exceptional user experience | 5 | 0 |
-| **Total** | | **255** | **153** |
+| **Category** | **Item** | **Max Points** | **Points** | **Comments** |
+|----------------------------------| --------------------------------------- | -------------- |-------------------------------------------------| |
+| **Core Deliverables (Required)** | | | | |
+| Codebase & Organization | Well-organized project structure | 5 | 5 | |
+| | Clean, readable code | 5 | 4 | |
+| | Use planning tool (e.g., GitHub issues) | 5 | 4 | |
+| | Proper version control usage | 5 | 5 | |
+| 23 | Complete source code | 5 | 5 | |
+| Documentation | Comprehensive reproducibility report | 10 | 4-5 | |
+| | Updated design document | 5 | 2 | |
+| | Clear build/deployment instructions | 5 | 2 | |
+| | Troubleshooting guide | 5 | 1 | |
+| | Completed self-assessment table | 5 | 2 | |
+| 14 | Hour sheets for all members | 5 | 3 | |
+| Presentation Video | Project demonstration | 5 | 0 | |
+| | Code walk-through | 5 | 0 | |
+| 0 | Deployment showcase | 5 | 0 | |
+| **Technical Implementation** | | | | |
+| Application Functionality | Basic functionality works | 10 | 8 | |
+| | Advanced features implemented | 10 | 0 | |
+| | Error handling & robustness | 10 | 4 | |
+| 16 | User-friendly interface | 5 | 4 | |
+| Backend & Architecture | Stateless web server | 5 | 5 | |
+| | Stateful application | 10 | ? WHAT DOES THIS MEAN | |
+| | Database integration | 10 | 10 | |
+| | API design | 5 | 5 | |
+| 20 | Microservices architecture | 10 | 0 | |
+| Cloud Integration | Basic cloud deployment | 10 | 10 | |
+| | Cloud APIs usage | 10 | ? WHAT DOES THIS MEAN | |
+| | Serverless components | 10 | 0 | |
+| 10 | Advanced cloud services | 5 | 0 | |
+| **DevOps & Deployment** | | | | |
+| Containerization | Basic Dockerfile | 5 | 5 | |
+| | Optimized Dockerfile | 5 | 0 | |
+| | Docker Compose | 5 | 5 - dev only | |
+| 15 | Persistent storage | 5 | 5 | |
+| Deployment & Scaling | Manual deployment | 5 | 5 | |
+| | Automated deployment | 5 | 5 | |
+| | Multiple replicas | 5 | 5 | |
+| 20 | Kubernetes deployment | 10 | 10 | |
+| **Quality Assurance** | | | | |
+| Testing | Unit tests | 5 | 2 | |
+| | Integration tests | 5 | 2 | |
+| | End-to-end tests | 5 | 5 | |
+| 9 | Performance testing | 5 | 0 | |
+| Monitoring & Operations | Health checks | 5 | 5 | |
+| | Logging | 5 | 2 - only to terminal add logstash | |
+| 9 | Metrics/Monitoring | 5 | 2 - only DB, need to create Prometheus endpoint | |
+| Security | HTTPS/TLS | 5 | 5 | |
+| | Authentication | 5 | 5 | |
+| 15 | Authorization | 5 | 5 | |
+| **Innovation & Excellence** | | | | |
+| Advanced Features and | AI/ML Integration | 10 | 0 | |
+| Technical Excellence | Real-time features | 10 | 0 | |
+| | Creative problem solving | 10 | ? | |
+| | Performance optimization | 5 | 2 | |
+| 2 | Exceptional user experience | 5 | 0 | |
+| **Total** | | **255** | **153** | |
 
 ## Grading Scale
 
```
```diff
@@ -1,5 +0,0 @@
-export const BACKEND_URL: string =
-  import.meta.env.VITE_BACKEND_URL ?? '';
-
-export const VITE_UNIRATE_API_KEY: string =
-  import.meta.env.VITE_UNIRATE_API_KEY ?? 'wYXMiA0bz8AVRHtiS9hbKIr4VP3k5Qff8XnQdKQM45YM3IwFWP6y73r3KMkv1590';
```
````diff
@@ -1,283 +1,459 @@
 # Personal finance tracker
 
-> **Instructions**:
+<!--- **Instructions**:
 > This template provides the structure for your project report.
 > Replace the placeholder text with your actual content.
-> Remove instructions that are not relevant for your project, but leave the headings along with a (NA) label.
+> Remove instructions that are not relevant for your project, but leave the headings along with a (NA) label. -->
 
 ## Project Overview
 
 **Project Name**: Personal Finance Tracker
 
+**Deployment URL**: https://finance.ltrk.cz/
+
 **Group Members**:
 
 - 289229, Lukáš Trkan, lukastrkan
-- 289258, Dejan Ribarovski, derib2613, ribardej
+- 289258, Dejan Ribarovski, ribardej (derib2613)
 
 **Brief Description**:
-Our application is a finance tracker, so a person can easily track his cash flow
-through multiple bank accounts. Person can label transactions with custom categories
-and later filter by them.
+Our application allows users to easily track their cash flow
+through multiple bank accounts. Users can label their transactions with custom categories that can be later used for
+filtering and visualization. New transactions are automatically fetched in the background.
 
 ## Architecture Overview
-Our system is a full‑stack web application composed of a React frontend, a FastAPI backend, a PostgreSQL database, and asynchronous background workers powered by Celery with RabbitMQ. Redis is available for caching/kv and may be used by Celery as a result backend. The backend exposes REST endpoints for authentication (email/password and OAuth), users, categories, and transactions. A thin controller layer (FastAPI routers) lives under app/api. Infrastructure for Kubernetes is provided via OpenTofu (Terraform‑compatible) modules and the application is packaged via a Helm chart.
+
+Our system is a full‑stack web application composed of a React frontend, a FastAPI backend,
+a MariaDB database with Maxscale, and background workers powered by Celery with RabbitMQ.
+The backend exposes REST endpoints for authentication (email/password and OAuth), users, categories,
+transactions, exchange rates and bank APIs. Infrastructure for Kubernetes is managed via Terraform/OpenTofu and
+the application is packaged via a Helm chart. This all is deployed on private TalosOS cluster running on Proxmox VE with
+CI/CD and with public access over Cloudflare tunnels. Static files for frontend are served via Cloudflare pages.
+Other services deployed in the cluster include Longhorn for persistent storage, Prometheus with Grafana for monitoring.
 
 ### High-Level Architecture
 
 ```mermaid
-flowchart LR
-proc_queue[Message Queue] --> proc_queue_worker[Worker Service]
-proc_queue_worker --> ext_mail[(Email Service)]
-proc_cron[Task planner] --> proc_queue
-proc_queue_worker --> ext_bank[(Bank API)]
-proc_queue_worker --> db
-client[Client/Frontend] <--> svc[Backend API]
+flowchart TB
+n3(("User")) <--> client["Frontend"]
+proc_queue["Message Queue"] --> proc_queue_worker["Worker Service"]
+proc_queue_worker -- SMTP --> ext_mail[("Email Service")]
+proc_queue_worker <-- HTTP request/response --> ext_bank[("Bank API")]
+proc_queue_worker <--> db[("Database")]
+proc_cron["Cron"] <-- HTTP request/response --> svc["Backend API"]
 svc --> proc_queue
-svc <--> db[(Database)]
+n2["Cloudflare tunnel"] <-- HTTP request/response --> svc
+svc <--> db
+svc <-- HTTP request/response --> api[("UniRate API")]
+client <-- HTTP request/response --> n2
 ```
 
 The workflow works in the following way:
 
 - Client connects to the frontend. After login, frontend automatically fetches the stored transactions from
-the database via the backend API
-- When the client opts for fetching new transactions via the Bank API, the backend delegates the task
-to a background worker service via the Message queue.
+the database via the backend API and currency rates from UniRate API.
+- When the client opts for fetching new transactions via the Bank API, cron will trigger periodic fetching
+using background worker.
 - After successful load, these transactions are stored to the database and displayed to the client
-- There is also a Task planner, that executes periodic tasks, like fetching new transactions automatically from the Bank API
+
+### Features
+
+- The stored transactions are encrypted in the DB for security reasons.
+- For every pull request the full APP is deployed on a separate URL and the tests are run by github CI/CD
+- On every push to main, the production app is automatically updated
+- UI is responsive for mobile devices
+- Slow operations (emails, transactions fetching) are handled
+in the background by Celery workers.
+- App is monitored using prometheus metrics endpoint and metrics are shown in Grafana dashboard.
 
 ### Components
 
-- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles login/registration, shows latest transactions, filtering, and allows adding transactions.
-- Backend API (backend/app): FastAPI app with routers under app/api for auth, categories, and transactions. Uses FastAPI Users for auth (JWT + OAuth), SQLAlchemy ORM, and Pydantic v2 schemas.
-- Worker service (backend/app/workers): Celery worker handling asynchronous tasks (e.g., sending verification emails, future background processing).
-- Database (PostgreSQL): Persists users, categories, transactions; schema managed by Alembic migrations.
-- Message Queue (RabbitMQ): Transports background jobs from the API to the worker.
-- Cache/Result Store (Redis): Available for caching or Celery result backend.
-- Infrastructure as Code (tofu/): OpenTofu modules provisioning cluster services (RabbitMQ, Redis, Argo CD, cert-manager, Cloudflare tunnel, etc.).
+- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles
+login/registration, shows latest transactions, filtering, and allows adding transactions.
+- Backend API (backend/app): FastAPI app with routers under app/api for auth, users, categories, transactions, exchange
+rates and bankAPI. Uses FastAPI Users for auth (JWT + OAuth), SQLAlchemy ORM, and Pydantic v2 schemas.
+- Worker service (backend/app/workers): Celery worker handling background tasks (emails, transactions fetching).
+- Database (MariaDB with Maxscale): Persists users, categories, transactions; schema managed by Alembic migrations.
+- Message Queue (RabbitMQ): Queues background tasks for Celery workers.
+- Infrastructure as Code (tofu/): OpenTofu modules provisioning cluster services (RabbitMQ, Redis, Cloudflare tunnel,
+etc.).
 - Deployment Chart (charts/myapp-chart/): Helm chart to deploy the application to Kubernetes.
 
 ### Technologies Used
 
 - Backend: Python, FastAPI, FastAPI Users, SQLAlchemy, Pydantic, Alembic, Celery
 - Frontend: React, TypeScript, Vite
-- Database: MariaDB (Maxscale)
+- Database: MariaDB with Maxscale
 - Background jobs: RabbitMQ, Celery
 - Containerization/Orchestration: Docker, Docker Compose (dev), Kubernetes, Helm
-- IaC/Platform: Proxmox, Talos, Cloudflare pages, OpenTofu (Terraform), cert-manager, MetalLB, Cloudflare Tunnel, Prometheus, Loki
+- IaC/Platform: Proxmox, Talos, Cloudflare pages, OpenTofu (Terraform), cert-manager, MetalLB, Cloudflare Tunnel,
+Prometheus, Loki
 
 ## Prerequisites
 
 ### System Requirements
 
-- Operating System (dev): Linux, macOS, or Windows with Docker support
-- Operating System (prod): Linux with kubernetes
-- Minimum RAM: 4 GB (8 GB recommended for running backend, frontend, and database together)
-- Storage: 4 GB free (Docker images may require additional space)
+#### Development
+
+- Minimum RAM: 8 GB
+- Storage: 10 GB+ free
+
+#### Production
+
+- 1 + 4 nodes
+- CPU: 4 cores
+- RAM: 8 GB
+- Storage: 200 GB
 
 ### Required Software
 
-- Docker Desktop or Docker Engine
+#### Development
+
+- Docker
 - Docker Compose
 - Node.js and npm
-- Python 3.12+
+- Python 3.12
 - MariaDB 11
-- Helm 3.12+ and kubectl 1.29+
+
+#### Production
+
+##### Minimal:
+
+- domain name with Cloudflare`s nameservers - tunnel, pages
+- Kubernetes cluster
+- kubectl
+- Helm
 - OpenTofu
 
-### Environment Variables (common)
-
-# TODO: UPDATE
-- Backend: SECRET, FRONTEND_URL, BACKEND_URL, DATABASE_URL, RABBITMQ_URL, REDIS_URL
-
-- OAuth vars (Backend): MOJEID_CLIENT_ID/SECRET, BANKID_CLIENT_ID/SECRET (optional)
-- Frontend: VITE_BACKEND_URL
+##### Our setup specifics:
+
+- Proxmox VE
+- TalosOS cluster
+- talosctl
+- GitHub self-hosted runner with access to the cluster
+- TailScale for remote access to cluster
+
+### Environment Variables
+
+#### Backend
+
+- `MOJEID_CLIENT_ID`, `MOJEID_CLIENT_SECRET` - OAuth client ID and secret for
+[MojeID](https://www.mojeid.cz/en/provider/)
+- `BANKID_CLIENT_ID`, `BANKID_CLIENT_SECRET` - OAuth client ID and secret for [BankID](https://developer.bankid.cz/)
+- `CSAS_CLIENT_ID`, `CSAS_CLIENT_SECRET` - OAuth client ID and secret for [Česká
+spořitelna](https://developers.erstegroup.com/docs/apis/bank.csas)
+- `DATABASE_URL` (or `MARIADB_HOST`, `MARIADB_PORT`, `MARIADB_DB`, `MARIADB_USER`, `MARIADB_PASSWORD`) - MariaDB
+connection details
+- `RABBITMQ_USERNAME`, `RABBITMQ_PASSWORD` - credentials for RabbitMQ
+- `SENTRY_DSN` - Sentry DSN for error reporting
+- `DB_ENCRYPTION_KEY` - symmetric key for encrypting sensitive data in the database
+- `SMTP_HOST`, `SMTP_PORT`, `SMTP_USERNAME`, `SMTP_PASSWORD`, `SMTP_USE_TLS`, `SMTP_USE_SSL`, `SMTP_FROM` - SMTP
+configuration (host, port, auth credentials, TLS/SSL options, sender).
+- `UNIRATE_API_KEY` - API key for UniRate.
+
+#### Frontend
+
+- `VITE_BACKEND_URL` - URL of the backend API
 
````
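The incoming Environment Variables section maps naturally onto a local `.env` file; a minimal sketch with placeholder values only (none of these are working credentials, and which variables are strictly required is not stated in the report):

```bash
# Hypothetical local .env for the backend - every value is a placeholder.
DATABASE_URL=mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project
RABBITMQ_USERNAME=guest
RABBITMQ_PASSWORD=guest
DB_ENCRYPTION_KEY=CHANGE_ME
SMTP_HOST=smtp.example.com
SMTP_PORT=587
SMTP_USE_TLS=true
SMTP_FROM=noreply@example.com
UNIRATE_API_KEY=CHANGE_ME
```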
````diff
 ### Dependencies (key libraries)
-Backend: FastAPI, fastapi-users, SQLAlchemy, pydantic v2, Alembic, Celery, uvicorn
+
+Backend: FastAPI, fastapi-users, SQLAlchemy, pydantic v2, Alembic, Celery, uvicorn, pytest
 Frontend: React, TypeScript, Vite
 
 ## Local development
 
-You can run the project with Docker Compose and Python virtual environment for testing and dev purposes
+You can run the project with Docker Compose and Python virtual environment for testing and development purposes
 
 ### 1) Clone the Repository
 
 ```bash
 git clone https://github.com/dat515-2025/Group-8.git
-cd 7project
+cd Group-8/7project
 ```
 
 ### 2) Install dependencies
 
 Backend
 
 ```bash
+cd backend
 python3 -m venv .venv
 source .venv/bin/activate
 pip install -r requirements.txt
 ```
-Frontend
+
+### 3) Run Docker containers
 
 ```bash
-# In 7project/frontend
-npm install
+cd ..
+docker compose up -d
 ```
 
-### 3) Manual Local Run
+### 4) Prepare the database
 
-Backend
 ```bash
-# From the 7project/ directory
-docker compose up --build
-# This starts: MariaDB, RabbitMQ
-
-# Set environment variables (or create .env file)
-# TODO: fix
-export SECRET=CHANGE_ME_SECRET
-export FRONTEND_DOMAIN_SCHEME=http://localhost:5173
-export BANKID_CLIENT_ID=CHANGE_ME
-export BANKID_CLIENT_SECRET=CHANGE_ME
-export CSAS_CLIENT_ID=CHANGE_ME
-export CSAS_CLIENT_SECRET=CHANGE_ME
-export MOJEID_CLIENT_ID=CHANGE_ME
-export MOJEID_CLIENT_SECRET=CHANGE_ME
-# Apply DB migrations (Alembic)
-# From 7project
 bash upgrade_database.sh
+```
 
-# Run API
+### 5) Run backend
+
+```bash
+cd backend
+
+#TODO: set env variables
 uvicorn app.app:fastApi --reload --host 0.0.0.0 --port 8000
+```
````
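The `#TODO: set env variables` left in step 5 can be filled from the Environment Variables section earlier in the report; a hedged sketch of a plausible minimum for a local run (variable names come from that section, values are placeholders, and the exact required set is an assumption):

```bash
# Placeholders only - substitute real credentials before starting uvicorn.
export DATABASE_URL='mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project'
export RABBITMQ_USERNAME=guest
export RABBITMQ_PASSWORD=guest
export DB_ENCRYPTION_KEY=CHANGE_ME
uvicorn app.app:fastApi --reload --host 0.0.0.0 --port 8000
```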
````diff
 
+### 6) Run Celery worker (optional, in another terminal)
+
+```bash
+cd Group-8/7project/src/backend
+source .venv/bin/activate
 celery -A app.celery_app.celery_app worker -l info
 ```
 
-Frontend
+### 7) Install frontend dependencies and run
 
 ```bash
-# Configure backend URL for dev
-echo 'VITE_BACKEND_URL=http://127.0.0.1:8000' > .env
+cd ../frontend
+npm i
 npm run dev
-# Open http://localhost:5173
 ```
 
-- Backend default: http://127.0.0.1:8000 (OpenAPI at /docs)
-- Frontend default: http://localhost:5173
+- Backend available at: http://127.0.0.1:8000 (OpenAPI at /docs)
+- Frontend available at: http://localhost:5173
 
 ## Build Instructions
 
 ### Backend
 
 ```bash
-# run in project7/backend
-docker buildx build --platform linux/amd64,linux/arm64 -t your_container_registry/your_name --push .
+cd 7project/backend
+# Dont forget to set correct image tag with your registry and name
+# For example lukastrkan/cc-app-demo or gitea.ltrk.dev/lukas/cc-app-demo
+docker buildx build --platform linux/amd64,linux/arm64 -t CHANGE_ME --push .
 ```
 
 ### Frontend
 
 ```bash
-# run in project7/frontend
+cd project7/src/frontend
 npm ci
 npm run build
 ```
 
 ## Deployment Instructions
 
 ### Setup Cluster
-Deployment should work on any Kubernetes cluster. However, we are using 4 TalosOS virtual machines (1 control plane, 3 workers)
+
+Deployment should work on any Kubernetes cluster. However, we are using 4 TalosOS virtual machines (1 control plane, 3
+workers)
 running on top of Proxmox VE.
 
-1) Create 4 VMs with TalosOS
+1) Create at least 4 VMs with TalosOS (4 cores, 8 GB RAM, 200 GB disk)
 2) Install talosctl for your OS: https://docs.siderolabs.com/talos/v1.10/getting-started/talosctl
 3) Generate Talos config
-```bash
-# TODO: add commands
-```
-4) Edit the generated worker.yaml
-- add google container registry mirror
-- add modules from config generator
-- add extramounts for persistent storage
-- add kernel modules
-
-5) Apply the config to the VMs
+4) Navigate to tofu directory
 ```bash
-#TODO: add config apply commands
+cd 7project/src/tofu
 ```
 
+5) Set IP addresses in environment variables
+
+```bash
+CONTROL_PLANE_IP=<control-plane-ip>
+WORKER1_IP=<worker1-ip>
+WORKER2_IP=<worker2-ip>
+WORKER3_IP=<worker3-ip>
+WORKER4_IP=<worker4-ip>
+....
+```
+
-6) Verify the cluster is up
+6) Create config files
 
 ```bash
+# change my-cluster to your desired cluster name
+talosctl gen config my-cluster https://$CONTROL_PLANE_IP:6443
 ```
 
-7) Export kubeconfig
-```bash
-# TODO: add export command
-```
+7) Edit the generated configs
+
+Apply the following changes to `worker.yaml`:
+
+1) Add mounts for persistent storage to `machine.kubelet.extraMounts` section:
+
+```yaml
+extraMounts:
+  - destination: /var/lib/longhorn
+    type: bind
+    source: /var/lib/longhorn
+    options:
+      - bind
+      - rshared
+      - rw
+```
+
+2) Change `machine.install.image` to image with extra modules:
+
+```yaml
+image: factory.talos.dev/metal-installer/88d1f7a5c4f1d3aba7df787c448c1d3d008ed29cfb34af53fa0df4336a56040b:v1.11.1
+```
+
+or you can use latest image generated at https://factory.talos.dev with following options:
+
+- Bare-metal machine
+- your Talos os version
+- amd64 architecture
+- siderolabs/iscsi-tools
+- siderolabs/util-linux-tools
+- (Optionally) siderolabs/qemu-guest-agent
+
+Then copy "Initial Installation" value and paste it to the image field.
+
+3) Add docker registry mirror to `machine.registries.mirrors` section:
+
+```yaml
+registries:
+  mirrors:
+    docker.io:
+      endpoints:
+        - https://mirror.gcr.io
+        - https://registry-1.docker.io
+```
+
+8) Apply configs to the VMs
+
+```bash
+talosctl apply-config --insecure --nodes $CONTROL_PLANE_IP --file controlplane.yaml
+talosctl apply-config --insecure --nodes $WORKER1_IP --file worker.yaml
+talosctl apply-config --insecure --nodes $WORKER2_IP --file worker.yaml
+talosctl apply-config --insecure --nodes $WORKER3_IP --file worker.yaml
+talosctl apply-config --insecure --nodes $WORKER4_IP --file worker.yaml
+```
+
+9) Bootstrap the cluster and retrieve kubeconfig
+
+```bash
+export TALOSCONFIG=$(pwd)/talosconfig
+talosctl config endpoint https://$CONTROL_PLANE_IP:6443
+talosctl config node $CONTROL_PLANE_IP
+
+talosctl bootstrap
+
+talosctl kubeconfig .
+```
+
+You can now use k8s client like https://headlamp.dev/ with the generated kubeconfig file.
````
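Before installing services it is worth confirming the cluster actually came up; a sketch using the kubeconfig and talosconfig written in the previous step:

```bash
export KUBECONFIG=$(pwd)/kubeconfig
kubectl get nodes -o wide        # all five machines should eventually report Ready
talosctl health -n $CONTROL_PLANE_IP   # Talos-level health check of the cluster
```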
````diff
 
+### Install base services to the cluster
+
+1) Copy and edit variables
+
-### Install
-
-1) Install base services to cluster
-
 ```bash
-cd tofu
-# copy and edit variables
 cp terraform.tfvars.example terraform.tfvars
-# authenticate to your cluster/cloud as needed, then:
+```
 
+- `metallb_ip_range` - set to range available in your network for load balancer services
+- `mariadb_password` - password for internal mariadb user
+- `mariadb_root_password` - password for root user
+- `mariadb_user_name` - username for admin user
+- `mariadb_user_host` - allowed hosts for admin user
+- `mariadb_user_password` - password for admin user
+- `metallb_maxscale_ip`, `metallb_service_ip`, `metallb_primary_ip`, `metallb_secondary_ip` - IPs for database
+cluster,
+set them to static IPs from the `metallb_ip_range`
+- `s3_enabled`, `s3_bucket`, `s3_region`, `s3_endpoint`, `s3_key_id`, `s3_key_secret` - S3 compatible storage for
+backups (optional)
+- `phpmyadmin_enabled` - set to false if you want to disable phpmyadmin
+- `rabbitmq-password` - password for RabbitMQ
+
+- `cloudflare_account_id` - your Cloudflare account ID
+- `cloudflare_api_token` - your Cloudflare API token with permissions to manage tunnels and DNS
+- `cloudflare_email` - your Cloudflare account email
+- `cloudflare_tunnel_name` - name for the tunnel
+- `cloudflare_domain` - your domain name managed in Cloudflare
+
+2) Deploy without Cloudflare module first
+
+```bash
 tofu init
 tofu apply -exclude modules.cloudflare
+```
+
+3) Deploy rest of the modules
+
+```bash
 tofu apply
 ```
 
````
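A sketch for sanity-checking the base services once both `tofu apply` passes have finished (which namespaces the modules create is not stated in the report, so the checks below are deliberately broad):

```bash
tofu plan                                # should report no pending changes
kubectl get pods -A                      # RabbitMQ, MariaDB/Maxscale, Longhorn, monitoring, tunnel pods
kubectl get svc -A | grep LoadBalancer   # MetalLB should have assigned the configured static IPs
```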
````diff
-2) Deploy the app using Helm
-
-```bash
-# Set the namespace
-kubectl create namespace myapp || true
-
-# Install/upgrade the chart with required values
-helm upgrade --install myapp charts/myapp-chart \
-  -n myapp \
-  -f charts/myapp-chart/values.yaml \
-  --set image.backend.repository=myorg/myapp-backend \
-  --set image.backend.tag=latest \
-  --set env.BACKEND_URL="https://myapp.example.com" \
-  --set env.FRONTEND_URL="https://myapp.example.com" \
-  --set env.SECRET="CHANGE_ME_SECRET"
-```
-
-Adjust values to your registry and domain. The chart’s NOTES.txt includes additional examples.
-
-3) Expose and access
-- If using Cloudflare Tunnel or an ingress, configure DNS accordingly (see tofu/modules/cloudflare and deployment/tunnel.yaml).
-- For quick testing without ingress:
-```bash
-kubectl -n myapp port-forward deploy/myapp-backend 8000:8000
-kubectl -n myapp port-forward deploy/myapp-frontend 5173:80
-```
-
-### Verification
-
-```bash
-# Check pods
-kubectl -n myapp get pods
-
-# Backend health
-curl -i http://127.0.0.1:8000/
-# OpenAPI
-open http://127.0.0.1:8000/docs
-
-# Frontend (if port-forwarded)
-open http://localhost:5173
-```
+### Configure deployment
+
+1) Create self-hosted runner with access to the cluster or make cluster publicly accessible
+2) Change `jobs.deploy.runs-on` in `.github/workflows/deploy-prod.yml` and in `.github/workflows/deploy-pr.yaml` to your
+runner label
+3) Add variables to GitHub in repository settings:
+- `PROD_DOMAIN` - base domain for deployments (e.g. ltrk.cz)
+- `DEV_FRONTEND_BASE_DOMAIN` - base domain for your cloudflare pages
+4) Add secrets to GitHub in repository settings:
+- CLOUDFLARE_ACCOUNT_ID - same as in tofu/terraform.tfvars
+- CLOUDFLARE_API_TOKEN - same as in tofu/terraform.tfvars
+- DOCKER_USER - your docker registry username
+- DOCKER_PASSWORD - your docker registry password
+- KUBE_CONFIG - content of your kubeconfig file for the cluster
+- PROD_DB_PASSWORD - same as MARIADB_PASSWORD
+- PROD_RABBITMQ_PASSWORD - same as MARIADB_PASSWORD
+- PROD_DB_ENCRYPTION_KEY - same as DB_ENCRYPTION_KEY
+- MOJEID_CLIENT_ID
+- MOJEID_CLIENT_SECRET
+- BANKID_CLIENT_ID
+- BANKID_CLIENT_SECRET
+- CSAS_CLIENT_ID
+- CSAS_CLIENT_SECRET
+- SENTRY_DSN
+- SMTP_HOST
+- SMTP_PORT
+- SMTP_USERNAME
+- SMTP_PASSWORD
+- SMTP_FROM
+- UNIRATE_API_KEY
+5) On Github open Actions tab, select "Deploy Prod" and run workflow manually
 
 ## Testing Instructions
-The tests are located in 7project/backend/tests directory
-If you want to test locally, you have to have the DB running locally as well (start the docker compose in /backend).
+
+The tests are located in 7project/backend/tests directory. All tests are run by GitHub actions on every pull request and
+push to main.
+See the workflow [here](../.github/workflows/run-tests.yml).
+
+If you want to run the tests locally, the preferred way is to use a [bash script](backend/test_locally.sh)
+that will start a test DB container with [docker compose](backend/docker-compose.test.yml) and remove it afterwards.
 
 ```bash
-cd backend
+cd 7project/src/backend
+bash test_locally.sh
 ```
 
 ### Unit Tests
-There are only 3 basic unit tests, since our services logic is very simple
+
+There are only 5 basic unit tests, since our services logic is very simple
 
 ```bash
-pytest tests/test_unit_user_service.py
+bash test_locally.sh --only-unit
 ```
 
 ### Integration Tests
-There are 11 basic unit tests, testing the individual backend API logic
+
+There are 9 basic unit tests, testing the individual backend API logic
 
 ```bash
-pytest tests/test_integration_app.py
+bash test_locally.sh --only-integration
 ```
 
 ### End-to-End Tests
-There are 7 e2e tests testing more complex app logic
+
+There are 7 e2e tests, testing more complex app logic
 
 ```bash
-pytest tests/test_e2e.py
+bash test_locally.sh --only-e2e
 ```
 
 ## Usage Examples
@@ -310,7 +486,10 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
 
 ### Frontend
 
-- Start with: npm run dev in 7project/frontend
+- Start with:
+```bash
+npm run dev in 7project/src/frontend
+```
 - Ensure VITE_BACKEND_URL is set to the backend URL (e.g., http://127.0.0.1:8000)
 - Open http://localhost:5173
 - Login, view latest transactions, filter, and add new transactions from the UI.
````
@@ -332,6 +511,9 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route

## Troubleshooting

+- running on ARM
+- tofu apply error

### Common Issues

#### Issue 1: [Common problem]
@@ -360,18 +542,18 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route

> This information is used for individual grading.
> Link to the specific commit on GitHub for each contribution.

| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|----------------|-------------|--------|------------|------------|-------|
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8#) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
| [Design Document](https://github.com/dat515-2025/Group-8/blob/main/6design/design.md) | Both | ✅ Complete | 4 Hours | Easy | [Any notes] |
| [Backend API Development](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/api) | Dejan | ✅ Complete | 12 hours | Medium | [Any notes] |
-| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | 🔄 In Progress | [X hours] | Medium | [Any notes] |
+| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
| [Frontend Development](https://github.com/dat515-2025/Group-8/tree/main/7project/frontend) | Dejan | ✅ Complete | 17 hours | Medium | [Any notes] |
-| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | [X hours] | Easy | [Any notes] |
+| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | 3 hours | Easy | [Any notes] |
-| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | [Any notes] |
+| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | Using a Talos cluster running in Proxmox - easy snapshots etc. Frontend deployed at Cloudflare Pages. |
| [Testing Implementation](https://github.com/dat515-2025/group-name) | Dejan | ✅ Complete | 16 hours | Medium | [Any notes] |
| [Documentation](https://github.com/dat515-2025/group-name) | Both | 🔄 In Progress | [X hours] | Easy | [Any notes] |
| [Presentation Video](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Medium | [Any notes] |

**Legend**: ✅ Complete | 🔄 In Progress | ⏳ Pending | ❌ Not Started
@@ -381,30 +563,46 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route

### [Lukáš]

-| Date | Activity | Hours | Description |
-|----------------|---------------------|------------|----------------------------------------------------|
-| 4.10 to 10.10 | Initial Setup | 40 | Repository setup, project structure, cluster setup |
-| 14.10 to 16.10 | Backend Development | 12 | Implemented user authentication - oauth |
-| 8.10 to 12.10 | CI/CD | 10 | Created database schema and models |
-| [Date] | Testing | [X.X] | Unit tests for API endpoints |
-| [Date] | Documentation | [X.X] | Updated README and design doc |
-| **Total** | | **[XX.X]** | |
+## Hour Sheet
+
+**Name:** Lukáš Trkan
+
+| Date | Activity | Hours | Description | Representative Commit / PR |
+|:----------------|:----------------------------|:--------|:-------------------------------------------------------------------------------------|:-------------------------------------------------------|
+| 18.9. - 19.9. | Initial Setup & Design | 40 | Repository init, system design diagrams, basic Terraform setup | `feat(infrastructure): add basic terraform resources` |
+| 20.9. - 5.10. | Core Infrastructure & CI/CD | 12 | K8s setup (ArgoCD), CI/CD workflows, RabbitMQ, Redis, Celery workers, DB migrations | `PR #2`, `feat(infrastructure): add rabbitmq cluster` |
+| 6.10. - 9.10. | Frontend Infra & DB | 5 | Deployed frontend to Cloudflare, setup metrics, created database models | `PR #16` (Cloudflare), `PR #19` (DB structure) |
+| 10.10. - 11.10. | Backend | 5 | Implemented OAuth support (MojeID, BankID) | `feat(auth): add support for OAuth and MojeID` |
+| 12.10. | Infrastructure | 2 | Added database backups | `feat(infrastructure): add backups` |
+| 16.10. | Infrastructure | 4 | Implemented secrets management, fixed deployment/env variables | `PR #29` (Deployment envs) |
+| 17.10. | Monitoring | 1 | Added Sentry logging | `feat(app): add sentry loging` |
+| 21.10. - 22.10. | Backend | 8 | Added ČSAS bank connection | `PR #32` (Fix React OAuth) |
+| 29.10. - 30.10. | Backend | 5 | Implemented transaction encryption, added bank scraping | `PR #39` (CSAS Scraping) |
+| 30.10. | Monitoring | 6 | Implemented Loki logging and basic Prometheus metrics | `PR #42` (Prometheus metrics) |
+| 9.11. | Monitoring | 2 | Added custom Prometheus metrics | `PR #46` (Prometheus custom metrics) |
+| 11.11. | Tests | 1 | Investigated and fixed broken Pytest environment | `fix(tests): set pytest env` |
+| 11.11. - 12.11. | Features & Deployment | 6 | Added cron support, email sender service, updated workers & image | `PR #49` (Email), `PR #50` (Update workers) |
+| 18.9 - 14.11 | Documentation | 8 | Updated report.md, design docs, and tfvars.example | `Create design.md`, `update report` |
+| **Total** | | **105** | | |

### Dejan

-| Date | Activity | Hours | Description |
-|-----------------|----------------------|--------|---------------------------------------------------------------|
-| 25.9. | Design | 2 | 6design |
-| 9.10 to 11.10. | Backend APIs | 12 | Implemented Backend APIs |
-| 13.10 to 15.10. | Frontend Development | 8 | Created user interface mockups |
-| Continually | Documentation | 6 | Documenting the dev process |
-| 21.10 to 23.10 | Tests, frontend | 10 | Test basics, balance charts, and frontend improvement |
-| 28.10 to 30.10 | CI | 6 | Integrated tests with test database setup on github workflows |
-| 28.10 to 30.10 | Frontend | 7 | UI improvements and exchange rate API integration |
-| 4.11 to 6.11 | Tests | 6 | Test fixes improvement, more integration and e2e |
-| 4.11 to 6.11 | Frontend | 6 | Fixes, Improved UI, added support for mobile devices |
-| **Total** | | **63** | |
+| Date | Activity | Hours | Description | Representative Commit / PR |
+|:----------------|:---------------------|:-------|:----------------------------------------------------------------|:----------------------------------------------------------|
+| 25.9. | Design | 2 | 6design | |
+| 9.10 to 11.10. | Backend APIs | 14 | Implemented Backend APIs | `PR #26`, `20-create-a-controller-layer-on-backend-side` |
+| 13.10 to 15.10. | Frontend Development | 8 | Created user interface mockups | `PR #28`, `frontend basics` |
+| Continually | Documentation | 7 | Documenting the dev process | |
+| 21.10 to 23.10 | Tests, frontend | 10 | Test basics, balance charts, and frontend improvements | `PR #31`, `30 create tests and set up a GitHub pipeline` |
+| 28.10 to 30.10 | CI | 6 | Integrated tests with test database setup on GitHub workflows | `PR #28`, `frontend basics` |
+| 28.10 to 30.10 | Frontend | 8 | UI improvements and exchange rate API integration | `PR #28`, `frontend basics` |
+| 4.11 to 6.11 | Tests | 6 | Test fixes and improvements, more integration and e2e | `PR #28`, `frontend basics` |
+| 4.11 to 6.11 | Frontend | 6 | Fixes, improved UI, added support for mobile devices | `PR #28`, `frontend basics` |
+| 11.11 | Backend APIs | 4 | Moved rates API and mock bank to the backend, a few fixes | `PR #28`, `frontend basics` |
+| 11.11 to 12.11 | Tests | 3 | Local testing DB container, a few fixes | `PR #28`, `frontend basics` |
+| 12.11 | Frontend | 3 | Enabled multiple transaction edits at once, CSAS button state | `PR #28`, `frontend basics` |
+| 13.11 | Video | 3 | Presentation video | |
+| **Total** | | **80** | | |
### Group Total: [XXX.X] hours

@@ -418,19 +616,46 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
### Challenges Faced

-[Describe the main challenges and how you overcame them]
+#### Slow cluster performance
+
+This was caused by a single SATA SSD running all the VMs. It was solved by adding a second NVMe disk dedicated to the Talos VMs.
+
+#### Stuck IaC deployment
+
+If a deployed module (a Helm chart, for example) was not configured properly, it would get stuck and time out, leaving behind a
+namespace that could not be deleted.
+This was solved by taking snapshots in Proxmox and restoring them whenever this happened.
### If We Did This Again

+#### Different framework
+
+FastAPI lacks usable built-in support for database migrations, and wiring in Alembic was a bit tricky.
+Integrating the FastAPI auth system with the React frontend was also tricky, since there is no official project template.
+Using .NET (which we considered initially) would probably have avoided these issues.

[What would you do differently? What worked well that you'd keep?]
### Individual Growth

-#### [Team Member 1 Name]
+#### [Lukas]
+
+This course finally forced me to learn Kubernetes (it has been on my TODO list for at least 3 years).
+I had some prior experience with Terraform/OpenTofu from work, but this improved my understanding of it.
+
+The biggest challenge for me was time tracking, since I am used to tracking time per project, not per task
+(and I am bad even at that :) ).
+
+It was also an interesting experience to be the one responsible for the initial project structure/design/setup
+that was used by more people than just myself.

[Personal reflection on growth, challenges, and learning]
-#### [Team Member 2 Name]
+#### [Dejan]
+
+Since I do not have a job, this project was probably the most complex one I have ever worked on.
+It was also the first school project where I was encouraged to use AI.
+
+Lukas

[Personal reflection on growth, challenges, and learning]
@@ -438,4 +663,4 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route

---

**Report Completion Date**: [Date]
-**Last Updated**: 15.10.2025
+**Last Updated**: 13.11.2025
23 7project/src/README.md Normal file
@@ -0,0 +1,23 @@
## Folder structure

- `src/`
  - `backend/`
    - `alembic/` - database migrations
    - `app/` - main application code
    - `tests/` - tests
    - `docker-compose.test.yml` - docker compose for the testing database
    - `Dockerfile` - production Dockerfile
    - `main.py` - app entrypoint
    - `requirements.txt` - Python dependencies
    - `test_locally.sh` - script to run tests with a temporary database
  - `charts/`
    - `myapp-chart/` - Helm chart for deploying the application, supports prod and dev environments
  - `frontend/` - React frontend application
  - `tofu/` - Terraform/OpenTofu services deployment configurations
    - `modules/` - separate modules for the different services
    - `main.tf` - main deployment configuration
    - `variables.tf` - deployment variables
    - `terraform.tfvars.example` - example variables file
  - `compose.yaml` - Docker Compose file for local development
  - `create_migration.sh` - script to create a new Alembic database migration
  - `upgrade_database.sh` - script to upgrade the database to the latest Alembic revision
8 7project/src/backend/.idea/.gitignore generated vendored Normal file
@@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
@@ -1,4 +1,4 @@
-FROM python:3.11-slim
+FROM python:3.11-trixie

WORKDIR /app
COPY requirements.txt .
66 7project/src/backend/app/api/exchange_rates.py Normal file
@@ -0,0 +1,66 @@
import os
from typing import List

import httpx
from fastapi import APIRouter, HTTPException, Query, status

router = APIRouter(prefix="/exchange-rates", tags=["exchange-rates"])


@router.get("", status_code=status.HTTP_200_OK)
async def get_exchange_rates(symbols: str = Query("EUR,USD,NOK", description="Comma-separated currency codes to fetch vs CZK")):
    """
    Fetch exchange rates from UniRate API on the backend and return CZK-per-target rates.
    - Always requests CZK in addition to requested symbols to compute conversion from USD-base.
    - Returns a list of {currencyCode, rate} where rate is CZK per 1 unit of the target currency.
    """
    api_key = os.getenv("UNIRATE_API_KEY")
    if not api_key:
        raise HTTPException(status_code=500, detail="Server is not configured with UNIRATE_API_KEY")

    # Ensure CZK is included for conversion
    requested = [s.strip().upper() for s in symbols.split(",") if s.strip()]
    if "CZK" not in requested:
        requested.append("CZK")
    query_symbols = ",".join(sorted(set(requested)))

    url = f"https://unirateapi.com/api/rates?api_key={api_key}&symbols={query_symbols}"

    try:
        async with httpx.AsyncClient(timeout=httpx.Timeout(15.0)) as client:
            resp = await client.get(url)
            if resp.status_code != httpx.codes.OK:
                raise HTTPException(status_code=502, detail=f"Upstream UniRate error: HTTP {resp.status_code}")
            data = resp.json()
    except httpx.HTTPError as e:
        raise HTTPException(status_code=502, detail=f"Failed to contact UniRate: {str(e)}")

    # Validate response structure
    rates = data.get("rates") if isinstance(data, dict) else None
    base = data.get("base") if isinstance(data, dict) else None
    if not rates or base != "USD" or "CZK" not in rates:
        # Prefer upstream message when available
        detail = data.get("message") if isinstance(data, dict) else None
        if not detail and isinstance(data, dict):
            err = data.get("error")
            if isinstance(err, dict):
                detail = err.get("info")
        raise HTTPException(status_code=502, detail=detail or "Invalid response from UniRate API")

    czk_per_usd = rates["CZK"]

    # Build result excluding CZK itself
    result = []
    for code in requested:
        if code == "CZK":
            continue
        target_per_usd = rates.get(code)
        if target_per_usd in (None, 0):
            # Skip unavailable or invalid
            continue
        czk_per_target = czk_per_usd / target_per_usd
        result.append({"currencyCode": code, "rate": czk_per_target})

    return result
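For clarity, a worked example of the USD-base conversion this endpoint performs; the rate values below are illustrative, not real data:

```python
# UniRate returns USD-based rates, so CZK per 1 unit of a target currency is
# derived as czk_per_usd / target_per_usd. The numbers here are made up.
rates = {"CZK": 23.5, "EUR": 0.92, "NOK": 10.8}  # units per 1 USD (assumed)

czk_per_usd = rates["CZK"]
czk_per_eur = czk_per_usd / rates["EUR"]  # 23.5 / 0.92 ≈ 25.54 CZK per EUR
czk_per_nok = czk_per_usd / rates["NOK"]  # 23.5 / 10.8 ≈ 2.18 CZK per NOK
print(round(czk_per_eur, 2), round(czk_per_nok, 2))  # 25.54 2.18
```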
116 7project/src/backend/app/api/mock_bank.py Normal file
@@ -0,0 +1,116 @@
from datetime import datetime, timedelta
from typing import List, Optional
import random

from fastapi import APIRouter, Depends
from pydantic import BaseModel, Field, conint, confloat, validator
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.services.db import get_async_session
from app.services.user_service import current_active_user
from app.models.user import User
from app.models.transaction import Transaction
from app.models.categories import Category
from app.schemas.transaction import TransactionRead

router = APIRouter(prefix="/mock-bank", tags=["mock-bank"])


class GenerateOptions(BaseModel):
    count: conint(strict=True, gt=0) = Field(default=10, description="Number of transactions to generate")
    minAmount: confloat(strict=True) = Field(default=-200.0, description="Minimum transaction amount")
    maxAmount: confloat(strict=True) = Field(default=200.0, description="Maximum transaction amount")
    startDate: Optional[str] = Field(None, description="Earliest date (YYYY-MM-DD)")
    endDate: Optional[str] = Field(None, description="Latest date (YYYY-MM-DD)")
    categoryIds: List[int] = Field(default_factory=list, description="Optional category IDs to assign randomly")

    @validator("maxAmount")
    def _validate_amounts(cls, v, values):
        min_amt = values.get("minAmount")
        if min_amt is not None and v < min_amt:
            raise ValueError("maxAmount must be greater than or equal to minAmount")
        return v

    @validator("endDate")
    def _validate_dates(cls, v, values):
        sd = values.get("startDate")
        if v and sd:
            try:
                ed = datetime.strptime(v, "%Y-%m-%d").date()
                st = datetime.strptime(sd, "%Y-%m-%d").date()
            except ValueError:
                raise ValueError("Invalid date format, expected YYYY-MM-DD")
            if ed < st:
                raise ValueError("endDate must be greater than or equal to startDate")
        return v


class GeneratedTransaction(BaseModel):
    amount: float
    date: str  # YYYY-MM-DD
    category_ids: List[int] = []
    description: Optional[str] = None


@router.post("/generate", response_model=List[GeneratedTransaction])
async def generate_mock_transactions(
    options: GenerateOptions,
    user: User = Depends(current_active_user),
):
    # Seed randomness per user to make results less erratic across multiple calls in quick succession
    seed = int(datetime.utcnow().timestamp()) ^ int(user.id)
    rnd = random.Random(seed)

    # Determine date range
    if options.startDate:
        start_date = datetime.strptime(options.startDate, "%Y-%m-%d").date()
    else:
        start_date = (datetime.utcnow() - timedelta(days=365)).date()
    if options.endDate:
        end_date = datetime.strptime(options.endDate, "%Y-%m-%d").date()
    else:
        end_date = datetime.utcnow().date()

    span_days = max(0, (end_date - start_date).days)

    results: List[GeneratedTransaction] = []
    for _ in range(options.count):
        amount = round(rnd.uniform(options.minAmount, options.maxAmount), 2)
        # Pick a random date in the inclusive range
        rand_day = rnd.randint(0, span_days) if span_days > 0 else 0
        tx_date = start_date + timedelta(days=rand_day)
        # Pick category randomly from provided list, or empty
        if options.categoryIds:
            cat = [rnd.choice(options.categoryIds)]
        else:
            cat = []
        # Optional simple description for flavor
        desc = None
        # Assemble
        results.append(GeneratedTransaction(
            amount=amount,
            date=tx_date.isoformat(),
            category_ids=cat,
            description=desc,
        ))

    return results


@router.get("/scrape")
async def scrape_mock_bank():
    # 80% of the time: nothing to scrape
    if random.random() < 0.8:
        return []

    transactions = []
    count = random.randint(1, 10)
    for _ in range(count):
        transactions.append({
            "amount": round(random.uniform(-200.0, 200.0), 2),
            "date": (datetime.utcnow().date() - timedelta(days=random.randint(0, 30))).isoformat(),
            "description": "Mock transaction",
        })

    return transactions
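A minimal sketch of exercising the generator endpoint from a client, assuming a backend on 127.0.0.1:8000 and a JWT obtained via /auth/jwt/login (the token value below is a placeholder):

```python
import httpx

TOKEN = "eyJ..."  # placeholder; obtain a real token via POST /auth/jwt/login

payload = {
    "count": 5,
    "minAmount": -100.0,
    "maxAmount": 100.0,
    "startDate": "2025-01-01",
    "endDate": "2025-06-30",
    "categoryIds": [],
}
resp = httpx.post(
    "http://127.0.0.1:8000/mock-bank/generate",
    json=payload,
    headers={"Authorization": f"Bearer {TOKEN}"},
)
resp.raise_for_status()
print(resp.json())  # list of {"amount", "date", "category_ids", "description"}
```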
@@ -1,10 +1,11 @@
+import json
import logging
import os
import sys
from datetime import datetime
from pythonjsonlogger import jsonlogger

-from fastapi import Depends, FastAPI
+from fastapi import Depends, FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from prometheus_fastapi_instrumentator import Instrumentator, metrics
from starlette.requests import Request
@@ -20,6 +21,7 @@ from app.api.auth import router as auth_router
from app.api.csas import router as csas_router
from app.api.categories import router as categories_router
from app.api.transactions import router as transactions_router
+from app.api.exchange_rates import router as exchange_rates_router
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users, get_oauth_provider, \
    UserManager, get_jwt_strategy
from app.core.security import extract_bearer_token, is_token_revoked, decode_and_verify_jwt
@@ -28,7 +30,8 @@ from app.services.user_service import SECRET
from fastapi import FastAPI
import sentry_sdk
from fastapi_users.db import SQLAlchemyUserDatabase
-from app.core.db import async_session_maker
+from app.core.db import async_session_maker, engine
+from app.core.base import Base

sentry_sdk.init(
    dsn=os.getenv("SENTRY_DSN"),
@@ -50,21 +53,23 @@ fastApi.add_middleware(
    allow_headers=["*"],
)

-prometheus = Instrumentator().instrument(fastApi)
-
-# Register custom metrics
-prometheus.add(number_of_users()).add(number_of_transactions())
-
-prometheus.expose(
-    fastApi,
-    endpoint="/metrics",
-    include_in_schema=True,
-)
+if not os.getenv("PYTEST_RUN_CONFIG"):
+    prometheus = Instrumentator().instrument(fastApi)
+    # Register custom metrics
+    prometheus.add(number_of_users()).add(number_of_transactions())
+    prometheus.expose(
+        fastApi,
+        endpoint="/metrics",
+        include_in_schema=True,
+    )

fastApi.include_router(auth_router)
fastApi.include_router(categories_router)
fastApi.include_router(transactions_router)
+fastApi.include_router(exchange_rates_router)
+from app.api.mock_bank import router as mock_bank_router
+fastApi.include_router(mock_bank_router)

for h in list(logging.root.handlers):
    logging.root.removeHandler(h)
@@ -78,7 +83,6 @@ _log_handler.setFormatter(_formatter)
logging.root.setLevel(logging.INFO)
logging.root.addHandler(_log_handler)
-

for _name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
    _logger = logging.getLogger(_name)
    _logger.handlers = [_log_handler]
@@ -161,16 +165,12 @@ async def authenticated_route(user: User = Depends(current_active_verified_user))
    return {"message": f"Hello {user.email}!"}


-@fastApi.get("/debug/scrape/csas/all", tags=["debug"])
-async def debug_scrape_csas_all():
-    logging.info("[Debug] Queueing CSAS scrape for all users via HTTP endpoint (Celery)")
+@fastApi.get("/_cron", include_in_schema=False)
+async def handle_cron(request: Request):
+    # endpoint accessed via Cloudflare => return 404
+    if request.headers.get("cf-connecting-ip"):
+        raise HTTPException(status_code=404)
+
+    logging.info("[Cron] Triggering scheduled tasks via HTTP endpoint")
    task = load_all_transactions.delay()
    return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
-
-
-@fastApi.post("/debug/scrape/csas/{user_id}", tags=["debug"])
-async def debug_scrape_csas_user(user_id: str, user: User = Depends(current_active_verified_user)):
-    logging.info("[Debug] Queueing CSAS scrape for single user via HTTP endpoint (Celery) | user_id=%s", user_id)
-    task = load_transactions.delay(user_id)
-    return {"status": "queued", "action": "csas_scrape_single", "user_id": user_id,
-            "task_id": getattr(task, 'id', None)}
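The `/_cron` handler above hides itself from requests that traversed Cloudflare by checking for the `cf-connecting-ip` header that Cloudflare adds to proxied traffic, so only in-cluster callers (the CronJob further below) can trigger it. A self-contained sketch of the same gating pattern, using a throwaway app rather than the project's actual module:

```python
from fastapi import FastAPI, HTTPException, Request
from fastapi.testclient import TestClient

app = FastAPI()


@app.get("/_cron", include_in_schema=False)
async def handle_cron(request: Request):
    # Cloudflare-proxied requests carry cf-connecting-ip; pretend the route does not exist.
    if request.headers.get("cf-connecting-ip"):
        raise HTTPException(status_code=404)
    return {"status": "queued"}


client = TestClient(app)
assert client.get("/_cron").status_code == 200  # direct (in-cluster style) call
assert client.get("/_cron", headers={"cf-connecting-ip": "203.0.113.7"}).status_code == 404
```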
@@ -1,5 +1,7 @@
import os
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
from app.core.base import Base

DATABASE_URL = os.getenv("DATABASE_URL")
@@ -23,6 +25,7 @@ host_env = os.getenv("MARIADB_HOST", "localhost")
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}

+# Async engine/session for the async parts of the app
engine = create_async_engine(
    DATABASE_URL,
    pool_pre_ping=True,
@@ -30,3 +33,13 @@ engine = create_async_engine(
    connect_args=connect_args,
)
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
+
+# Synchronous engine/session for sync utilities (e.g., bank_scraper)
+SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
+engine_sync = create_engine(
+    SYNC_DATABASE_URL,
+    pool_pre_ping=True,
+    echo=os.getenv("SQL_ECHO", "0") == "1",
+    connect_args=connect_args,
+)
+sync_session_maker = sessionmaker(bind=engine_sync, expire_on_commit=False)
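The sync engine is derived by swapping the async driver in the connection URL; a quick illustration of that substitution with an example URL (the credentials match the test compose file, not production):

```python
# The async app uses the asyncmy driver; sync utilities reuse the same URL with pymysql.
ASYNC_URL = "mysql+asyncmy://appuser:apppass@127.0.0.1:3307/group_project"  # example only
SYNC_URL = ASYNC_URL.replace("+asyncmy", "+pymysql")
assert SYNC_URL == "mysql+pymysql://appuser:apppass@127.0.0.1:3307/group_project"
```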
@@ -1,10 +1,11 @@
import uuid
-from typing import Optional
+from typing import Optional, Dict, Any
from fastapi_users import schemas


class UserRead(schemas.BaseUser[uuid.UUID]):
    first_name: Optional[str] = None
    last_name: Optional[str] = None
+    config: Optional[Dict[str, Any]] = None


class UserCreate(schemas.BaseUserCreate):
    first_name: Optional[str] = None
@@ -1,5 +1,6 @@
import json
import logging
+import os
from os.path import dirname, join
from time import strptime
from uuid import UUID
@@ -7,7 +8,7 @@ from uuid import UUID
import httpx
from sqlalchemy import select

-from app.core.db import async_session_maker
+from app.core.db import sync_session_maker
from app.models.transaction import Transaction
from app.models.user import User

@@ -20,26 +21,78 @@ CERTS (
)


-async def aload_ceska_sporitelna_transactions(user_id: str) -> None:
+def load_mock_bank_transactions(user_id: str) -> None:
    try:
        uid = UUID(str(user_id))
    except Exception:
-        logger.error("Invalid user_id provided to bank_scraper (async): %r", user_id)
+        logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
        return

-    await _aload_ceska_sporitelna_transactions(uid)
+    _load_mock_bank_transactions(uid)


-async def aload_all_ceska_sporitelna_transactions() -> None:
-    async with async_session_maker() as session:
-        result = await session.execute(select(User))
-        users = result.unique().scalars().all()
+def load_all_mock_bank_transactions() -> None:
+    with sync_session_maker() as session:
+        users = session.execute(select(User)).unique().scalars().all()
+    logger.info("[BankScraper] Starting Mock Bank scrape for all users | count=%d", len(users))
+
+    processed = 0
+    for user in users:
+        try:
+            _load_mock_bank_transactions(user.id)
+            processed += 1
+        except Exception:
+            logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
+                             getattr(user, 'email', None))
+    logger.info("[BankScraper] Finished Mock Bank scrape for all users | processed=%d", processed)
+
+
+def _load_mock_bank_transactions(user_id: UUID) -> None:
+    with sync_session_maker() as session:
+        user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
+        if user is None:
+            logger.warning("User not found for id=%s", user_id)
+            return
+
+        transactions = []
+        with httpx.Client() as client:
+            response = client.get(f"{os.getenv('APP_POD_URL')}/mock-bank/scrape")
+            if response.status_code != httpx.codes.OK:
+                return
+            for transaction in response.json():
+                transactions.append(
+                    Transaction(
+                        amount=transaction["amount"],
+                        description=transaction.get("description"),
+                        date=strptime(transaction["date"], "%Y-%m-%d"),
+                        user_id=user_id,
+                    )
+                )
+
+        for transaction in transactions:
+            session.add(transaction)
+        session.commit()
+
+
+def load_ceska_sporitelna_transactions(user_id: str) -> None:
+    try:
+        uid = UUID(str(user_id))
+    except Exception:
+        logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
+        return
+
+    _load_ceska_sporitelna_transactions(uid)
+
+
+def load_all_ceska_sporitelna_transactions() -> None:
+    with sync_session_maker() as session:
+        users = session.execute(select(User)).unique().scalars().all()
    logger.info("[BankScraper] Starting CSAS scrape for all users | count=%d", len(users))

    processed = 0
    for user in users:
        try:
-            await _aload_ceska_sporitelna_transactions(user.id)
+            _load_ceska_sporitelna_transactions(user.id)
            processed += 1
        except Exception:
            logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
@@ -47,10 +100,9 @@ async def aload_all_ceska_sporitelna_transactions() -> None:
    logger.info("[BankScraper] Finished CSAS scrape for all users | processed=%d", processed)


-async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
-    async with (async_session_maker() as session):
-        result = await session.execute(select(User).where(User.id == user_id))
-        user: User = result.unique().scalar_one_or_none()
+def _load_ceska_sporitelna_transactions(user_id: UUID) -> None:
+    with sync_session_maker() as session:
+        user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
        if user is None:
            logger.warning("User not found for id=%s", user_id)
            return
@@ -65,8 +117,8 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:

    accounts = []
    try:
-        async with httpx.AsyncClient(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
-            response = await client.get(
+        with httpx.Client(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
+            response = client.get(
                "https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts?size=10&page=0&sort=iban&order=desc",
                headers={
                    "Authorization": f"Bearer {cfg['access_token']}",
@@ -77,7 +129,7 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
            if response.status_code != httpx.codes.OK:
                return

-            for account in response.json()["accounts"]:
+            for account in response.json().get("accounts", []):
                accounts.append(account)

    except (httpx.HTTPError,) as e:
@@ -85,11 +137,13 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
        return

    for account in accounts:
-        id = account["id"]
+        acc_id = account.get("id")
+        if not acc_id:
+            continue

-        url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
-        async with httpx.AsyncClient(cert=CERTS) as client:
-            response = await client.get(
+        url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{acc_id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
+        with httpx.Client(cert=CERTS) as client:
+            response = client.get(
                url,
                headers={
                    "Authorization": f"Bearer {cfg['access_token']}",
@@ -100,7 +154,7 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
            if response.status_code != httpx.codes.OK:
                continue

-            transactions = response.json()["transactions"]
+            transactions = response.json().get("transactions", [])

            for transaction in transactions:
                description = transaction.get("entryDetails", {}).get("transactionDetails", {}).get(
@@ -108,9 +162,12 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
                date_str = transaction.get("bookingDate", {}).get("date")
                date = strptime(date_str, "%Y-%m-%d") if date_str else None
                amount = transaction.get("amount", {}).get("value")
-                if transaction.get("creditDebitIndicator") == "DBIT":
+                if transaction.get("creditDebitIndicator") == "DBIT" and amount is not None:
                    amount = -abs(amount)
+
+                if amount is None:
+                    continue

                obj = Transaction(
                    amount=amount,
                    description=description,
@@ -118,7 +175,4 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
                    user_id=user_id,
                )
                session.add(obj)
-                await session.commit()
+                session.commit()
-
-pass
-pass
86 7project/src/backend/app/workers/celery_tasks.py Normal file
@@ -0,0 +1,86 @@
import logging
import os
import smtplib
from email.message import EmailMessage

import app.services.bank_scraper
from app.celery_app import celery_app

logger = logging.getLogger("celery_tasks")
if not logger.handlers:
    _h = logging.StreamHandler()
    logger.addHandler(_h)
    logger.setLevel(logging.INFO)


@celery_app.task(name="workers.send_email")
def send_email(to: str, subject: str, body: str) -> None:
    if not (to and subject and body):
        logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
        return

    host = os.getenv("SMTP_HOST")
    if not host:
        logger.error("SMTP_HOST is not configured; cannot send email")
        return

    # Configuration
    port = int(os.getenv("SMTP_PORT", "25"))
    username = os.getenv("SMTP_USERNAME")
    password = os.getenv("SMTP_PASSWORD")
    use_tls = os.getenv("SMTP_USE_TLS", "0").lower() in {"1", "true", "yes"}
    use_ssl = os.getenv("SMTP_USE_SSL", "0").lower() in {"1", "true", "yes"}
    timeout = int(os.getenv("SMTP_TIMEOUT", "10"))
    mail_from = os.getenv("SMTP_FROM") or username or "noreply@localhost"

    # Build message
    msg = EmailMessage()
    msg["To"] = to
    msg["From"] = mail_from
    msg["Subject"] = subject
    msg.set_content(body)

    try:
        if use_ssl:
            with smtplib.SMTP_SSL(host=host, port=port, timeout=timeout) as smtp:
                if username and password:
                    smtp.login(username, password)
                smtp.send_message(msg)
        else:
            with smtplib.SMTP(host=host, port=port, timeout=timeout) as smtp:
                # STARTTLS if requested
                if use_tls:
                    smtp.starttls()
                if username and password:
                    smtp.login(username, password)
                smtp.send_message(msg)
        logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
    except Exception:
        logger.exception("Failed to send email via SMTP to=%s subject=%s host=%s port=%s tls=%s ssl=%s", to, subject,
                         host, port, use_tls, use_ssl)


@celery_app.task(name="workers.load_transactions")
def load_transactions(user_id: str) -> None:
    if not user_id:
        logger.error("Load transactions task missing user_id.")
        return

    logger.info("[Celery] Starting load_transactions | user_id=%s", user_id)
    try:
        # Use synchronous bank scraper functions directly, mirroring load_all_transactions
        app.services.bank_scraper.load_mock_bank_transactions(user_id)
        app.services.bank_scraper.load_ceska_sporitelna_transactions(user_id)
    except Exception:
        logger.exception("Failed to load transactions for user_id=%s", user_id)
    else:
        logger.info("[Celery] Finished load_transactions | user_id=%s", user_id)


@celery_app.task(name="workers.load_all_transactions")
def load_all_transactions() -> None:
    logger.info("[Celery] Starting load_all_transactions")
    # Now use synchronous bank scraper functions directly
    app.services.bank_scraper.load_all_mock_bank_transactions()
    app.services.bank_scraper.load_all_ceska_sporitelna_transactions()
    logger.info("[Celery] Finished load_all_transactions")
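A minimal sketch of enqueueing these tasks from application code, assuming a reachable broker and a running worker (the recipient address and UUID are placeholders):

```python
from app.workers.celery_tasks import send_email, load_transactions

# Fire-and-forget: a worker consuming the queue picks these up.
send_email.delay("user@example.com", "Welcome", "Thanks for signing up!")
load_transactions.delay("00000000-0000-0000-0000-000000000000")  # placeholder user UUID

# .delay() returns an AsyncResult whose id can be logged or polled later.
result = send_email.delay("user@example.com", "Ping", "Body")
print(result.id)
```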
20 7project/src/backend/docker-compose.test.yml Normal file
@@ -0,0 +1,20 @@
version: "3.9"
|
||||||
|
services:
|
||||||
|
mariadb:
|
||||||
|
image: mariadb:11.4
|
||||||
|
container_name: test-mariadb
|
||||||
|
environment:
|
||||||
|
MARIADB_ROOT_PASSWORD: rootpw
|
||||||
|
MARIADB_DATABASE: group_project
|
||||||
|
MARIADB_USER: appuser
|
||||||
|
MARIADB_PASSWORD: apppass
|
||||||
|
ports:
|
||||||
|
- "3307:3306" # host:container (use 3307 on host to avoid conflicts)
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "mariadb-admin", "ping", "-h", "127.0.0.1", "-u", "root", "-prootpw", "--silent"]
|
||||||
|
interval: 5s
|
||||||
|
timeout: 2s
|
||||||
|
retries: 20
|
||||||
|
# Truly ephemeral, fast storage (removed when container stops)
|
||||||
|
tmpfs:
|
||||||
|
- /var/lib/mysql
|
||||||
4 7project/src/backend/main.py Normal file
@@ -0,0 +1,4 @@
import uvicorn

if __name__ == "__main__":
    uvicorn.run("app.app:fastApi", host="0.0.0.0", log_level="info")
113 7project/src/backend/test_locally.sh Executable file
@@ -0,0 +1,113 @@
#!/usr/bin/env bash
set -euo pipefail

# Run tests against a disposable local MariaDB on host port 3307 using Docker Compose.
# Requirements: Docker, docker compose plugin, Python, Alembic, pytest.
# Usage:
#   chmod +x ./test_locally.sh
#   # From the 7project/src/backend directory
#   ./test_locally.sh [--only-unit|--only-integration|--only-e2e] [pytest-args...]
#   # Examples:
#   ./test_locally.sh --only-unit -q
#   ./test_locally.sh --only-integration -k "login"
#   ./test_locally.sh --only-e2e -vv
#
# This script will:
#   1) Start a MariaDB 11.4 container (ephemeral storage, port 3307)
#   2) Wait until it's healthy
#   3) Export env vars expected by the app (DATABASE_URL etc.)
#   4) Run Alembic migrations
#   5) Run pytest
#   6) Tear everything down (containers and tmpfs data)

COMPOSE_FILE="docker-compose.test.yml"
SERVICE_NAME="mariadb"
CONTAINER_NAME="test-mariadb"

if ! command -v docker >/dev/null 2>&1; then
  echo "Docker is required but not found in PATH" >&2
  exit 1
fi
if ! docker compose version >/dev/null 2>&1; then
  echo "Docker Compose V2 plugin is required (docker compose)" >&2
  exit 1
fi

# Bring up the DB
echo "Starting MariaDB (port 3307) with docker compose..."
docker compose -f "$COMPOSE_FILE" up -d

# Ensure we clean up on exit
cleanup() {
  echo -e "\nTearing down docker compose stack..."
  docker compose -f "$COMPOSE_FILE" down -v || true
}
trap cleanup EXIT

# Wait for healthy container
echo -n "Waiting for MariaDB to become healthy"
for i in {1..60}; do
  status=$(docker inspect -f '{{.State.Health.Status}}' "$CONTAINER_NAME" 2>/dev/null || echo "")
  if [ "$status" = "healthy" ]; then
    echo " -> healthy"
    break
  fi
  echo -n "."
  sleep 1
  if [ $i -eq 60 ]; then
    echo -e "\nMariaDB did not become healthy in time" >&2
    exit 1
  fi
done

# Export env vars for the app/tests (match app/core/db.py expectations)
export MARIADB_HOST=127.0.0.1
export MARIADB_PORT=3307
export MARIADB_DB=group_project
export MARIADB_USER=appuser
export MARIADB_PASSWORD=apppass
export DATABASE_URL="mysql+asyncmy://$MARIADB_USER:$MARIADB_PASSWORD@$MARIADB_HOST:$MARIADB_PORT/$MARIADB_DB"
export PYTEST_RUN_CONFIG="True"

# Determine which tests to run based on flags
UNIT_TESTS="tests/test_unit_user_service.py"
INTEGRATION_TESTS="tests/test_integration_app.py"
E2E_TESTS="tests/test_e2e.py"

FLAG_COUNT=0
TEST_TARGET=""
declare -a PYTEST_ARGS=()
for arg in "$@"; do
  case "$arg" in
    --only-unit)
      TEST_TARGET="$UNIT_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
    --only-integration)
      TEST_TARGET="$INTEGRATION_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
    --only-e2e)
      TEST_TARGET="$E2E_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
    *)
      PYTEST_ARGS+=("$arg");;
  esac
done

if [ "$FLAG_COUNT" -gt 1 ]; then
  echo "Error: Use only one of --only-unit, --only-integration, or --only-e2e" >&2
  exit 2
fi

# Run Alembic migrations then tests
pushd . >/dev/null
echo "Running Alembic migrations..."
alembic upgrade head

echo "Running pytest..."
if [ -n "$TEST_TARGET" ]; then
  # Use "${PYTEST_ARGS[@]:-}" to safely expand empty array with 'set -u'
  pytest "$TEST_TARGET" "${PYTEST_ARGS[@]:-}"
else
  # Use "${PYTEST_ARGS[@]:-}" to safely expand empty array with 'set -u'
  pytest "${PYTEST_ARGS[@]:-}"
fi
popd >/dev/null

# Cleanup handled by trap
@@ -3,17 +3,6 @@ import pytest
from httpx import AsyncClient, ASGITransport


-def test_root_ok(client):
-    resp = client.get("/")
-    assert resp.status_code == status.HTTP_200_OK
-    assert resp.json() == {"status": "ok"}
-
-
-def test_authenticated_route_requires_auth(client):
-    resp = client.get("/authenticated-route")
-    assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
-
-
@pytest.mark.asyncio
async def test_create_and_get_category(fastapi_app, test_user):
    # Use AsyncClient for async tests
@@ -165,6 +154,6 @@ async def test_delete_transaction_not_found(fastapi_app, test_user):
    async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
        token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
        h = {"Authorization": f"Bearer {token}"}
-        r = await ac.delete("/transactions/999999/delete", headers=h)
+        r = await ac.delete("/transactions/9999999/delete", headers=h)
        assert r.status_code == status.HTTP_404_NOT_FOUND
@@ -1,7 +1,5 @@
-import types
-import asyncio
import pytest
+from fastapi import status
from app.services import user_service

@@ -22,6 +20,15 @@ def test_get_jwt_strategy_lifetime():
    # Basic smoke check: strategy has a lifetime set to 604800
    assert getattr(strategy, "lifetime_seconds", None) in (604800,)

+
+def test_root_ok(client):
+    resp = client.get("/")
+    assert resp.status_code == status.HTTP_200_OK
+    assert resp.json() == {"status": "ok"}
+
+
+def test_authenticated_route_requires_auth(client):
+    resp = client.get("/authenticated-route")
+    assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
+
@pytest.mark.asyncio
async def test_on_after_request_verify_enqueues_email(monkeypatch):
@@ -90,6 +90,11 @@ spec:
              secretKeyRef:
                name: prod
                key: CSAS_CLIENT_SECRET
+          - name: UNIRATE_API_KEY
+            valueFrom:
+              secretKeyRef:
+                name: prod
+                key: UNIRATE_API_KEY
          - name: DOMAIN
            value: {{ required "Set .Values.domain" .Values.domain | quote }}
          - name: DOMAIN_SCHEME
25 7project/src/charts/myapp-chart/templates/cron.yaml Normal file
@@ -0,0 +1,25 @@
{{- if .Values.cron.enabled }}
apiVersion: batch/v1
kind: CronJob
metadata:
  name: cronjob
spec:
  schedule: {{ .Values.cron.schedule | quote }}
  concurrencyPolicy: {{ .Values.cron.concurrencyPolicy | quote }}
  jobTemplate:
    spec:
      template:
        spec:
          containers:
            - name: cronjob
              image: curlimages/curl:latest
              imagePullPolicy: IfNotPresent
              args:
                - -sS
                - -o
                - /dev/null
                - -w
                - "%{http_code}"
                - {{ printf "%s://%s.%s.svc.cluster.local%s" .Values.cron.scheme .Values.app.name .Release.Namespace .Values.cron.endpoint | quote }}
          restartPolicy: OnFailure
{{- end }}
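For reference, the `printf` in the curl args renders an in-cluster service URL; a small sketch of what it evaluates to under assumed values (`app.name` and the release namespace both set to `myapp`, the scheme and endpoint taken from the chart defaults):

```python
# Mirrors the Helm printf "%s://%s.%s.svc.cluster.local%s" with assumed inputs.
scheme, name, namespace, endpoint = "http", "myapp", "myapp", "/_cron"
url = f"{scheme}://{name}.{namespace}.svc.cluster.local{endpoint}"
assert url == "http://myapp.myapp.svc.cluster.local/_cron"
```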
@@ -19,3 +19,11 @@ stringData:
  RABBITMQ_USERNAME: {{ .Values.rabbitmq.username | quote }}
  SENTRY_DSN: {{ .Values.sentry_dsn | quote }}
  DB_ENCRYPTION_KEY: {{ required "Set .Values.database.encryptionSecret" .Values.database.encryptionSecret | quote }}
+  SMTP_HOST: {{ .Values.smtp.host | default "" | quote }}
+  SMTP_PORT: {{ .Values.smtp.port | default 587 | quote }}
+  SMTP_USERNAME: {{ .Values.smtp.username | default "" | quote }}
+  SMTP_PASSWORD: {{ .Values.smtp.password | default "" | quote }}
+  SMTP_USE_TLS: {{ .Values.smtp.tls | default false | quote }}
+  SMTP_USE_SSL: {{ .Values.smtp.ssl | default false | quote }}
+  SMTP_FROM: {{ .Values.smtp.from | default "" | quote }}
+  UNIRATE_API_KEY: {{ .Values.unirate.key | default "" | quote }}
@@ -85,3 +85,40 @@ spec:
              secretKeyRef:
                name: prod
                key: DB_ENCRYPTION_KEY
+          - name: SMTP_HOST
+            valueFrom:
+              secretKeyRef:
+                name: prod
+                key: SMTP_HOST
+          - name: SMTP_PORT
+            valueFrom:
+              secretKeyRef:
+                name: prod
+                key: SMTP_PORT
+          - name: SMTP_USERNAME
+            valueFrom:
+              secretKeyRef:
+                name: prod
+                key: SMTP_USERNAME
+          - name: SMTP_PASSWORD
+            valueFrom:
+              secretKeyRef:
+                name: prod
+                key: SMTP_PASSWORD
+          - name: SMTP_USE_TLS
+            valueFrom:
+              secretKeyRef:
+                name: prod
+                key: SMTP_USE_TLS
+          - name: SMTP_USE_SSL
+            valueFrom:
+              secretKeyRef:
+                name: prod
+                key: SMTP_USE_SSL
+          - name: SMTP_FROM
+            valueFrom:
+              secretKeyRef:
+                name: prod
+                key: SMTP_FROM
+          - name: APP_POD_URL
+            value: {{ printf "http://%s.%s.svc.cluster.local" .Values.app.name .Release.Namespace | quote }}
@@ -5,3 +5,6 @@ app:

worker:
  replicas: 3
+
+cron:
+  enabled: true
@@ -13,6 +13,9 @@ deployment: ""
domain: ""
domain_scheme: ""

+unirate:
+  key: ""
+
frontend_domain: ""
frontend_domain_scheme: ""

@@ -35,6 +38,23 @@ worker:
  # Queue name for Celery worker and for CRD Queue
  mailQueueName: "mail_queue"

+cron:
+  enabled: false
+  schedule: "*/5 * * * *" # every 5 minutes
+  scheme: "http"
+  endpoint: "/_cron"
+  concurrencyPolicy: "Forbid"
+
+smtp:
+  host:
+  port: 587
+  username: ""
+  password: ""
+  tls: false
+  ssl: false
+  from: ""
+

service:
  port: 80