mirror of
https://github.com/dat515-2025/Group-8.git
synced 2026-03-22 15:12:08 +01:00
Compare commits
4 Commits
db9092b78f
...
merge/pr_d
| Author | SHA1 | Date | |
|---|---|---|---|
| 00bd885873 | |||
| f492e0cc03 | |||
| cb3747357e | |||
| 31bebaf247 |
4
.github/workflows/build-image.yaml
vendored
4
.github/workflows/build-image.yaml
vendored
@@ -15,7 +15,7 @@ on:
|
|||||||
context:
|
context:
|
||||||
description: "Docker build context path"
|
description: "Docker build context path"
|
||||||
required: false
|
required: false
|
||||||
default: "7project/src/backend"
|
default: "7project/backend"
|
||||||
type: string
|
type: string
|
||||||
pr_number:
|
pr_number:
|
||||||
description: "PR number (required when mode=pr)"
|
description: "PR number (required when mode=pr)"
|
||||||
@@ -94,7 +94,7 @@ jobs:
|
|||||||
tags: |
|
tags: |
|
||||||
${{ env.IMAGE_REPO }}:${{ env.TAG1 }}
|
${{ env.IMAGE_REPO }}:${{ env.TAG1 }}
|
||||||
${{ env.IMAGE_REPO }}:${{ env.TAG2 }}
|
${{ env.IMAGE_REPO }}:${{ env.TAG2 }}
|
||||||
platforms: linux/arm64,linux/amd64
|
platforms: linux/amd64
|
||||||
|
|
||||||
- name: Set outputs
|
- name: Set outputs
|
||||||
id: set
|
id: set
|
||||||
|
|||||||
73
.github/workflows/deploy-pr.yaml
vendored
73
.github/workflows/deploy-pr.yaml
vendored
@@ -9,11 +9,6 @@ permissions:
|
|||||||
pull-requests: write
|
pull-requests: write
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test:
|
|
||||||
name: Run Python Tests
|
|
||||||
if: github.event.action != 'closed'
|
|
||||||
uses: ./.github/workflows/run-tests.yml
|
|
||||||
|
|
||||||
build:
|
build:
|
||||||
if: github.event.action != 'closed'
|
if: github.event.action != 'closed'
|
||||||
name: Build and push image (reusable)
|
name: Build and push image (reusable)
|
||||||
@@ -21,32 +16,10 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
mode: pr
|
mode: pr
|
||||||
image_repo: lukastrkan/cc-app-demo
|
image_repo: lukastrkan/cc-app-demo
|
||||||
context: 7project/src/backend
|
context: 7project/backend
|
||||||
pr_number: ${{ github.event.pull_request.number }}
|
pr_number: ${{ github.event.pull_request.number }}
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
get_urls:
|
|
||||||
if: github.event.action != 'closed'
|
|
||||||
name: Generate Preview URLs
|
|
||||||
uses: ./.github/workflows/url_generator.yml
|
|
||||||
with:
|
|
||||||
runner: vhs
|
|
||||||
mode: pr
|
|
||||||
pr_number: ${{ github.event.pull_request.number }}
|
|
||||||
base_domain: ${{ vars.PROD_DOMAIN }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
frontend:
|
|
||||||
if: github.event.action != 'closed'
|
|
||||||
name: Frontend - Build and Deploy to Cloudflare Pages (PR)
|
|
||||||
needs: [get_urls]
|
|
||||||
uses: ./.github/workflows/frontend-pages.yml
|
|
||||||
with:
|
|
||||||
mode: pr
|
|
||||||
pr_number: ${{ github.event.pull_request.number }}
|
|
||||||
backend_url_scheme: ${{ needs.get_urls.outputs.backend_url_scheme }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
deploy:
|
deploy:
|
||||||
if: github.event.action != 'closed'
|
if: github.event.action != 'closed'
|
||||||
name: Helm upgrade/install (PR preview)
|
name: Helm upgrade/install (PR preview)
|
||||||
@@ -54,7 +27,7 @@ jobs:
|
|||||||
concurrency:
|
concurrency:
|
||||||
group: pr-${{ github.event.pull_request.number }}
|
group: pr-${{ github.event.pull_request.number }}
|
||||||
cancel-in-progress: false
|
cancel-in-progress: false
|
||||||
needs: [build, frontend, get_urls]
|
needs: [build]
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -77,49 +50,47 @@ jobs:
|
|||||||
|
|
||||||
- name: Helm upgrade/install PR preview
|
- name: Helm upgrade/install PR preview
|
||||||
env:
|
env:
|
||||||
DEV_BASE_DOMAIN: ${{ vars.BASE_DOMAIN }}
|
DEV_BASE_DOMAIN: ${{ secrets.BASE_DOMAIN }}
|
||||||
RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
|
RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
|
||||||
DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
|
DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
|
||||||
|
IMAGE_REPO: ${{ needs.build.outputs.image_repo }}
|
||||||
DIGEST: ${{ needs.build.outputs.digest }}
|
DIGEST: ${{ needs.build.outputs.digest }}
|
||||||
DOMAIN: "${{ needs.get_urls.outputs.backend_url }}"
|
|
||||||
DOMAIN_SCHEME: "${{ needs.get_urls.outputs.backend_url_scheme }}"
|
|
||||||
FRONTEND_DOMAIN: "${{ needs.get_urls.outputs.frontend_url }}"
|
|
||||||
FRONTEND_DOMAIN_SCHEME: "${{ needs.get_urls.outputs.frontend_url_scheme }}"
|
|
||||||
UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
|
|
||||||
run: |
|
run: |
|
||||||
PR=${{ github.event.pull_request.number }}
|
PR=${{ github.event.pull_request.number }}
|
||||||
|
if [ -z "$PR" ]; then echo "PR number missing"; exit 1; fi
|
||||||
|
if [ -z "$DEV_BASE_DOMAIN" ]; then echo "Secret DEV_BASE_DOMAIN is required (e.g., dev.example.com)"; exit 1; fi
|
||||||
|
if [ -z "$RABBITMQ_PASSWORD" ]; then echo "Secret DEV_RABBITMQ_PASSWORD is required"; exit 1; fi
|
||||||
|
if [ -z "$DB_PASSWORD" ]; then echo "Secret DEV_DB_PASSWORD is required"; exit 1; fi
|
||||||
RELEASE=myapp-pr-$PR
|
RELEASE=myapp-pr-$PR
|
||||||
NAMESPACE=pr-$PR
|
NAMESPACE=pr-$PR
|
||||||
helm upgrade --install "$RELEASE" ./7project/src/charts/myapp-chart \
|
DOMAIN=pr-$PR.$DEV_BASE_DOMAIN
|
||||||
|
if [ -z "$IMAGE_REPO" ]; then IMAGE_REPO="lukastrkan/cc-app-demo"; fi
|
||||||
|
helm upgrade --install "$RELEASE" ./7project/charts/myapp-chart \
|
||||||
-n "$NAMESPACE" --create-namespace \
|
-n "$NAMESPACE" --create-namespace \
|
||||||
-f 7project/src/charts/myapp-chart/values-dev.yaml \
|
-f 7project/charts/myapp-chart/values-dev.yaml \
|
||||||
--set prNumber="$PR" \
|
--set prNumber="$PR" \
|
||||||
--set deployment="pr-$PR" \
|
--set deployment="pr-$PR" \
|
||||||
--set domain="$DOMAIN" \
|
--set domain="$DOMAIN" \
|
||||||
--set domain_scheme="$DOMAIN_SCHEME" \
|
--set image.repository="$IMAGE_REPO" \
|
||||||
--set frontend_domain="$FRONTEND_DOMAIN" \
|
|
||||||
--set frontend_domain_scheme="$FRONTEND_DOMAIN_SCHEME" \
|
|
||||||
--set image.digest="$DIGEST" \
|
--set image.digest="$DIGEST" \
|
||||||
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
||||||
--set-string database.password="$DB_PASSWORD" \
|
--set-string database.password="$DB_PASSWORD"
|
||||||
--set-string database.encryptionSecret="$PR" \
|
|
||||||
--set-string app.name="finance-tracker-pr-$PR" \
|
|
||||||
--set-string unirate.key="$UNIRATE_API_KEY"
|
|
||||||
|
|
||||||
- name: Post preview URLs as PR comment
|
- name: Post preview URL as PR comment
|
||||||
uses: actions/github-script@v7
|
uses: actions/github-script@v7
|
||||||
env:
|
env:
|
||||||
BACKEND_URL: ${{ needs.get_urls.outputs.backend_url_scheme }}
|
DEV_BASE_DOMAIN: ${{ secrets.BASE_DOMAIN }}
|
||||||
FRONTEND_URL: ${{ needs.get_urls.outputs.frontend_url_scheme }}
|
|
||||||
with:
|
with:
|
||||||
script: |
|
script: |
|
||||||
const pr = context.payload.pull_request;
|
const pr = context.payload.pull_request;
|
||||||
if (!pr) { core.setFailed('No pull_request context'); return; }
|
if (!pr) { core.setFailed('No pull_request context'); return; }
|
||||||
const prNumber = pr.number;
|
const prNumber = pr.number;
|
||||||
const backendUrl = process.env.BACKEND_URL || '(not available)';
|
const domainBase = process.env.DEV_BASE_DOMAIN;
|
||||||
const frontendUrl = process.env.FRONTEND_URL || '(not available)';
|
if (!domainBase) { core.setFailed('DEV_BASE_DOMAIN is required'); return; }
|
||||||
const marker = '<!-- preview-comment-marker -->';
|
const domain = `pr-${prNumber}.${domainBase}`;
|
||||||
const body = `${marker}\nPreview environment is running\n- Frontend: ${frontendUrl}\n- Backend: ${backendUrl}\n`;
|
const url = `https://${domain}`;
|
||||||
|
const marker = '<!-- preview-link -->';
|
||||||
|
const body = `${marker}\nPreview environment is running: ${url}\n`;
|
||||||
const { owner, repo } = context.repo;
|
const { owner, repo } = context.repo;
|
||||||
const { data: comments } = await github.rest.issues.listComments({ owner, repo, issue_number: prNumber, per_page: 100 });
|
const { data: comments } = await github.rest.issues.listComments({ owner, repo, issue_number: prNumber, per_page: 100 });
|
||||||
const existing = comments.find(c => c.body && c.body.includes(marker));
|
const existing = comments.find(c => c.body && c.body.includes(marker));
|
||||||
|
|||||||
88
.github/workflows/deploy-prod.yaml
vendored
88
.github/workflows/deploy-prod.yaml
vendored
@@ -4,12 +4,10 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches: [ "main" ]
|
branches: [ "main" ]
|
||||||
paths:
|
paths:
|
||||||
- ../../7project/src/backend/**
|
- 7project/backend/**
|
||||||
- ../../7project/src/frontend/**
|
- 7project/charts/myapp-chart/**
|
||||||
- ../../7project/src/charts/myapp-chart/**
|
|
||||||
- .github/workflows/deploy-prod.yaml
|
- .github/workflows/deploy-prod.yaml
|
||||||
- .github/workflows/build-image.yaml
|
- .github/workflows/build-image.yaml
|
||||||
- .github/workflows/frontend-pages.yml
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
|
|
||||||
@@ -21,43 +19,19 @@ concurrency:
|
|||||||
cancel-in-progress: false
|
cancel-in-progress: false
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test:
|
|
||||||
name: Run Python Tests
|
|
||||||
uses: ./.github/workflows/run-tests.yml
|
|
||||||
|
|
||||||
build:
|
build:
|
||||||
name: Build and push image (reusable)
|
name: Build and push image (reusable)
|
||||||
needs: [test]
|
|
||||||
uses: ./.github/workflows/build-image.yaml
|
uses: ./.github/workflows/build-image.yaml
|
||||||
with:
|
with:
|
||||||
mode: prod
|
mode: prod
|
||||||
image_repo: lukastrkan/cc-app-demo
|
image_repo: lukastrkan/cc-app-demo
|
||||||
context: 7project/src/backend
|
context: 7project/backend
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
get_urls:
|
|
||||||
name: Generate Production URLs
|
|
||||||
needs: [test]
|
|
||||||
uses: ./.github/workflows/url_generator.yml
|
|
||||||
with:
|
|
||||||
mode: prod
|
|
||||||
runner: vhs
|
|
||||||
base_domain: ${{ vars.PROD_DOMAIN }}
|
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
frontend:
|
|
||||||
name: Frontend - Build and Deploy to Cloudflare Pages (prod)
|
|
||||||
needs: [get_urls]
|
|
||||||
uses: ./.github/workflows/frontend-pages.yml
|
|
||||||
with:
|
|
||||||
mode: prod
|
|
||||||
backend_url_scheme: ${{ needs.get_urls.outputs.backend_url_scheme }}
|
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
deploy:
|
deploy:
|
||||||
name: Helm upgrade/install (prod)
|
name: Helm upgrade/install (prod)
|
||||||
runs-on: vhs
|
runs-on: vhs
|
||||||
needs: [build, frontend, get_urls]
|
needs: [build]
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -80,53 +54,25 @@ jobs:
|
|||||||
|
|
||||||
- name: Helm upgrade/install prod
|
- name: Helm upgrade/install prod
|
||||||
env:
|
env:
|
||||||
DOMAIN: ${{ needs.get_urls.outputs.backend_url }}
|
DOMAIN: ${{ secrets.PROD_DOMAIN }}
|
||||||
DOMAIN_SCHEME: ${{ needs.get_urls.outputs.backend_url_scheme }}
|
|
||||||
FRONTEND_DOMAIN: ${{ needs.get_urls.outputs.frontend_url }}
|
|
||||||
FRONTEND_DOMAIN_SCHEME: ${{ needs.get_urls.outputs.frontend_url_scheme }}
|
|
||||||
RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
|
RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
|
||||||
DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
|
DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
|
||||||
|
IMAGE_REPO: ${{ needs.build.outputs.image_repo }}
|
||||||
DIGEST: ${{ needs.build.outputs.digest }}
|
DIGEST: ${{ needs.build.outputs.digest }}
|
||||||
BANKID_CLIENT_ID: ${{ secrets.BANKID_CLIENT_ID }}
|
|
||||||
BANKID_CLIENT_SECRET: ${{ secrets.BANKID_CLIENT_SECRET }}
|
|
||||||
MOJEID_CLIENT_ID: ${{ secrets.MOJEID_CLIENT_ID }}
|
|
||||||
MOJEID_CLIENT_SECRET: ${{ secrets.MOJEID_CLIENT_SECRET }}
|
|
||||||
CSAS_CLIENT_ID: ${{ secrets.CSAS_CLIENT_ID }}
|
|
||||||
CSAS_CLIENT_SECRET: ${{ secrets.CSAS_CLIENT_SECRET }}
|
|
||||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
|
||||||
SMTP_HOST: ${{ secrets.SMTP_HOST }}
|
|
||||||
SMTP_PORT: ${{ secrets.SMTP_PORT }}
|
|
||||||
SMTP_USERNAME: ${{ secrets.SMTP_USERNAME }}
|
|
||||||
SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
|
|
||||||
SMTP_USE_TLS: ${{ secrets.SMTP_USE_TLS }}
|
|
||||||
SMTP_USE_SSL: ${{ secrets.SMTP_USE_SSL }}
|
|
||||||
SMTP_FROM: ${{ secrets.SMTP_FROM }}
|
|
||||||
UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
|
|
||||||
run: |
|
run: |
|
||||||
helm upgrade --install myapp ./7project/src/charts/myapp-chart \
|
if [ -z "$DOMAIN" ]; then
|
||||||
|
echo "Secret PROD_DOMAIN is required (e.g., app.example.com)"; exit 1; fi
|
||||||
|
if [ -z "$RABBITMQ_PASSWORD" ]; then
|
||||||
|
echo "Secret PROD_RABBITMQ_PASSWORD is required"; exit 1; fi
|
||||||
|
if [ -z "$DB_PASSWORD" ]; then
|
||||||
|
echo "Secret PROD_DB_PASSWORD is required"; exit 1; fi
|
||||||
|
if [ -z "$IMAGE_REPO" ]; then IMAGE_REPO="lukastrkan/cc-app-demo"; fi
|
||||||
|
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
||||||
-n prod --create-namespace \
|
-n prod --create-namespace \
|
||||||
-f 7project/src/charts/myapp-chart/values-prod.yaml \
|
-f 7project/charts/myapp-chart/values-prod.yaml \
|
||||||
--set deployment="prod" \
|
--set deployment="prod" \
|
||||||
--set domain="$DOMAIN" \
|
--set domain="$DOMAIN" \
|
||||||
--set domain_scheme="$DOMAIN_SCHEME" \
|
--set image.repository="$IMAGE_REPO" \
|
||||||
--set frontend_domain="$FRONTEND_DOMAIN" \
|
|
||||||
--set frontend_domain_scheme="$FRONTEND_DOMAIN_SCHEME" \
|
|
||||||
--set image.digest="$DIGEST" \
|
--set image.digest="$DIGEST" \
|
||||||
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
||||||
--set-string database.password="$DB_PASSWORD" \
|
--set-string database.password="$DB_PASSWORD"
|
||||||
--set-string oauth.bankid.clientId="$BANKID_CLIENT_ID" \
|
|
||||||
--set-string oauth.bankid.clientSecret="$BANKID_CLIENT_SECRET" \
|
|
||||||
--set-string oauth.mojeid.clientId="$MOJEID_CLIENT_ID" \
|
|
||||||
--set-string oauth.mojeid.clientSecret="$MOJEID_CLIENT_SECRET" \
|
|
||||||
--set-string oauth.csas.clientId="$CSAS_CLIENT_ID" \
|
|
||||||
--set-string oauth.csas.clientSecret="$CSAS_CLIENT_SECRET" \
|
|
||||||
--set-string sentry_dsn="$SENTRY_DSN" \
|
|
||||||
--set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}" \
|
|
||||||
--set-string smtp.host="$SMTP_HOST" \
|
|
||||||
--set smtp.port="$SMTP_PORT" \
|
|
||||||
--set-string smtp.username="$SMTP_USERNAME" \
|
|
||||||
--set-string smtp.password="$SMTP_PASSWORD" \
|
|
||||||
--set-string smtp.tls="$SMTP_USE_TLS" \
|
|
||||||
--set-string smtp.ssl="$SMTP_USE_SSL" \
|
|
||||||
--set-string smtp.from="$SMTP_FROM" \
|
|
||||||
--set-string unirate.key="$UNIRATE_API_KEY"
|
|
||||||
|
|||||||
135
.github/workflows/frontend-pages.yml
vendored
135
.github/workflows/frontend-pages.yml
vendored
@@ -1,135 +0,0 @@
|
|||||||
name: Frontend - Build and Deploy to Cloudflare Pages
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
mode:
|
|
||||||
description: "Build mode: 'prod' or 'pr'"
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
pr_number:
|
|
||||||
description: 'PR number (required when mode=pr)'
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
project_name:
|
|
||||||
description: 'Cloudflare Pages project name (overrides default)'
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
backend_url_scheme:
|
|
||||||
description: 'The full scheme URL for the backend (e.g., https://api.example.com)'
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
secrets:
|
|
||||||
CLOUDFLARE_API_TOKEN:
|
|
||||||
required: true
|
|
||||||
CLOUDFLARE_ACCOUNT_ID:
|
|
||||||
required: true
|
|
||||||
outputs:
|
|
||||||
deployed_url:
|
|
||||||
description: 'URL of deployed frontend'
|
|
||||||
value: ${{ jobs.deploy.outputs.deployed_url }}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
name: Build frontend
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
working-directory: 7project/src/frontend
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Use Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: '20'
|
|
||||||
cache: 'npm'
|
|
||||||
cache-dependency-path: 7project/src/frontend/package-lock.json
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: npm ci
|
|
||||||
|
|
||||||
- name: Set backend URL from workflow input
|
|
||||||
run: |
|
|
||||||
echo "VITE_BACKEND_URL=${{ inputs.backend_url_scheme }}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Build
|
|
||||||
run: npm run build
|
|
||||||
|
|
||||||
- name: Upload build artifact
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: frontend-dist
|
|
||||||
path: 7project/src/frontend/dist
|
|
||||||
|
|
||||||
deploy:
|
|
||||||
name: Deploy to Cloudflare Pages
|
|
||||||
needs: build
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
deployed_url: ${{ steps.out.outputs.deployed_url }}
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Download build artifact
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
name: frontend-dist
|
|
||||||
path: dist
|
|
||||||
|
|
||||||
- name: Determine project name and branch
|
|
||||||
id: pname
|
|
||||||
env:
|
|
||||||
INPUT_MODE: ${{ inputs.mode }}
|
|
||||||
INPUT_PR: ${{ inputs.pr_number }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
# Prefer manual input, then repo variable, fallback to repo-name
|
|
||||||
INPUT_NAME='${{ inputs.project_name }}'
|
|
||||||
VAR_NAME='${{ vars.CF_PAGES_PROJECT_NAME }}'
|
|
||||||
if [ -n "$INPUT_NAME" ]; then PNAME_RAW="$INPUT_NAME";
|
|
||||||
elif [ -n "$VAR_NAME" ]; then PNAME_RAW="$VAR_NAME";
|
|
||||||
else PNAME_RAW="${GITHUB_REPOSITORY##*/}-frontend"; fi
|
|
||||||
# Normalize project name to lowercase to satisfy Cloudflare Pages naming
|
|
||||||
PNAME="${PNAME_RAW,,}"
|
|
||||||
# Determine branch for Pages
|
|
||||||
if [ "${INPUT_MODE}" = "pr" ]; then
|
|
||||||
if [ -z "${INPUT_PR}" ]; then echo "pr_number is required when mode=pr"; exit 1; fi
|
|
||||||
PBRANCH="pr-${INPUT_PR}"
|
|
||||||
else
|
|
||||||
PBRANCH="main"
|
|
||||||
fi
|
|
||||||
echo "project_name=$PNAME" >> $GITHUB_OUTPUT
|
|
||||||
echo "branch=$PBRANCH" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Ensure Cloudflare Pages project exists
|
|
||||||
env:
|
|
||||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
|
||||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
|
||||||
PNAME: ${{ steps.pname.outputs.project_name }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
npx wrangler pages project create "$PNAME" --production-branch=main || true
|
|
||||||
|
|
||||||
- name: Deploy using Cloudflare Wrangler
|
|
||||||
uses: cloudflare/wrangler-action@v3
|
|
||||||
with:
|
|
||||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
|
||||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
|
||||||
command: pages deploy dist --project-name=${{ steps.pname.outputs.project_name }} --branch=${{ steps.pname.outputs.branch }}
|
|
||||||
|
|
||||||
- name: Compute deployed URL
|
|
||||||
id: out
|
|
||||||
env:
|
|
||||||
PNAME: ${{ steps.pname.outputs.project_name }}
|
|
||||||
PBRANCH: ${{ steps.pname.outputs.branch }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
if [ "$PBRANCH" = "main" ]; then
|
|
||||||
URL="https://${PNAME}.pages.dev"
|
|
||||||
else
|
|
||||||
URL="https://${PBRANCH}.${PNAME}.pages.dev"
|
|
||||||
fi
|
|
||||||
echo "deployed_url=$URL" >> $GITHUB_OUTPUT
|
|
||||||
66
.github/workflows/run-tests.yml
vendored
66
.github/workflows/run-tests.yml
vendored
@@ -1,66 +0,0 @@
|
|||||||
name: Run Python Tests
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-and-test:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
services:
|
|
||||||
mariadb:
|
|
||||||
image: mariadb:11.4
|
|
||||||
env:
|
|
||||||
MARIADB_ROOT_PASSWORD: rootpw
|
|
||||||
MARIADB_DATABASE: group_project
|
|
||||||
MARIADB_USER: appuser
|
|
||||||
MARIADB_PASSWORD: apppass
|
|
||||||
ports:
|
|
||||||
- 3306:3306
|
|
||||||
options: >-
|
|
||||||
--health-cmd="mariadb-admin ping -h 127.0.0.1 -u root -prootpw --silent"
|
|
||||||
--health-interval=5s
|
|
||||||
--health-timeout=2s
|
|
||||||
--health-retries=20
|
|
||||||
|
|
||||||
env:
|
|
||||||
MARIADB_HOST: 127.0.0.1
|
|
||||||
MARIADB_PORT: "3306"
|
|
||||||
MARIADB_DB: group_project
|
|
||||||
MARIADB_USER: appuser
|
|
||||||
MARIADB_PASSWORD: apppass
|
|
||||||
# Ensure the application uses MariaDB (async) during tests
|
|
||||||
DATABASE_URL: mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project
|
|
||||||
DISABLE_METRICS: "1"
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Check out repository code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up Python 3.11
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: '3.11'
|
|
||||||
|
|
||||||
- name: Add test dependencies to requirements
|
|
||||||
run: |
|
|
||||||
echo "pytest==8.4.2" >> ./7project/src/backend/requirements.txt
|
|
||||||
echo "pytest-asyncio==1.2.0" >> ./7project/src/backend/requirements.txt
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
|
||||||
python -m pip install --upgrade pip
|
|
||||||
pip install -r ./7project/src/backend/requirements.txt
|
|
||||||
|
|
||||||
- name: Run Alembic migrations
|
|
||||||
run: |
|
|
||||||
alembic upgrade head
|
|
||||||
working-directory: ./7project/src/backend
|
|
||||||
|
|
||||||
- name: Run tests with pytest
|
|
||||||
env:
|
|
||||||
PYTEST_RUN_CONFIG: "True"
|
|
||||||
run: pytest
|
|
||||||
working-directory: ./7project/src/backend
|
|
||||||
74
.github/workflows/url_generator.yml
vendored
74
.github/workflows/url_generator.yml
vendored
@@ -1,74 +0,0 @@
|
|||||||
name: Generate Preview or Production URLs
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
mode:
|
|
||||||
description: "Build mode: 'prod' or 'pr'"
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
pr_number:
|
|
||||||
description: 'PR number (required when mode=pr)'
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
runner:
|
|
||||||
description: 'The runner to use for this job'
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
default: 'ubuntu-latest'
|
|
||||||
base_domain:
|
|
||||||
description: 'The base domain for production URLs (e.g., example.com)'
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
backend_url:
|
|
||||||
description: "The backend URL without scheme (e.g., api.example.com)"
|
|
||||||
value: ${{ jobs.generate-urls.outputs.backend_url }}
|
|
||||||
frontend_url:
|
|
||||||
description: "The frontend URL without scheme (e.g., app.example.com)"
|
|
||||||
value: ${{ jobs.generate-urls.outputs.frontend_url }}
|
|
||||||
backend_url_scheme:
|
|
||||||
description: "The backend URL with scheme (e.g., https://api.example.com)"
|
|
||||||
value: ${{ jobs.generate-urls.outputs.backend_url_scheme }}
|
|
||||||
frontend_url_scheme:
|
|
||||||
description: "The frontend URL with scheme (e.g., https://app.example.com)"
|
|
||||||
value: ${{ jobs.generate-urls.outputs.frontend_url_scheme }}
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
generate-urls:
|
|
||||||
permissions:
|
|
||||||
contents: none
|
|
||||||
runs-on: ${{ inputs.runner }}
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
backend_url: ${{ steps.set_urls.outputs.backend_url }}
|
|
||||||
frontend_url: ${{ steps.set_urls.outputs.frontend_url }}
|
|
||||||
backend_url_scheme: ${{ steps.set_urls.outputs.backend_url_scheme }}
|
|
||||||
frontend_url_scheme: ${{ steps.set_urls.outputs.frontend_url_scheme }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Generate URLs
|
|
||||||
id: set_urls
|
|
||||||
env:
|
|
||||||
BASE_DOMAIN: ${{ inputs.base_domain }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
if [ "${{ inputs.mode }}" = "prod" ]; then
|
|
||||||
BACKEND_URL="api.${BASE_DOMAIN}"
|
|
||||||
FRONTEND_URL="finance.${BASE_DOMAIN}"
|
|
||||||
else
|
|
||||||
# This is your current logic
|
|
||||||
FRONTEND_URL="pr-${{ inputs.pr_number }}.group-8-frontend.pages.dev"
|
|
||||||
BACKEND_URL="api-pr-${{ inputs.pr_number }}.${BASE_DOMAIN}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
FRONTEND_URL_SCHEME="https://$FRONTEND_URL"
|
|
||||||
BACKEND_URL_SCHEME="https://$BACKEND_URL"
|
|
||||||
|
|
||||||
# This part correctly writes to GITHUB_OUTPUT for the step
|
|
||||||
echo "backend_url_scheme=$BACKEND_URL_SCHEME" >> $GITHUB_OUTPUT
|
|
||||||
echo "frontend_url_scheme=$FRONTEND_URL_SCHEME" >> $GITHUB_OUTPUT
|
|
||||||
echo "backend_url=$BACKEND_URL" >> $GITHUB_OUTPUT
|
|
||||||
echo "frontend_url=$FRONTEND_URL" >> $GITHUB_OUTPUT
|
|
||||||
8
.idea/.gitignore
generated
vendored
8
.idea/.gitignore
generated
vendored
@@ -1,8 +0,0 @@
|
|||||||
# Default ignored files
|
|
||||||
/shelf/
|
|
||||||
/workspace.xml
|
|
||||||
# Editor-based HTTP Client requests
|
|
||||||
/httpRequests/
|
|
||||||
# Datasource local storage ignored files
|
|
||||||
/dataSources/
|
|
||||||
/dataSources.local.xml
|
|
||||||
@@ -45,11 +45,11 @@ flowchart LR
|
|||||||
proc_cron[Task planner] --> proc_queue
|
proc_cron[Task planner] --> proc_queue
|
||||||
proc_queue_worker --> ext_bank[(Bank API)]
|
proc_queue_worker --> ext_bank[(Bank API)]
|
||||||
proc_queue_worker --> db
|
proc_queue_worker --> db
|
||||||
client[Client/UI] <--> api[API Gateway / Web Server]
|
client[Client/UI] --> api[API Gateway / Web Server]
|
||||||
api <--> svc[Web API]
|
api --> svc[Web API]
|
||||||
svc --> proc_queue
|
svc --> proc_queue
|
||||||
svc <--> db[(Database)]
|
svc --> db[(Database)]
|
||||||
svc <--> cache[(Cache)]
|
svc --> cache[(Cache)]
|
||||||
```
|
```
|
||||||
|
|
||||||
- Components and responsibilities: What does each box do?
|
- Components and responsibilities: What does each box do?
|
||||||
|
|||||||
16
7project/.gitignore
vendored
16
7project/.gitignore
vendored
@@ -1,8 +1,8 @@
|
|||||||
/src/tofu/controlplane.yaml
|
/tofu/controlplane.yaml
|
||||||
/src/tofu/kubeconfig
|
/tofu/kubeconfig
|
||||||
/src/tofu/talosconfig
|
/tofu/talosconfig
|
||||||
/src/tofu/terraform.tfstate
|
/tofu/terraform.tfstate
|
||||||
/src/tofu/terraform.tfstate.backup
|
/tofu/terraform.tfstate.backup
|
||||||
/src/tofu/worker.yaml
|
/tofu/worker.yaml
|
||||||
/src/tofu/.terraform.lock.hcl
|
/tofu/.terraform.lock.hcl
|
||||||
/src/tofu/.terraform/
|
/tofu/.terraform/
|
||||||
|
|||||||
8
7project/.idea/.gitignore
generated
vendored
8
7project/.idea/.gitignore
generated
vendored
@@ -1,8 +0,0 @@
|
|||||||
# Default ignored files
|
|
||||||
/shelf/
|
|
||||||
/workspace.xml
|
|
||||||
# Editor-based HTTP Client requests
|
|
||||||
/httpRequests/
|
|
||||||
# Datasource local storage ignored files
|
|
||||||
/dataSources/
|
|
||||||
/dataSources.local.xml
|
|
||||||
@@ -1,6 +1,43 @@
|
|||||||
# Personal Finance Tracker
|
# Lab 6: Design Document for Course Project
|
||||||
## Folder Structure
|
|
||||||
- meetings: Contains note from meetings
|
| Lab 6: | Design Document for Course Project |
|
||||||
- scr: Source code for the project
|
| ----------- | ---------------------------------- |
|
||||||
- checklist: Project checklist and self assessment tracking
|
| Subject: | DAT515 Cloud Computing |
|
||||||
- report.md: Detailed report of the project
|
| Deadline: | **September 19, 2025 23:59** |
|
||||||
|
| Grading: | No Grade |
|
||||||
|
| Submission: | Group |
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
- [Table of Contents](#table-of-contents)
|
||||||
|
- [1. Design Document (design.md)](#1-design-document-designmd)
|
||||||
|
|
||||||
|
The design document is the first deliverable for your project.
|
||||||
|
We separated this out as a separate deliverable, with its own deadline, to ensure that you have a clear plan before you start coding.
|
||||||
|
This part only needs a cursory review by the teaching staff to ensure it is sufficiently comprehensive, while still realistic.
|
||||||
|
The teaching staff will assign you to a project mentor who will provide guidance and support throughout the development process.
|
||||||
|
|
||||||
|
## 1. Design Document (design.md)
|
||||||
|
|
||||||
|
You are required to prepare a design document for your application.
|
||||||
|
The design doc should be brief, well-organized and easy to understand.
|
||||||
|
The design doc should be prepared in markdown format and named `design.md` and submitted in the project group's repository.
|
||||||
|
Remember that you can use [mermaid diagrams](https://github.com/mermaid-js/mermaid#readme) in markdown files.
|
||||||
|
|
||||||
|
The design doc **should include** the following sections:
|
||||||
|
|
||||||
|
- **Overview**: A brief description of the application and its purpose.
|
||||||
|
- **Architecture**: The high-level architecture of the application, including components, interactions, and data flow.
|
||||||
|
- **Technologies**: The cloud computing technologies or services used in the application.
|
||||||
|
- **Deployment**: The deployment strategy for the application, including any infrastructure requirements.
|
||||||
|
|
||||||
|
The design document should be updated throughout the development process and reflect the final implementation of your project.
|
||||||
|
|
||||||
|
Optional sections may include:
|
||||||
|
|
||||||
|
- Security: The security measures implemented in the application to protect data and resources.
|
||||||
|
- Scalability: The scalability considerations for the application, including load balancing and auto-scaling.
|
||||||
|
- Monitoring: The monitoring and logging strategy for the application to track performance and detect issues.
|
||||||
|
- Disaster Recovery: The disaster recovery plan for the application to ensure business continuity in case of failures.
|
||||||
|
- Cost Analysis: The cost analysis of running the application on the cloud, including pricing models and cost-saving strategies.
|
||||||
|
- References: Any external sources or references used in the design document.
|
||||||
|
|||||||
8
7project/backend/Dockerfile
Normal file
8
7project/backend/Dockerfile
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
FROM python:3.11-slim
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
COPY requirements.txt .
|
||||||
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
|
COPY . .
|
||||||
|
EXPOSE 8000
|
||||||
|
CMD alembic upgrade head && uvicorn app.app:app --host 0.0.0.0 --port 8000
|
||||||
@@ -11,7 +11,7 @@ script_location = %(here)s/alembic
|
|||||||
# Uncomment the line below if you want the files to be prepended with date and time
|
# Uncomment the line below if you want the files to be prepended with date and time
|
||||||
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
|
||||||
# for all available tokens
|
# for all available tokens
|
||||||
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
|
||||||
|
|
||||||
# sys.path path, will be prepended to sys.path if present.
|
# sys.path path, will be prepended to sys.path if present.
|
||||||
# defaults to the current working directory. for multiple paths, the path separator
|
# defaults to the current working directory. for multiple paths, the path separator
|
||||||
@@ -25,8 +25,7 @@ if not DATABASE_URL:
|
|||||||
|
|
||||||
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
||||||
|
|
||||||
host_env = os.getenv("MARIADB_HOST", "localhost")
|
ssl_enabled = os.getenv("MARIADB_HOST", "localhost") != "localhost"
|
||||||
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
|
|
||||||
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
||||||
|
|
||||||
def run_migrations_offline() -> None:
|
def run_migrations_offline() -> None:
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
"""add categories
|
"""Init migration
|
||||||
|
|
||||||
Revision ID: 63e072f09836
|
Revision ID: 81f275275556
|
||||||
Revises:
|
Revises:
|
||||||
Create Date: 2025-10-09 14:56:14.653249
|
Create Date: 2025-09-24 17:39:25.346690
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from typing import Sequence, Union
|
from typing import Sequence, Union
|
||||||
@@ -13,7 +13,7 @@ import sqlalchemy as sa
|
|||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
revision: str = '63e072f09836'
|
revision: str = '81f275275556'
|
||||||
down_revision: Union[str, Sequence[str], None] = None
|
down_revision: Union[str, Sequence[str], None] = None
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
@@ -22,6 +22,12 @@ depends_on: Union[str, Sequence[str], None] = None
|
|||||||
def upgrade() -> None:
|
def upgrade() -> None:
|
||||||
"""Upgrade schema."""
|
"""Upgrade schema."""
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table('transaction',
|
||||||
|
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
||||||
|
sa.Column('amount', sa.Float(), nullable=False),
|
||||||
|
sa.Column('description', sa.String(length=255), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint('id')
|
||||||
|
)
|
||||||
op.create_table('user',
|
op.create_table('user',
|
||||||
sa.Column('first_name', sa.String(length=100), nullable=True),
|
sa.Column('first_name', sa.String(length=100), nullable=True),
|
||||||
sa.Column('last_name', sa.String(length=100), nullable=True),
|
sa.Column('last_name', sa.String(length=100), nullable=True),
|
||||||
@@ -34,38 +40,13 @@ def upgrade() -> None:
|
|||||||
sa.PrimaryKeyConstraint('id')
|
sa.PrimaryKeyConstraint('id')
|
||||||
)
|
)
|
||||||
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
|
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
|
||||||
op.create_table('categories',
|
|
||||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
|
||||||
sa.Column('name', sa.String(length=100), nullable=False),
|
|
||||||
sa.Column('description', sa.String(length=255), nullable=True),
|
|
||||||
sa.Column('user_id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
|
||||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
|
||||||
sa.PrimaryKeyConstraint('id'),
|
|
||||||
sa.UniqueConstraint('name')
|
|
||||||
)
|
|
||||||
op.create_table('transaction',
|
|
||||||
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
|
|
||||||
sa.Column('amount', sa.Float(), nullable=False),
|
|
||||||
sa.Column('description', sa.String(length=255), nullable=True),
|
|
||||||
sa.Column('user_id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
|
||||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
|
||||||
sa.PrimaryKeyConstraint('id')
|
|
||||||
)
|
|
||||||
op.create_table('category_transaction',
|
|
||||||
sa.Column('id_category', sa.Integer(), nullable=True),
|
|
||||||
sa.Column('id_transaction', sa.Integer(), nullable=True),
|
|
||||||
sa.ForeignKeyConstraint(['id_category'], ['categories.id'], ),
|
|
||||||
sa.ForeignKeyConstraint(['id_transaction'], ['transaction.id'], )
|
|
||||||
)
|
|
||||||
# ### end Alembic commands ###
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
def downgrade() -> None:
|
||||||
"""Downgrade schema."""
|
"""Downgrade schema."""
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
op.drop_table('category_transaction')
|
|
||||||
op.drop_table('transaction')
|
|
||||||
op.drop_table('categories')
|
|
||||||
op.drop_index(op.f('ix_user_email'), table_name='user')
|
op.drop_index(op.f('ix_user_email'), table_name='user')
|
||||||
op.drop_table('user')
|
op.drop_table('user')
|
||||||
|
op.drop_table('transaction')
|
||||||
# ### end Alembic commands ###
|
# ### end Alembic commands ###
|
||||||
56
7project/backend/app/app.py
Normal file
56
7project/backend/app/app.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
from fastapi import Depends, FastAPI
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
|
||||||
|
from app.models.user import User
|
||||||
|
|
||||||
|
from app.schemas.user import UserCreate, UserRead, UserUpdate
|
||||||
|
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users
|
||||||
|
|
||||||
|
app = FastAPI()
|
||||||
|
|
||||||
|
# CORS for frontend dev server
|
||||||
|
app.add_middleware(
|
||||||
|
CORSMiddleware,
|
||||||
|
allow_origins=[
|
||||||
|
"http://localhost:5173",
|
||||||
|
"http://127.0.0.1:5173",
|
||||||
|
],
|
||||||
|
allow_credentials=True,
|
||||||
|
allow_methods=["*"],
|
||||||
|
allow_headers=["*"],
|
||||||
|
)
|
||||||
|
|
||||||
|
app.include_router(
|
||||||
|
fastapi_users.get_auth_router(auth_backend), prefix="/auth/jwt", tags=["auth"]
|
||||||
|
)
|
||||||
|
app.include_router(
|
||||||
|
fastapi_users.get_register_router(UserRead, UserCreate),
|
||||||
|
prefix="/auth",
|
||||||
|
tags=["auth"],
|
||||||
|
)
|
||||||
|
app.include_router(
|
||||||
|
fastapi_users.get_reset_password_router(),
|
||||||
|
prefix="/auth",
|
||||||
|
tags=["auth"],
|
||||||
|
)
|
||||||
|
app.include_router(
|
||||||
|
fastapi_users.get_verify_router(UserRead),
|
||||||
|
prefix="/auth",
|
||||||
|
tags=["auth"],
|
||||||
|
)
|
||||||
|
app.include_router(
|
||||||
|
fastapi_users.get_users_router(UserRead, UserUpdate),
|
||||||
|
prefix="/users",
|
||||||
|
tags=["users"],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Liveness/root endpoint
|
||||||
|
@app.get("/", include_in_schema=False)
|
||||||
|
async def root():
|
||||||
|
return {"status": "ok", "message": "Welcome to the FastAPI application!"}
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/authenticated-route")
|
||||||
|
async def authenticated_route(user: User = Depends(current_active_verified_user)):
|
||||||
|
return {"message": f"Hello {user.email}!"}
|
||||||
@@ -1,7 +1,5 @@
|
|||||||
import os
|
import os
|
||||||
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
|
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
|
||||||
from sqlalchemy import create_engine
|
|
||||||
from sqlalchemy.orm import sessionmaker
|
|
||||||
from app.core.base import Base
|
from app.core.base import Base
|
||||||
|
|
||||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||||
@@ -19,13 +17,10 @@ if not DATABASE_URL:
|
|||||||
# Load all models to register them
|
# Load all models to register them
|
||||||
from app.models.user import User
|
from app.models.user import User
|
||||||
from app.models.transaction import Transaction
|
from app.models.transaction import Transaction
|
||||||
from app.models.categories import Category
|
|
||||||
|
|
||||||
host_env = os.getenv("MARIADB_HOST", "localhost")
|
ssl_enabled = os.getenv("MARIADB_HOST", "localhost") != "localhost"
|
||||||
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
|
|
||||||
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
||||||
|
|
||||||
# Async engine/session for the async parts of the app
|
|
||||||
engine = create_async_engine(
|
engine = create_async_engine(
|
||||||
DATABASE_URL,
|
DATABASE_URL,
|
||||||
pool_pre_ping=True,
|
pool_pre_ping=True,
|
||||||
@@ -33,13 +28,3 @@ engine = create_async_engine(
|
|||||||
connect_args=connect_args,
|
connect_args=connect_args,
|
||||||
)
|
)
|
||||||
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
|
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
|
||||||
|
|
||||||
# Synchronous engine/session for sync utilities (e.g., bank_scraper)
|
|
||||||
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
|
||||||
engine_sync = create_engine(
|
|
||||||
SYNC_DATABASE_URL,
|
|
||||||
pool_pre_ping=True,
|
|
||||||
echo=os.getenv("SQL_ECHO", "0") == "1",
|
|
||||||
connect_args=connect_args,
|
|
||||||
)
|
|
||||||
sync_session_maker = sessionmaker(bind=engine_sync, expire_on_commit=False)
|
|
||||||
6
7project/backend/app/core/queue.py
Normal file
6
7project/backend/app/core/queue.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import app.celery_app # noqa: F401
|
||||||
|
from app.workers.celery_tasks import send_email
|
||||||
|
|
||||||
|
|
||||||
|
def enqueue_email(to: str, subject: str, body: str) -> None:
|
||||||
|
send_email.delay(to, subject, body)
|
||||||
9
7project/backend/app/models/transaction.py
Normal file
9
7project/backend/app/models/transaction.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
from sqlalchemy import Column, Integer, String, Float
|
||||||
|
from app.core.base import Base
|
||||||
|
|
||||||
|
class Transaction(Base):
|
||||||
|
__tablename__ = "transaction"
|
||||||
|
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||||
|
amount = Column(Float, nullable=False)
|
||||||
|
description = Column(String(length=255), nullable=True)
|
||||||
|
|
||||||
7
7project/backend/app/models/user.py
Normal file
7
7project/backend/app/models/user.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from sqlalchemy import Column, String
|
||||||
|
from fastapi_users.db import SQLAlchemyBaseUserTableUUID
|
||||||
|
from app.core.base import Base
|
||||||
|
|
||||||
|
class User(SQLAlchemyBaseUserTableUUID, Base):
|
||||||
|
first_name = Column(String(length=100), nullable=True)
|
||||||
|
last_name = Column(String(length=100), nullable=True)
|
||||||
@@ -1,17 +1,16 @@
|
|||||||
import uuid
|
import uuid
|
||||||
from typing import Optional, Dict, Any
|
from typing import Optional
|
||||||
from fastapi_users import schemas
|
from fastapi_users import schemas
|
||||||
|
|
||||||
class UserRead(schemas.BaseUser[uuid.UUID]):
|
class UserRead(schemas.BaseUser[uuid.UUID]):
|
||||||
first_name: Optional[str] = None
|
first_name: Optional[str] = None
|
||||||
last_name: Optional[str] = None
|
surname: Optional[str] = None
|
||||||
config: Optional[Dict[str, Any]] = None
|
|
||||||
|
|
||||||
class UserCreate(schemas.BaseUserCreate):
|
class UserCreate(schemas.BaseUserCreate):
|
||||||
first_name: Optional[str] = None
|
first_name: Optional[str] = None
|
||||||
last_name: Optional[str] = None
|
surname: Optional[str] = None
|
||||||
|
|
||||||
class UserUpdate(schemas.BaseUserUpdate):
|
class UserUpdate(schemas.BaseUserUpdate):
|
||||||
first_name: Optional[str] = None
|
first_name: Optional[str] = None
|
||||||
last_name: Optional[str] = None
|
surname: Optional[str] = None
|
||||||
|
|
||||||
@@ -4,13 +4,11 @@ from sqlalchemy.ext.asyncio import AsyncSession
|
|||||||
from fastapi_users.db import SQLAlchemyUserDatabase
|
from fastapi_users.db import SQLAlchemyUserDatabase
|
||||||
|
|
||||||
from ..core.db import async_session_maker
|
from ..core.db import async_session_maker
|
||||||
from ..models.user import User, OAuthAccount
|
from ..models.user import User
|
||||||
|
|
||||||
|
|
||||||
async def get_async_session() -> AsyncGenerator[AsyncSession, None]:
|
async def get_async_session() -> AsyncGenerator[AsyncSession, None]:
|
||||||
async with async_session_maker() as session:
|
async with async_session_maker() as session:
|
||||||
yield session
|
yield session
|
||||||
|
|
||||||
|
|
||||||
async def get_user_db(session: AsyncSession = Depends(get_async_session)):
|
async def get_user_db(session: AsyncSession = Depends(get_async_session)):
|
||||||
yield SQLAlchemyUserDatabase(session, User, OAuthAccount)
|
yield SQLAlchemyUserDatabase(session, User)
|
||||||
@@ -3,66 +3,26 @@ import uuid
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from fastapi import Depends, Request
|
from fastapi import Depends, Request
|
||||||
from fastapi_users import BaseUserManager, FastAPIUsers, UUIDIDMixin, models
|
from fastapi_users import BaseUserManager, FastAPIUsers, UUIDIDMixin
|
||||||
from fastapi_users.authentication import (
|
from fastapi_users.authentication import (
|
||||||
AuthenticationBackend,
|
AuthenticationBackend,
|
||||||
BearerTransport,
|
BearerTransport,
|
||||||
)
|
)
|
||||||
from fastapi_users.authentication.strategy.jwt import JWTStrategy
|
from fastapi_users.authentication.strategy.jwt import JWTStrategy
|
||||||
from fastapi_users.db import SQLAlchemyUserDatabase
|
from fastapi_users.db import SQLAlchemyUserDatabase
|
||||||
from httpx_oauth.oauth2 import BaseOAuth2
|
|
||||||
|
|
||||||
from app.models.user import User
|
from app.models.user import User
|
||||||
from app.oauth.bank_id import BankID
|
|
||||||
from app.workers.celery_tasks import send_email
|
|
||||||
from app.oauth.custom_openid import CustomOpenID
|
|
||||||
from app.oauth.moje_id import MojeIDOAuth
|
|
||||||
from app.services.db import get_user_db
|
from app.services.db import get_user_db
|
||||||
|
from app.core.queue import enqueue_email
|
||||||
|
|
||||||
SECRET = os.getenv("SECRET", "CHANGE_ME_SECRET")
|
SECRET = os.getenv("SECRET", "CHANGE_ME_SECRET")
|
||||||
|
|
||||||
FRONTEND_URL = os.getenv("FRONTEND_URL", "http://localhost:5173")
|
FRONTEND_URL = os.getenv("FRONTEND_URL", "http://localhost:5173")
|
||||||
BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:8000")
|
BACKEND_URL = os.getenv("BACKEND_URL", "http://localhost:8000")
|
||||||
|
|
||||||
providers = {
|
|
||||||
"MojeID": MojeIDOAuth(
|
|
||||||
os.getenv("MOJEID_CLIENT_ID", "CHANGE_ME_CLIENT_ID"),
|
|
||||||
os.getenv("MOJEID_CLIENT_SECRET", "CHANGE_ME_CLIENT_SECRET"),
|
|
||||||
),
|
|
||||||
"BankID": BankID(
|
|
||||||
os.getenv("BANKID_CLIENT_ID", "CHANGE_ME_CLIENT_ID"),
|
|
||||||
os.getenv("BANKID_CLIENT_SECRET", "CHANGE_ME_CLIENT_SECRET"),
|
|
||||||
),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def get_oauth_provider(name: str) -> Optional[BaseOAuth2]:
|
|
||||||
if name not in providers:
|
|
||||||
return None
|
|
||||||
return providers[name]
|
|
||||||
|
|
||||||
|
|
||||||
class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
|
class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
|
||||||
reset_password_token_secret = SECRET
|
reset_password_token_secret = SECRET
|
||||||
verification_token_secret = SECRET
|
verification_token_secret = SECRET
|
||||||
|
|
||||||
async def oauth_callback(self: "BaseUserManager[models.UOAP, models.ID]", oauth_name: str, access_token: str,
|
|
||||||
account_id: str, account_email: str, expires_at: Optional[int] = None,
|
|
||||||
refresh_token: Optional[str] = None, request: Optional[Request] = None, *,
|
|
||||||
associate_by_email: bool = False, is_verified_by_default: bool = False) -> models.UOAP:
|
|
||||||
|
|
||||||
user = await super().oauth_callback(oauth_name, access_token, account_id, account_email, expires_at,
|
|
||||||
refresh_token, request, associate_by_email=associate_by_email,
|
|
||||||
is_verified_by_default=is_verified_by_default)
|
|
||||||
|
|
||||||
# set additional user info from the OAuth provider
|
|
||||||
provider = get_oauth_provider(oauth_name)
|
|
||||||
if provider is not None and isinstance(provider, CustomOpenID):
|
|
||||||
update_dict = await provider.get_user_info(access_token)
|
|
||||||
await self.user_db.update(user, update_dict)
|
|
||||||
|
|
||||||
return user
|
|
||||||
|
|
||||||
async def on_after_register(self, user: User, request: Optional[Request] = None):
|
async def on_after_register(self, user: User, request: Optional[Request] = None):
|
||||||
await self.request_verify(user, request)
|
await self.request_verify(user, request)
|
||||||
|
|
||||||
@@ -86,23 +46,19 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
|
|||||||
"Pokud jsi registraci neprováděl(a), tento email ignoruj.\n"
|
"Pokud jsi registraci neprováděl(a), tento email ignoruj.\n"
|
||||||
)
|
)
|
||||||
try:
|
try:
|
||||||
send_email.delay(user.email, subject, body)
|
enqueue_email(to=user.email, subject=subject, body=body)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("[Email Fallback] To:", user.email)
|
print("[Email Fallback] To:", user.email)
|
||||||
print("[Email Fallback] Subject:", subject)
|
print("[Email Fallback] Subject:", subject)
|
||||||
print("[Email Fallback] Body:\n", body)
|
print("[Email Fallback] Body:\n", body)
|
||||||
|
|
||||||
|
|
||||||
async def get_user_manager(user_db: SQLAlchemyUserDatabase = Depends(get_user_db)):
|
async def get_user_manager(user_db: SQLAlchemyUserDatabase = Depends(get_user_db)):
|
||||||
yield UserManager(user_db)
|
yield UserManager(user_db)
|
||||||
|
|
||||||
|
|
||||||
bearer_transport = BearerTransport(tokenUrl="auth/jwt/login")
|
bearer_transport = BearerTransport(tokenUrl="auth/jwt/login")
|
||||||
|
|
||||||
|
|
||||||
def get_jwt_strategy() -> JWTStrategy:
|
def get_jwt_strategy() -> JWTStrategy:
|
||||||
return JWTStrategy(secret=SECRET, lifetime_seconds=604800)
|
return JWTStrategy(secret=SECRET, lifetime_seconds=3600)
|
||||||
|
|
||||||
|
|
||||||
auth_backend = AuthenticationBackend(
|
auth_backend = AuthenticationBackend(
|
||||||
name="jwt",
|
name="jwt",
|
||||||
@@ -114,3 +70,4 @@ fastapi_users = FastAPIUsers[User, uuid.UUID](get_user_manager, [auth_backend])
|
|||||||
|
|
||||||
current_active_user = fastapi_users.current_user(active=True)
|
current_active_user = fastapi_users.current_user(active=True)
|
||||||
current_active_verified_user = fastapi_users.current_user(active=True, verified=True)
|
current_active_verified_user = fastapi_users.current_user(active=True, verified=True)
|
||||||
|
|
||||||
19
7project/backend/app/workers/celery_tasks.py
Normal file
19
7project/backend/app/workers/celery_tasks.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
from celery import shared_task
|
||||||
|
|
||||||
|
logger = logging.getLogger("celery_tasks")
|
||||||
|
if not logger.handlers:
|
||||||
|
_h = logging.StreamHandler()
|
||||||
|
logger.addHandler(_h)
|
||||||
|
logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="workers.send_email")
|
||||||
|
def send_email(to: str, subject: str, body: str) -> None:
|
||||||
|
if not (to and subject and body):
|
||||||
|
logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Placeholder for real email sending logic
|
||||||
|
logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
|
||||||
4
7project/backend/main.py
Normal file
4
7project/backend/main.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
import uvicorn
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
uvicorn.run("app.app:app", host="0.0.0.0", log_level="info")
|
||||||
@@ -11,7 +11,6 @@ asyncmy==0.2.9
|
|||||||
bcrypt==4.3.0
|
bcrypt==4.3.0
|
||||||
billiard==4.2.2
|
billiard==4.2.2
|
||||||
celery==5.5.3
|
celery==5.5.3
|
||||||
certifi==2025.10.5
|
|
||||||
cffi==2.0.0
|
cffi==2.0.0
|
||||||
click==8.1.8
|
click==8.1.8
|
||||||
click-didyoumean==0.3.1
|
click-didyoumean==0.3.1
|
||||||
@@ -26,10 +25,7 @@ fastapi-users==14.0.1
|
|||||||
fastapi-users-db-sqlalchemy==7.0.0
|
fastapi-users-db-sqlalchemy==7.0.0
|
||||||
greenlet==3.2.4
|
greenlet==3.2.4
|
||||||
h11==0.16.0
|
h11==0.16.0
|
||||||
httpcore==1.0.9
|
|
||||||
httptools==0.6.4
|
httptools==0.6.4
|
||||||
httpx==0.28.1
|
|
||||||
httpx-oauth==0.16.1
|
|
||||||
idna==3.10
|
idna==3.10
|
||||||
kombu==5.5.4
|
kombu==5.5.4
|
||||||
makefun==1.16.0
|
makefun==1.16.0
|
||||||
@@ -38,8 +34,6 @@ MarkupSafe==3.0.2
|
|||||||
multidict==6.6.4
|
multidict==6.6.4
|
||||||
packaging==25.0
|
packaging==25.0
|
||||||
pamqp==3.3.0
|
pamqp==3.3.0
|
||||||
prometheus-fastapi-instrumentator==7.1.0
|
|
||||||
prometheus_client==0.23.1
|
|
||||||
prompt_toolkit==3.0.52
|
prompt_toolkit==3.0.52
|
||||||
propcache==0.3.2
|
propcache==0.3.2
|
||||||
pwdlib==0.2.1
|
pwdlib==0.2.1
|
||||||
@@ -52,17 +46,14 @@ python-dateutil==2.9.0.post0
|
|||||||
python-dotenv==1.1.1
|
python-dotenv==1.1.1
|
||||||
python-multipart==0.0.20
|
python-multipart==0.0.20
|
||||||
PyYAML==6.0.2
|
PyYAML==6.0.2
|
||||||
sentry-sdk==2.42.0
|
|
||||||
six==1.17.0
|
six==1.17.0
|
||||||
sniffio==1.3.1
|
sniffio==1.3.1
|
||||||
SQLAlchemy==2.0.43
|
SQLAlchemy==2.0.43
|
||||||
SQLAlchemy-Utils==0.42.0
|
|
||||||
starlette==0.48.0
|
starlette==0.48.0
|
||||||
tomli==2.2.1
|
tomli==2.2.1
|
||||||
typing-inspection==0.4.1
|
typing-inspection==0.4.1
|
||||||
typing_extensions==4.15.0
|
typing_extensions==4.15.0
|
||||||
tzdata==2025.2
|
tzdata==2025.2
|
||||||
urllib3==2.5.0
|
|
||||||
uvicorn==0.37.0
|
uvicorn==0.37.0
|
||||||
uvloop==0.21.0
|
uvloop==0.21.0
|
||||||
vine==5.1.0
|
vine==5.1.0
|
||||||
@@ -70,4 +61,3 @@ watchfiles==1.1.0
|
|||||||
wcwidth==0.2.14
|
wcwidth==0.2.14
|
||||||
websockets==15.0.1
|
websockets==15.0.1
|
||||||
yarl==1.20.1
|
yarl==1.20.1
|
||||||
python-json-logger==2.0.7
|
|
||||||
54
7project/charts/myapp-chart/templates/NOTES.txt
Normal file
54
7project/charts/myapp-chart/templates/NOTES.txt
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
Thank you for installing myapp-chart.
|
||||||
|
|
||||||
|
This chart packages all Kubernetes manifests from the original deployment directory and parameterizes environment, database name (with optional PR suffix), image, and domain for external access.
|
||||||
|
|
||||||
|
Namespaces per developer (important):
|
||||||
|
- Install each developer's environment into their own namespace using Helm's -n/--namespace flag.
|
||||||
|
- No hardcoded namespace is used in templates; resources are created in .Release.Namespace.
|
||||||
|
- Example namespaces: dev-alice, dev-bob, pr-123, etc.
|
||||||
|
|
||||||
|
Key values:
|
||||||
|
- deployment -> used as Database CR name and DB username (MARIADB_DB and MARIADB_USER)
|
||||||
|
- image.repository/tag or image.digest -> container image
|
||||||
|
- domain -> public FQDN used by TunnelBinding (required to expose app)
|
||||||
|
- app/worker names, replicas, ports
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
- Dev install (Alice):
|
||||||
|
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
||||||
|
-n dev-alice --create-namespace \
|
||||||
|
-f values-dev.yaml \
|
||||||
|
--set domain=alice.demo.example.com \
|
||||||
|
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
||||||
|
--set-string database.password="$DB_PASSWORD"
|
||||||
|
|
||||||
|
- Dev install (Bob):
|
||||||
|
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
||||||
|
-n dev-bob --create-namespace \
|
||||||
|
-f values-dev.yaml \
|
||||||
|
--set domain=bob.demo.example.com
|
||||||
|
|
||||||
|
- Prod install (different cleanupPolicy):
|
||||||
|
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
||||||
|
-n prod --create-namespace \
|
||||||
|
-f values-prod.yaml \
|
||||||
|
--set domain=app.example.com
|
||||||
|
|
||||||
|
- PR (preview) install with DB name containing PR number (also its own namespace):
|
||||||
|
PR=123
|
||||||
|
helm upgrade --install myapp-pr-$PR ./7project/charts/myapp-chart \
|
||||||
|
-n pr-$PR --create-namespace \
|
||||||
|
-f values-dev.yaml \
|
||||||
|
--set prNumber=$PR \
|
||||||
|
--set deployment=preview-$PR \
|
||||||
|
--set domain=pr-$PR.example.com
|
||||||
|
|
||||||
|
- Use a custom deployment identifier to suffix DB name, DB username and Secret name:
|
||||||
|
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
||||||
|
-n dev-alice --create-namespace \
|
||||||
|
-f values-dev.yaml \
|
||||||
|
--set deployment=alice \
|
||||||
|
--set domain=alice.demo.example.com
|
||||||
|
|
||||||
|
Render locally (dry run):
|
||||||
|
helm template ./7project/charts/myapp-chart -f values-dev.yaml --set prNumber=456 --set deployment=test --set domain=demo.example.com --namespace dev-test | sed -n '/kind: Database/,$p' | head -n 30
|
||||||
68
7project/charts/myapp-chart/templates/app-deployment.yaml
Normal file
68
7project/charts/myapp-chart/templates/app-deployment.yaml
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ .Values.app.name }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .Values.app.replicas }}
|
||||||
|
revisionHistoryLimit: 3
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
app: {{ .Values.app.name }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
app: {{ .Values.app.name }}
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: {{ .Values.app.name }}
|
||||||
|
image: "{{- if .Values.image.digest -}}{{ .Values.image.repository }}@{{ .Values.image.digest }}{{- else -}}{{ .Values.image.repository }}:{{ default "latest" .Values.image.tag }}{{- end -}}"
|
||||||
|
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||||
|
securityContext:
|
||||||
|
allowPrivilegeEscalation: false
|
||||||
|
capabilities:
|
||||||
|
drop: ["ALL"]
|
||||||
|
ports:
|
||||||
|
- containerPort: {{ .Values.app.port }}
|
||||||
|
env:
|
||||||
|
- name: MARIADB_HOST
|
||||||
|
value: {{ printf "%s.%s.svc.cluster.local" .Values.mariadb.mariaDbRef.name .Values.mariadb.mariaDbRef.namespace | quote }}
|
||||||
|
- name: MARIADB_PORT
|
||||||
|
value: '3306'
|
||||||
|
- name: MARIADB_DB
|
||||||
|
value: {{ required "Set .Values.deployment" .Values.deployment | quote }}
|
||||||
|
- name: MARIADB_USER
|
||||||
|
value: {{ required "Set .Values.deployment" .Values.deployment | quote }}
|
||||||
|
- name: MARIADB_PASSWORD
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ required "Set .Values.database.secretName" .Values.database.secretName }}
|
||||||
|
key: password
|
||||||
|
- name: RABBITMQ_USERNAME
|
||||||
|
value: {{ .Values.rabbitmq.username | quote }}
|
||||||
|
- name: RABBITMQ_PASSWORD
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ printf "%s-user-credentials" (.Values.rabbitmq.username | default "app-user") }}
|
||||||
|
key: password
|
||||||
|
- name: RABBITMQ_HOST
|
||||||
|
value: {{ printf "%s.%s.svc.cluster.local" "rabbitmq-cluster" .Release.Namespace | quote }}
|
||||||
|
- name: RABBITMQ_PORT
|
||||||
|
value: {{ .Values.rabbitmq.port | quote }}
|
||||||
|
- name: RABBITMQ_VHOST
|
||||||
|
value: {{ .Values.rabbitmq.vhost | default "/" | quote }}
|
||||||
|
- name: MAIL_QUEUE
|
||||||
|
value: {{ .Values.worker.mailQueueName | default "mail_queue" | quote }}
|
||||||
|
livenessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /
|
||||||
|
port: {{ .Values.app.port }}
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
failureThreshold: 3
|
||||||
|
readinessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /
|
||||||
|
port: {{ .Values.app.port }}
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
failureThreshold: 3
|
||||||
@@ -2,12 +2,9 @@ apiVersion: v1
|
|||||||
kind: Service
|
kind: Service
|
||||||
metadata:
|
metadata:
|
||||||
name: {{ .Values.app.name }}
|
name: {{ .Values.app.name }}
|
||||||
labels:
|
|
||||||
app: {{ .Values.app.name }}
|
|
||||||
spec:
|
spec:
|
||||||
ports:
|
ports:
|
||||||
- name: http
|
- port: {{ .Values.service.port }}
|
||||||
port: {{ .Values.service.port }}
|
|
||||||
targetPort: {{ .Values.app.port }}
|
targetPort: {{ .Values.app.port }}
|
||||||
selector:
|
selector:
|
||||||
app: {{ .Values.app.name }}
|
app: {{ .Values.app.name }}
|
||||||
48
7project/charts/myapp-chart/templates/worker-deployment.yaml
Normal file
48
7project/charts/myapp-chart/templates/worker-deployment.yaml
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: {{ printf "%s-worker" .Values.app.name }}
|
||||||
|
spec:
|
||||||
|
replicas: {{ .Values.worker.replicas }}
|
||||||
|
revisionHistoryLimit: 3
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
app: {{ printf "%s-worker" .Values.app.name }}
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
app: {{ printf "%s-worker" .Values.app.name }}
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: {{ printf "%s-worker" .Values.app.name }}
|
||||||
|
image: "{{- if .Values.image.digest -}}{{ .Values.image.repository }}@{{ .Values.image.digest }}{{- else -}}{{ .Values.image.repository }}:{{ default "latest" .Values.image.tag }}{{- end -}}"
|
||||||
|
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
||||||
|
securityContext:
|
||||||
|
allowPrivilegeEscalation: false
|
||||||
|
capabilities:
|
||||||
|
drop: ["ALL"]
|
||||||
|
command:
|
||||||
|
- celery
|
||||||
|
- -A
|
||||||
|
- app.celery_app
|
||||||
|
- worker
|
||||||
|
- -Q
|
||||||
|
- $(MAIL_QUEUE)
|
||||||
|
- --loglevel
|
||||||
|
- INFO
|
||||||
|
env:
|
||||||
|
- name: RABBITMQ_USERNAME
|
||||||
|
value: {{ .Values.rabbitmq.username | quote }}
|
||||||
|
- name: RABBITMQ_PASSWORD
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: {{ printf "%s-user-credentials" (.Values.rabbitmq.username | default "app-user") }}
|
||||||
|
key: password
|
||||||
|
- name: RABBITMQ_HOST
|
||||||
|
value: {{ printf "%s.%s.svc.cluster.local" "rabbitmq-cluster" .Release.Namespace | quote }}
|
||||||
|
- name: RABBITMQ_PORT
|
||||||
|
value: {{ .Values.rabbitmq.port | quote }}
|
||||||
|
- name: RABBITMQ_VHOST
|
||||||
|
value: {{ .Values.rabbitmq.vhost | default "/" | quote }}
|
||||||
|
- name: MAIL_QUEUE
|
||||||
|
value: {{ .Values.worker.mailQueueName | default "mail_queue" | quote }}
|
||||||
@@ -5,6 +5,3 @@ app:
|
|||||||
|
|
||||||
worker:
|
worker:
|
||||||
replicas: 3
|
replicas: 3
|
||||||
|
|
||||||
cron:
|
|
||||||
enabled: true
|
|
||||||
@@ -11,15 +11,6 @@ deployment: ""
|
|||||||
# Public domain to expose the app under (used by TunnelBinding fqdn)
|
# Public domain to expose the app under (used by TunnelBinding fqdn)
|
||||||
# Set at install time: --set domain=example.com
|
# Set at install time: --set domain=example.com
|
||||||
domain: ""
|
domain: ""
|
||||||
domain_scheme: ""
|
|
||||||
|
|
||||||
unirate:
|
|
||||||
key: ""
|
|
||||||
|
|
||||||
frontend_domain: ""
|
|
||||||
frontend_domain_scheme: ""
|
|
||||||
|
|
||||||
sentry_dsn: ""
|
|
||||||
|
|
||||||
image:
|
image:
|
||||||
repository: lukastrkan/cc-app-demo
|
repository: lukastrkan/cc-app-demo
|
||||||
@@ -38,38 +29,9 @@ worker:
|
|||||||
# Queue name for Celery worker and for CRD Queue
|
# Queue name for Celery worker and for CRD Queue
|
||||||
mailQueueName: "mail_queue"
|
mailQueueName: "mail_queue"
|
||||||
|
|
||||||
cron:
|
|
||||||
enabled: false
|
|
||||||
schedule: "*/5 * * * *" # every 5 minutes
|
|
||||||
scheme: "http"
|
|
||||||
endpoint: "/_cron"
|
|
||||||
concurrencyPolicy: "Forbid"
|
|
||||||
|
|
||||||
smtp:
|
|
||||||
host:
|
|
||||||
port: 587
|
|
||||||
username: ""
|
|
||||||
password: ""
|
|
||||||
tls: false
|
|
||||||
ssl: false
|
|
||||||
from: ""
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
service:
|
service:
|
||||||
port: 80
|
port: 80
|
||||||
|
|
||||||
oauth:
|
|
||||||
bankid:
|
|
||||||
clientId: ""
|
|
||||||
clientSecret: ""
|
|
||||||
mojeid:
|
|
||||||
clientId: ""
|
|
||||||
clientSecret: ""
|
|
||||||
csas:
|
|
||||||
clientId: ""
|
|
||||||
clientSecret: ""
|
|
||||||
|
|
||||||
rabbitmq:
|
rabbitmq:
|
||||||
create: true
|
create: true
|
||||||
replicas: 1
|
replicas: 1
|
||||||
@@ -95,4 +57,3 @@ database:
|
|||||||
userName: app-demo-user
|
userName: app-demo-user
|
||||||
secretName: app-demo-database-secret
|
secretName: app-demo-database-secret
|
||||||
password: ""
|
password: ""
|
||||||
encryptionSecret: ""
|
|
||||||
@@ -1,81 +0,0 @@
|
|||||||
# Project Evaluation Checklist
|
|
||||||
|
|
||||||
The group earn points by completing items from the categories below.
|
|
||||||
You are not expected to complete all items.
|
|
||||||
Focus on areas that align with your project goals and interests.
|
|
||||||
|
|
||||||
The core deliverables are required.
|
|
||||||
This means that you must get at least 2 points for each item in this category.
|
|
||||||
|
|
||||||
| **Category** | **Item** | **Max Points** | **Points** | **Comment** |
|
|
||||||
|:---------------------------------|:----------------------------------------|:---------------|:-----------|:----------------------------------------------------------------------------------------------------|
|
|
||||||
| **Core Deliverables (Required)** | | | | |
|
|
||||||
| Codebase & Organization | Well-organized project structure | 5 | 5 | Project is well-organized |
|
|
||||||
| | Clean, readable code | 5 | 4 | Should be readable(function names should help), but readability can always be improved |
|
|
||||||
| | Use planning tool (e.g., GitHub issues) | 5 | 4 | We used Github issues |
|
|
||||||
| | Proper version control usage | 5 | 5 | We used branches for development, pull request reviews |
|
|
||||||
| 23 | Complete source code | 5 | 5 | The code is complete |
|
|
||||||
| Documentation | Comprehensive reproducibility report | 10 | 8 | Our report is precise |
|
|
||||||
| | Updated design document | 5 | 4 | Our design document was updated and merged into the report |
|
|
||||||
| | Clear build/deployment instructions | 5 | 5 | Should be clear |
|
|
||||||
| | Troubleshooting guide | 5 | 3 | When it comes to troubleshooting, there is never enough documentation |
|
|
||||||
| | Completed self-assessment table | 5 | 5 | Completed. |
|
|
||||||
| 30 | Hour sheets for all members | 5 | 5 | Filled. |
|
|
||||||
| Presentation Video | Project demonstration | 5 | 5 | Yes |
|
|
||||||
| | Code walk-through | 5 | 3 | There was not enough time to go through all of our code |
|
|
||||||
| 13 | Deployment showcase | 5 | 5 | Yes |
|
|
||||||
| **Technical Implementation** | | | | |
|
|
||||||
| Application Functionality | Basic functionality works | 10 | 10 | The app works as intended |
|
|
||||||
| | Advanced features implemented | 10 | 5 | OAuth, BankAPI connection (not only mock bank) |
|
|
||||||
| | Error handling & robustness | 10 | 5 | The app should be robust enough for normal use (TODO Lukasi kdyztak oprav pls) |
|
|
||||||
| 24 | User-friendly interface | 5 | 4 | Could always be better |
|
|
||||||
| Backend & Architecture | Stateless web server | 5 | 0 | Our server is stateful |
|
|
||||||
| | Stateful application | 10 | 10 | Our server is stateful |
|
|
||||||
| | Database integration | 10 | 10 | Yes, db is integrated |
|
|
||||||
| | API design | 5 | 5 | Backend APIs are implemented with public Swagger docs |
|
|
||||||
| 28 | Microservices architecture | 10 | 3 | Well not really but we have the app and the worker services divided (TODO Lukasi kdyztak oprav pls) |
|
|
||||||
| Cloud Integration | Basic cloud deployment | 10 | 10 | Yes (In private cluster) |
|
|
||||||
| | Cloud APIs usage | 10 | 0 | Not Applicable (TODO Lukasi kdyztak oprav pls) |
|
|
||||||
| | Serverless components | 10 | 0 | No (TODO Lukasi kdyztak oprav pls) |
|
|
||||||
| 10 | Advanced cloud services | 5 | 0 | Not Applicable (TODO Lukasi kdyztak oprav pls) |
|
|
||||||
| **DevOps & Deployment** | | | | |
|
|
||||||
| Containerization | Basic Dockerfile | 5 | 5 | Yes |
|
|
||||||
| | Optimized Dockerfile | 5 | 0 | Not really (TODO Lukasi kdyztak oprav pls) |
|
|
||||||
| | Docker Compose | 5 | 5 | dev only |
|
|
||||||
| 15 | Persistent storage | 5 | 5 | Yes |
|
|
||||||
| Deployment & Scaling | Manual deployment | 5 | 5 | Yes, possible |
|
|
||||||
| | Automated deployment | 5 | 5 | Yes, withGithub runners |
|
|
||||||
| | Multiple replicas | 5 | 5 | Yes (4) |
|
|
||||||
| 25 | Kubernetes deployment | 10 | 10 | Yes |
|
|
||||||
| **Quality Assurance** | | | | |
|
|
||||||
| Testing | Unit tests | 5 | 2 | Basic coverage |
|
|
||||||
| | Integration tests | 5 | 5 | Yes |
|
|
||||||
| | End-to-end tests | 5 | 5 | Yes |
|
|
||||||
| 12 | Performance testing | 5 | 0 | No |
|
|
||||||
| Monitoring & Operations | Health checks | 5 | 5 | Yes |
|
|
||||||
| | Logging | 5 | 2 | only to terminal add logstash |
|
|
||||||
| | Metrics/Monitoring | 2 | 2 | Yes |
|
|
||||||
| 12 | Custom Metrics for your project | 3 | 3 | Prometheus, Grafana |
|
|
||||||
| Security | HTTPS/TLS | 5 | 5 | Yes |
|
|
||||||
| | Authentication | 5 | 5 | Yes |
|
|
||||||
| 15 | Authorization | 5 | 5 | Yes |
|
|
||||||
| **Innovation & Excellence** | | | | |
|
|
||||||
| Advanced Features and | AI/ML Integration | 10 | 0 | No |
|
|
||||||
| Technical Excellence | Real-time features | 10 | 0 | No |
|
|
||||||
| | Creative problem solving | 10 | 4 | Cron jobs for bank scraping |
|
|
||||||
| | Performance optimization | 5 | 3 | Delegating emails and scraping to workers |
|
|
||||||
| 7 | Exceptional user experience | 5 | 0 | |
|
|
||||||
| **Total** | | **255** | **214** | |
|
|
||||||
## Grading Scale
|
|
||||||
|
|
||||||
- **Minimum Required: 100 points**
|
|
||||||
- **Maximum: 200+ points**
|
|
||||||
|
|
||||||
| Grade | Points |
|
|
||||||
|-------|----------|
|
|
||||||
| A | 180-200+ |
|
|
||||||
| B | 160-179 |
|
|
||||||
| C | 140-159 |
|
|
||||||
| D | 120-139 |
|
|
||||||
| E | 100-119 |
|
|
||||||
| F | 0-99 |
|
|
||||||
@@ -15,7 +15,7 @@ services:
|
|||||||
volumes:
|
volumes:
|
||||||
- redis_data:/data
|
- redis_data:/data
|
||||||
rabbitmq:
|
rabbitmq:
|
||||||
image: bitnamilegacy/rabbitmq:3.13.3-debian-12-r0
|
image: bitnami/rabbitmq:3.13.3-debian-12-r0
|
||||||
network_mode: host
|
network_mode: host
|
||||||
ports:
|
ports:
|
||||||
- "5672:5672"
|
- "5672:5672"
|
||||||
@@ -8,8 +8,4 @@ fi
|
|||||||
cd backend || { echo "Directory 'backend' does not exist"; exit 1; }
|
cd backend || { echo "Directory 'backend' does not exist"; exit 1; }
|
||||||
alembic revision --autogenerate -m "$1"
|
alembic revision --autogenerate -m "$1"
|
||||||
git add alembic/versions/*
|
git add alembic/versions/*
|
||||||
YELLOW='\033[1;33m'
|
|
||||||
NC='\033[0m' # No Color
|
|
||||||
|
|
||||||
echo -e "${YELLOW}Don't forget to check imports in the new migration file!${NC}"
|
|
||||||
cd - || exit
|
cd - || exit
|
||||||
20
7project/deployment/app-demo-database-grant.yaml
Normal file
20
7project/deployment/app-demo-database-grant.yaml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
apiVersion: k8s.mariadb.com/v1alpha1
|
||||||
|
kind: Grant
|
||||||
|
metadata:
|
||||||
|
name: grant
|
||||||
|
spec:
|
||||||
|
mariaDbRef:
|
||||||
|
name: mariadb-repl
|
||||||
|
namespace: mariadb-operator
|
||||||
|
privileges:
|
||||||
|
- "ALL PRIVILEGES"
|
||||||
|
database: "app-demo-database"
|
||||||
|
table: "*"
|
||||||
|
username: "app-demo-user"
|
||||||
|
grantOption: true
|
||||||
|
host: "%"
|
||||||
|
# Delete the resource in the database whenever the CR gets deleted.
|
||||||
|
# Alternatively, you can specify Skip in order to omit deletion.
|
||||||
|
cleanupPolicy: Skip
|
||||||
|
requeueInterval: 10h
|
||||||
|
retryInterval: 30s
|
||||||
7
7project/deployment/app-demo-database-secret.yaml
Normal file
7
7project/deployment/app-demo-database-secret.yaml
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
apiVersion: v1
|
||||||
|
kind: Secret
|
||||||
|
metadata:
|
||||||
|
name: app-demo-database-secret
|
||||||
|
type: kubernetes.io/basic-auth
|
||||||
|
stringData:
|
||||||
|
password: "strongpassword"
|
||||||
20
7project/deployment/app-demo-database-user.yaml
Normal file
20
7project/deployment/app-demo-database-user.yaml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
apiVersion: k8s.mariadb.com/v1alpha1
|
||||||
|
kind: User
|
||||||
|
metadata:
|
||||||
|
name: app-demo-user
|
||||||
|
spec:
|
||||||
|
# If you want the user to be created with a different name than the resource name
|
||||||
|
# name: user-custom
|
||||||
|
mariaDbRef:
|
||||||
|
name: mariadb-repl
|
||||||
|
namespace: mariadb-operator
|
||||||
|
passwordSecretKeyRef:
|
||||||
|
name: app-demo-database-secret
|
||||||
|
key: password
|
||||||
|
maxUserConnections: 20
|
||||||
|
host: "%"
|
||||||
|
# Delete the resource in the database whenever the CR gets deleted.
|
||||||
|
# Alternatively, you can specify Skip in order to omit deletion.
|
||||||
|
cleanupPolicy: Skip
|
||||||
|
requeueInterval: 10h
|
||||||
|
retryInterval: 30s
|
||||||
15
7project/deployment/app-demo-database.yaml
Normal file
15
7project/deployment/app-demo-database.yaml
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
apiVersion: k8s.mariadb.com/v1alpha1
|
||||||
|
kind: Database
|
||||||
|
metadata:
|
||||||
|
name: app-demo-database
|
||||||
|
spec:
|
||||||
|
mariaDbRef:
|
||||||
|
name: mariadb-repl
|
||||||
|
namespace: mariadb-operator
|
||||||
|
characterSet: utf8
|
||||||
|
collate: utf8_general_ci
|
||||||
|
# Delete the resource in the database whenever the CR gets deleted.
|
||||||
|
# Alternatively, you can specify Skip in order to omit deletion.
|
||||||
|
cleanupPolicy: Skip
|
||||||
|
requeueInterval: 10h
|
||||||
|
retryInterval: 30s
|
||||||
48
7project/deployment/app-demo-deployment.yaml
Normal file
48
7project/deployment/app-demo-deployment.yaml
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: app-demo
|
||||||
|
spec:
|
||||||
|
replicas: 3
|
||||||
|
revisionHistoryLimit: 3
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
app: app-demo
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
app: app-demo
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- image: lukastrkan/cc-app-demo@sha256:75634b4d97282b6b8424fe17767c81adf44af5f7359c1d25883073b5629b3e05
|
||||||
|
name: app-demo
|
||||||
|
ports:
|
||||||
|
- containerPort: 8000
|
||||||
|
env:
|
||||||
|
- name: MARIADB_HOST
|
||||||
|
value: mariadb-repl.mariadb-operator.svc.cluster.local
|
||||||
|
- name: MARIADB_PORT
|
||||||
|
value: '3306'
|
||||||
|
- name: MARIADB_DB
|
||||||
|
value: app-demo-database
|
||||||
|
- name: MARIADB_USER
|
||||||
|
value: app-demo-user
|
||||||
|
- name: MARIADB_PASSWORD
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: app-demo-database-secret
|
||||||
|
key: password
|
||||||
|
livenessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /
|
||||||
|
port: 8000
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
failureThreshold: 3
|
||||||
|
readinessProbe:
|
||||||
|
httpGet:
|
||||||
|
path: /
|
||||||
|
port: 8000
|
||||||
|
initialDelaySeconds: 10
|
||||||
|
periodSeconds: 10
|
||||||
|
failureThreshold: 3
|
||||||
10
7project/deployment/app-demo-svc.yaml
Normal file
10
7project/deployment/app-demo-svc.yaml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
apiVersion: v1
|
||||||
|
kind: Service
|
||||||
|
metadata:
|
||||||
|
name: app-demo
|
||||||
|
spec:
|
||||||
|
ports:
|
||||||
|
- port: 80
|
||||||
|
targetPort: 8000
|
||||||
|
selector:
|
||||||
|
app: app-demo
|
||||||
41
7project/deployment/app-demo-worker-deployment.yaml
Normal file
41
7project/deployment/app-demo-worker-deployment.yaml
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
apiVersion: apps/v1
|
||||||
|
kind: Deployment
|
||||||
|
metadata:
|
||||||
|
name: app-demo-worker
|
||||||
|
spec:
|
||||||
|
replicas: 3
|
||||||
|
revisionHistoryLimit: 3
|
||||||
|
selector:
|
||||||
|
matchLabels:
|
||||||
|
app: app-demo-worker
|
||||||
|
template:
|
||||||
|
metadata:
|
||||||
|
labels:
|
||||||
|
app: app-demo-worker
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- image: lukastrkan/cc-app-demo@sha256:75634b4d97282b6b8424fe17767c81adf44af5f7359c1d25883073b5629b3e05
|
||||||
|
name: app-demo-worker
|
||||||
|
command:
|
||||||
|
- celery
|
||||||
|
- -A
|
||||||
|
- app.celery_app
|
||||||
|
- worker
|
||||||
|
- -Q
|
||||||
|
- $(MAIL_QUEUE)
|
||||||
|
- --loglevel
|
||||||
|
- INFO
|
||||||
|
env:
|
||||||
|
- name: RABBITMQ_USERNAME
|
||||||
|
value: demo-app
|
||||||
|
- name: RABBITMQ_PASSWORD
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: demo-app-user-credentials
|
||||||
|
key: password
|
||||||
|
- name: RABBITMQ_HOST
|
||||||
|
value: rabbitmq.rabbitmq.svc.cluster.local
|
||||||
|
- name: RABBITMQ_PORT
|
||||||
|
value: '5672'
|
||||||
|
- name: RABBITMQ_VHOST
|
||||||
|
value: "/"
|
||||||
14
7project/deployment/tunnel.yaml
Normal file
14
7project/deployment/tunnel.yaml
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
apiVersion: networking.cfargotunnel.com/v1alpha1
|
||||||
|
kind: TunnelBinding
|
||||||
|
metadata:
|
||||||
|
name: guestbook-tunnel-binding
|
||||||
|
namespace: group-project
|
||||||
|
subjects:
|
||||||
|
- name: app-server
|
||||||
|
spec:
|
||||||
|
target: http://app-demo.group-project.svc.cluster.local
|
||||||
|
fqdn: demo.ltrk.cz
|
||||||
|
noTlsVerify: true
|
||||||
|
tunnelRef:
|
||||||
|
kind: ClusterTunnel
|
||||||
|
name: cluster-tunnel
|
||||||
@@ -1,53 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group 8 - Personal finance tracker
|
|
||||||
- Mentor: Jaychander
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-10-16
|
|
||||||
- Attendees: Dejan Ribarovski, Lukas Trkan
|
|
||||||
- Notetaker: Dejan Ribarovski
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
## Action Items from Last Week (During Meeting)
|
|
||||||
|
|
||||||
- [x] start coding the app logic
|
|
||||||
- [x] start writing the report so it matches the actual progress
|
|
||||||
- [x] redo the system diagram so it includes a response flow
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
Implemented initial functioning version of the app, added OAuth with BankId and MojeID,
|
|
||||||
added database snapshots.
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
report.md is up to date
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
Prepare 3-5 questions and topics you want to discuss with your mentor.
|
|
||||||
|
|
||||||
1. What other functionality should be added to the app
|
|
||||||
2. Priority for the next week (Testing maybe?)
|
|
||||||
3. Question 3
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [x] OAuth
|
|
||||||
- [x] CI/CD fix
|
|
||||||
- [ ] Database local (multiple bank accounts)
|
|
||||||
- [ ] Add tests and set up github pipeline
|
|
||||||
- [ ] Frontend imporvment - user experience
|
|
||||||
- [ ] make the report more clear
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group 8 - Personal finance tracker
|
|
||||||
- Mentor: Jaychander
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-10-23
|
|
||||||
- Attendees: Dejan
|
|
||||||
- Notetaker: Dejan
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [x] OAuth (BankID)
|
|
||||||
- [x] CI/CD fix
|
|
||||||
- [X] Database local (multiple bank accounts)
|
|
||||||
- [X] Add tests and set up github pipeline
|
|
||||||
- [X] Frontend imporvment - user experience
|
|
||||||
- [ ] make the report more clear - partly
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
Improved Frontend, added Mock Bank, fixed deployment, fixed OAuth(BankID) on production, added basic tests
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
Not much - just updated the work done
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
This was not prepared, I planned to do it right before meeting, but Jaychander needed to go somewhere earlier.
|
|
||||||
|
|
||||||
1. Question 1
|
|
||||||
2. Question 2
|
|
||||||
3. Question 3
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
The tracker should not store the transactions in the database - security vulnerability.
|
|
||||||
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] Dont store data in database (security) - Load it on login (from CSAS API and local database), load automatically with email
|
|
||||||
- [ ] Go through the checklist
|
|
||||||
- [ ] Look for possible APIs (like stocks or financial details whatever)
|
|
||||||
- [ ] Report
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group 8 - Personal finance tracker
|
|
||||||
- Mentor: Jaychander
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-10-30
|
|
||||||
- Attendees: Dejan, Lukas
|
|
||||||
- Notetaker: Dejan
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] Dont store data in database (security) - Load it on login (from CSAS API and local database), load automatically with email
|
|
||||||
- [X] Go through the checklist
|
|
||||||
- [X] Look for possible APIs (like stocks or financial details whatever)
|
|
||||||
- [ ] Report - partly
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
Implemented CSAS API transactions fetch, Added tests with testing database on github actions, redone UI,
|
|
||||||
added currency exchange rate with CNB API
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
Not much - just updated the work done
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
1. Security regarding storing transactions - possibility of encryption
|
|
||||||
2. Realisticaly what needs to be done for us to be done
|
|
||||||
3. Question 3
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
The tracker should not store the transactions in the database - security vulnerability.
|
|
||||||
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [x] Change the name on frontend from 7project
|
|
||||||
- [x] Finalize the funcionality and everyting in the code part
|
|
||||||
- [ ] Try to finalize report with focus on reproducibility
|
|
||||||
- [ ] More high level explanation of the workflow in the report
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group 8 - Personal finance tracker
|
|
||||||
- Mentor: Jaychander
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-10-08
|
|
||||||
- Attendees: Dejan Ribarovski, Lukas Trkan
|
|
||||||
- Notetaker: Dejan Ribarovski
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
|
|
||||||
Lukas has implemented the template source directories, source files and config files necessary for deployment
|
|
||||||
- docker compose for database, redis cache and rabbit MQ
|
|
||||||
- tofu
|
|
||||||
- backend template
|
|
||||||
- frontend template
|
|
||||||
- charts templates
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
- Created GitHub issues for the next steps
|
|
||||||
- Added this document + checklist and report
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
Prepare 3-5 questions and topics you want to discuss with your mentor.
|
|
||||||
|
|
||||||
1. Anything we should add structure-wise?
|
|
||||||
2. Anything you would like us to prioritize until next week?
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
|
|
||||||
- start working on the report
|
|
||||||
- start coding the actual code
|
|
||||||
- write problems solved
|
|
||||||
- redo the system diagram - see the response as well
|
|
||||||
- create a meetings folder wih seperate meetings files
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] start coding the app logic
|
|
||||||
- [ ] start writing the report so it matches the actual progress
|
|
||||||
- [ ] redo the system diagram so it includes a response flow
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,47 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group 8 - Personal finance tracker
|
|
||||||
- Mentor: Jaychander
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-10-30
|
|
||||||
- Attendees: Dejan, Lukas
|
|
||||||
- Notetaker: Dejan
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [x] Change the name on frontend from 7project
|
|
||||||
- [x] Finalize the funcionality and everyting in the code part
|
|
||||||
- [x] Try to finalize report with focus on reproducibility
|
|
||||||
- [x] More high level explanation of the workflow in the report
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
The tracker should not store the transactions in the database - security vulnerability.
|
|
||||||
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] video
|
|
||||||
- [ ] highlight the optional stuff in the report
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
# Weekly Meeting Notes
|
|
||||||
|
|
||||||
- Group X - Project Title
|
|
||||||
- Mentor: Mentor Name
|
|
||||||
|
|
||||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
|
||||||
Just copy the template below for each weekly meeting and fill in the details.
|
|
||||||
|
|
||||||
## Administrative Info
|
|
||||||
|
|
||||||
- Date: 2025-09-19
|
|
||||||
- Attendees: Name1, Name2, Name3
|
|
||||||
- Notetaker: Name1
|
|
||||||
|
|
||||||
## Progress Update (Before Meeting)
|
|
||||||
|
|
||||||
Summary of what has been accomplished since the last meeting in the following categories.
|
|
||||||
|
|
||||||
### Coding
|
|
||||||
|
|
||||||
### Documentation
|
|
||||||
|
|
||||||
## Questions and Topics for Discussion (Before Meeting)
|
|
||||||
|
|
||||||
Prepare 3-5 questions and topics you want to discuss with your mentor.
|
|
||||||
|
|
||||||
1. Question 1
|
|
||||||
2. Question 2
|
|
||||||
3. Question 3
|
|
||||||
|
|
||||||
## Discussion Notes (During Meeting)
|
|
||||||
|
|
||||||
## Action Items for Next Week (During Meeting)
|
|
||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
|
||||||
|
|
||||||
- [ ] Action Item 1
|
|
||||||
- [ ] Action Item 2
|
|
||||||
- [ ] Action Item 3
|
|
||||||
|
|
||||||
---
|
|
||||||
@@ -1,803 +0,0 @@
|
|||||||
# Personal finance tracker
|
|
||||||
|
|
||||||
<!--- **Instructions**:
|
|
||||||
> This template provides the structure for your project report.
|
|
||||||
> Replace the placeholder text with your actual content.
|
|
||||||
> Remove instructions that are not relevant for your project, but leave the headings along with a (NA) label. -->
|
|
||||||
|
|
||||||
## Project Overview
|
|
||||||
|
|
||||||
**Project Name**: Personal Finance Tracker
|
|
||||||
|
|
||||||
**Deployment URL**: https://finance.ltrk.cz/
|
|
||||||
|
|
||||||
**Group Members**:
|
|
||||||
|
|
||||||
- 289229, Lukáš Trkan, lukastrkan
|
|
||||||
- 289258, Dejan Ribarovski, ribardej (derib2613)
|
|
||||||
|
|
||||||
**Brief Description**:
|
|
||||||
Our application allows users to easily track their cash flow
|
|
||||||
through multiple bank accounts. Users can label their transactions with custom categories that can be later used for
|
|
||||||
filtering and visualization. New transactions are automatically fetched in the background.
|
|
||||||
|
|
||||||
## Architecture Overview
|
|
||||||
|
|
||||||
Our system is a full‑stack web application composed of a React frontend, a FastAPI backend,
|
|
||||||
a asynchronousMariaDB database with Maxscale, and background workers powered by Celery with RabbitMQ.
|
|
||||||
The backend exposes REST endpoints for authentication (email/password and OAuth), users, categories,
|
|
||||||
transactions, exchange rates and bank APIs. Infrastructure for Kubernetes is managed via Terraform/OpenTofu and
|
|
||||||
the application is packaged via a Helm chart. This all is deployed on private TalosOS cluster running on Proxmox VE with
|
|
||||||
CI/CD and with public access over Cloudflare tunnels. Static files for frontend are served via Cloudflare pages.
|
|
||||||
Other services deployed in the cluster includes Longhorn for persistent storage, Prometheus with Grafana for monitoring.
|
|
||||||
|
|
||||||
### High-Level Architecture
|
|
||||||
|
|
||||||
```mermaid
|
|
||||||
flowchart TB
|
|
||||||
n3(("User")) <--> client["Frontend"]
|
|
||||||
proc_queue["Message Queue"] --> proc_queue_worker["Worker Service"]
|
|
||||||
proc_queue_worker -- SMTP --> ext_mail[("Email Service")]
|
|
||||||
proc_queue_worker <-- HTTP request/response --> ext_bank[("Bank API")]
|
|
||||||
proc_queue_worker <--> db[("Database")]
|
|
||||||
proc_cron["Cron"] <-- HTTP request/response --> svc["Backend API"]
|
|
||||||
svc --> proc_queue
|
|
||||||
n2["Cloudflare tunnel"] <-- HTTP request/response --> svc
|
|
||||||
svc <--> db
|
|
||||||
svc <-- HTTP request/response --> api[("UniRate API")]
|
|
||||||
client <-- HTTP request/response --> n2
|
|
||||||
```
|
|
||||||
The workflow works in the following way:
|
|
||||||
|
|
||||||
- Client connects to the frontend. After login, frontend automatically fetches the stored transactions from
|
|
||||||
the database via the backend API and currency rates from UniRate API.
|
|
||||||
- When the client opts for fetching new transactions via the Bank API, cron will trigger periodic fetching
|
|
||||||
using background worker.
|
|
||||||
- After successful load, these transactions are stored to the database and displayed to the client
|
|
||||||
|
|
||||||
### Database Schema
|
|
||||||
|
|
||||||
```mermaid
|
|
||||||
classDiagram
|
|
||||||
direction BT
|
|
||||||
class alembic_version {
|
|
||||||
varchar(32) version_num
|
|
||||||
}
|
|
||||||
class categories {
|
|
||||||
varchar(100) name
|
|
||||||
varchar(255) description
|
|
||||||
char(36) user_id
|
|
||||||
int(11) id
|
|
||||||
}
|
|
||||||
class category_transaction {
|
|
||||||
int(11) category_id
|
|
||||||
int(11) transaction_id
|
|
||||||
}
|
|
||||||
class oauth_account {
|
|
||||||
char(36) user_id
|
|
||||||
varchar(100) oauth_name
|
|
||||||
varchar(4096) access_token
|
|
||||||
int(11) expires_at
|
|
||||||
varchar(1024) refresh_token
|
|
||||||
varchar(320) account_id
|
|
||||||
varchar(320) account_email
|
|
||||||
char(36) id
|
|
||||||
}
|
|
||||||
class transaction {
|
|
||||||
blob amount
|
|
||||||
blob description
|
|
||||||
char(36) user_id
|
|
||||||
date date
|
|
||||||
int(11) id
|
|
||||||
}
|
|
||||||
class user {
|
|
||||||
varchar(100) first_name
|
|
||||||
varchar(100) last_name
|
|
||||||
varchar(320) email
|
|
||||||
varchar(1024) hashed_password
|
|
||||||
tinyint(1) is_active
|
|
||||||
tinyint(1) is_superuser
|
|
||||||
tinyint(1) is_verified
|
|
||||||
longtext config
|
|
||||||
char(36) id
|
|
||||||
}
|
|
||||||
|
|
||||||
categories --> user: user_id -> id
|
|
||||||
category_transaction --> categories: category_id -> id
|
|
||||||
category_transaction --> transaction: transaction_id -> id
|
|
||||||
oauth_account --> user: user_id -> id
|
|
||||||
transaction --> user: user_id -> id
|
|
||||||
```
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- The stored transactions are encrypted in the DB for security reasons.
|
|
||||||
- For every pull request the full APP is deployed on a separate URL and the tests are run by github CI/CD
|
|
||||||
- On every push to main, the production app is automatically updated
|
|
||||||
- UI is responsive for mobile devices
|
|
||||||
- Slow operations (emails, transactions fetching) are handled
|
|
||||||
in the background by Celery workers.
|
|
||||||
- App is monitored using prometheus metrics endpoint and metrics are shown in Grafana dashboard.
|
|
||||||
|
|
||||||
### Components
|
|
||||||
|
|
||||||
- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles
|
|
||||||
login/registration, shows latest transactions, filtering, and allows adding transactions.
|
|
||||||
- Backend API (backend/app): FastAPI app with routers under app/api for auth, users, categories, transactions, exchange
|
|
||||||
rates and bankAPI. Uses FastAPI Users for auth (JWT + OAuth), SQLAlchemy ORM, and Pydantic v2 schemas.
|
|
||||||
- Worker service (backend/app/workers): Celery worker handling background tasks (emails, transactions fetching).
|
|
||||||
- Database (MariaDB with Maxscale): Persists users, categories, transactions; schema managed by Alembic migrations.
|
|
||||||
- Message Queue (RabbitMQ): Queues background tasks for Celery workers.
|
|
||||||
- Infrastructure as Code (tofu/): OpenTofu modules provisioning cluster services (RabbitMQ, Redis, Cloudflare tunnel,
|
|
||||||
etc.).
|
|
||||||
- Deployment Chart (charts/myapp-chart/): Helm chart to deploy the application to Kubernetes.
|
|
||||||
|
|
||||||
### Other services deployed in the cluster
|
|
||||||
|
|
||||||
- Longhorn: distributed storage system providing persistent volumes for the database and other services
|
|
||||||
- Prometheus + Grafana: monitoring stack collecting metrics from the app and cluster, visualized in Grafana dashboards
|
|
||||||
- MariaDB operator: manages the MariaDB cluster based on Custom resources, creates Databases, users, handles backups
|
|
||||||
- RabbitMQ operator: manages RabbitMQ cluster based on Custom resources
|
|
||||||
- Cloudflare Tunnel: allows public access to backend API running in the private cluster, providing HTTPS
|
|
||||||
|
|
||||||
### Technologies Used
|
|
||||||
|
|
||||||
- Backend: Python, FastAPI, FastAPI Users, SQLAlchemy, Pydantic, Alembic, Celery
|
|
||||||
- Frontend: React, TypeScript, Vite
|
|
||||||
- Database: MariaDB with Maxscale
|
|
||||||
- Background jobs: RabbitMQ, Celery
|
|
||||||
- Containerization/Orchestration: Docker, Docker Compose (dev), Kubernetes, Helm
|
|
||||||
- IaC/Platform: Proxmox, Talos, Cloudflare pages, OpenTofu (Terraform), cert-manager, MetalLB, Cloudflare Tunnel,
|
|
||||||
Prometheus, Loki
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
Here are software and hardware prerequisites for the development and production environments. This section also
|
|
||||||
describes
|
|
||||||
necessary environment variables and key dependencies used in the project.
|
|
||||||
|
|
||||||
### System Requirements
|
|
||||||
|
|
||||||
#### Development
|
|
||||||
|
|
||||||
- OS: Tested on MacOS, Linux and Windows should work as well
|
|
||||||
- Minimum RAM: 8 GB
|
|
||||||
- Storage: 10 GB+ free
|
|
||||||
|
|
||||||
#### Production
|
|
||||||
|
|
||||||
- 1 + 4 nodes
|
|
||||||
- CPU: 4 cores
|
|
||||||
- RAM: 8 GB
|
|
||||||
- Storage: 200 GB
|
|
||||||
|
|
||||||
### Required Software
|
|
||||||
|
|
||||||
#### Development
|
|
||||||
|
|
||||||
- Docker
|
|
||||||
- Docker Compose
|
|
||||||
- Node.js and npm
|
|
||||||
- Python 3.12
|
|
||||||
- MariaDB 11
|
|
||||||
|
|
||||||
#### Production
|
|
||||||
|
|
||||||
##### Minimal:
|
|
||||||
|
|
||||||
- domain name with Cloudflare's nameservers - tunnel, pages
|
|
||||||
- Kubernetes cluster
|
|
||||||
- kubectl
|
|
||||||
- Helm
|
|
||||||
- OpenTofu
|
|
||||||
|
|
||||||
##### Our setup specifics:
|
|
||||||
|
|
||||||
- Proxmox VE
|
|
||||||
- TalosOS cluster
|
|
||||||
- talosctl
|
|
||||||
- GitHub self-hosted runner with access to the cluster
|
|
||||||
- TailScale for remote access to cluster
|
|
||||||
|
|
||||||
### Environment Variables
|
|
||||||
|
|
||||||
#### Backend
|
|
||||||
|
|
||||||
- `MOJEID_CLIENT_ID`, `MOJEID_CLIENT_SECRET` \- OAuth client ID and secret for
|
|
||||||
[MojeID](https://www.mojeid.cz/en/provider/)
|
|
||||||
- `BANKID_CLIENT_ID`, `BANKID_CLIENT_SECRET` \- OAuth client ID and secret for [BankID](https://developer.bankid.cz/)
|
|
||||||
- `CSAS_CLIENT_ID`, `CSAS_CLIENT_SECRET` \- OAuth client ID and secret for [Česká
|
|
||||||
spořitelna](https://developers.erstegroup.com/docs/apis/bank.csas)
|
|
||||||
- `DATABASE_URL`(or `MARIADB_HOST`, `MARIADB_PORT`, `MARIADB_DB`, `MARIADB_USER`, `MARIADB_PASSWORD`) \- MariaDB
|
|
||||||
connection details
|
|
||||||
- `RABBITMQ_USERNAME`, `RABBITMQ_PASSWORD` \- credentials for RabbitMQ
|
|
||||||
- `SENTRY_DSN` \- Sentry DSN for error reporting
|
|
||||||
- `DB_ENCRYPTION_KEY` \- symmetric key for encrypting sensitive data in the database
|
|
||||||
- `SMTP_HOST`, `SMTP_PORT`, `SMTP_USERNAME`, `SMTP_PASSWORD`, `SMTP_USE_TLS`, `SMTP_USE_SSL`, `SMTP_FROM` \- SMTP
|
|
||||||
configuration (host, port, auth credentials, TLS/SSL options, sender).
|
|
||||||
- `UNIRATE_API_KEY` \- API key for UniRate.
|
|
||||||
|
|
||||||
#### Frontend
|
|
||||||
|
|
||||||
- `VITE_BACKEND_URL` \- URL of the backend API
|
|
||||||
|
|
||||||
### Dependencies (key libraries)
|
|
||||||
|
|
||||||
Backend: FastAPI, fastapi-users, SQLAlchemy, pydantic v2, Alembic, Celery, uvicorn, pytest
|
|
||||||
Frontend: React, TypeScript, Vite
|
|
||||||
|
|
||||||
## Local development
|
|
||||||
|
|
||||||
You can run the project with Docker Compose and Python virtual environment for testing and development purposes
|
|
||||||
|
|
||||||
### 1) Clone the Repository
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git clone https://github.com/dat515-2025/Group-8.git
|
|
||||||
cd Group-8/7project/src
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2) Install dependencies
|
|
||||||
|
|
||||||
Backend
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd backend
|
|
||||||
python3 -m venv .venv
|
|
||||||
source .venv/bin/activate
|
|
||||||
pip install -r requirements.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3) Run Docker containers
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd ..
|
|
||||||
docker compose up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4) Prepare the database
|
|
||||||
|
|
||||||
```bash
|
|
||||||
bash upgrade_database.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
### 5) Run backend
|
|
||||||
|
|
||||||
Before running the backend, make sure to set the necessary environment variables. Either by setting them in your shell
|
|
||||||
or by setting them in run configuration in your IDE.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd backend
|
|
||||||
uvicorn app.app:fastApi --reload --host 0.0.0.0 --port 8000
|
|
||||||
```
|
|
||||||
|
|
||||||
### 6) Run Celery worker (optional, in another terminal)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd Group-8/7project/src/backend
|
|
||||||
source .venv/bin/activate
|
|
||||||
celery -A app.celery_app.celery_app worker -l info
|
|
||||||
```
|
|
||||||
|
|
||||||
### 7) Install frontend dependencies and run
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd ../frontend
|
|
||||||
npm i
|
|
||||||
npm run dev
|
|
||||||
```
|
|
||||||
|
|
||||||
- Backend available at: http://127.0.0.1:8000 (OpenAPI at /docs)
|
|
||||||
- Frontend available at: http://localhost:5173
|
|
||||||
|
|
||||||
## Build Instructions
|
|
||||||
|
|
||||||
### Backend
|
|
||||||
|
|
||||||
App is separated into backend and frontend so it also needs to be built separately. Backend is built into a docker image
|
|
||||||
and frontend is deployed as static files.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd 7project/src/backend
|
|
||||||
# Dont forget to set correct image tag with your registry and name
|
|
||||||
# For example lukastrkan/cc-app-demo or gitea.ltrk.dev/lukas/cc-app-demo
|
|
||||||
docker buildx build --platform linux/amd64,linux/arm64 -t CHANGE_ME --push .
|
|
||||||
```
|
|
||||||
|
|
||||||
### Frontend
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd 7project/src/frontend
|
|
||||||
npm ci
|
|
||||||
npm run build
|
|
||||||
```
|
|
||||||
|
|
||||||
## Deployment Instructions
|
|
||||||
|
|
||||||
Deployment is tested on TalosOS cluster with 1 control plane and 4 workers, cluster needs to be setup and configured
|
|
||||||
manually. Terraform/OpenTofu is then used to deploy base services to the cluster. App itself is deployed automatically
|
|
||||||
via GitHub actions and Helm chart. Frontend files are deployed to Cloudflare pages.
|
|
||||||
|
|
||||||
### Setup Cluster
|
|
||||||
|
|
||||||
Deployment should work on any Kubernetes cluster. However, we are using 5 TalosOS virtual machines (1 control plane, 4
|
|
||||||
workers)
|
|
||||||
running on top of Proxmox VE.
|
|
||||||
|
|
||||||
1) Create at least 4 VMs with TalosOS (4 cores, 8 GB RAM, 200 GB disk)
|
|
||||||
2) Install talosctl for your OS: https://docs.siderolabs.com/talos/v1.10/getting-started/talosctl
|
|
||||||
3) Generate Talos config
|
|
||||||
4) Navigate to tofu directory
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd 7project/src/tofu
|
|
||||||
```
|
|
||||||
|
|
||||||
5) Set IP addresses in environment variables
|
|
||||||
|
|
||||||
```bash
|
|
||||||
CONTROL_PLANE_IP=<control-plane-ip>
|
|
||||||
WORKER1_IP=<worker1-ip>
|
|
||||||
WORKER2_IP=<worker2-ip>
|
|
||||||
WORKER3_IP=<worker3-ip>
|
|
||||||
WORKER4_IP=<worker4-ip>
|
|
||||||
....
|
|
||||||
```
|
|
||||||
|
|
||||||
6) Create config files
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# change my-cluster to your desired cluster name
|
|
||||||
talosctl gen config my-cluster https://$CONTROL_PLANE_IP:6443
|
|
||||||
```
|
|
||||||
|
|
||||||
7) Edit the generated configs
|
|
||||||
|
|
||||||
Apply the following changes to `worker.yaml`:
|
|
||||||
|
|
||||||
1) Add mounts for persistent storage to `machine.kubelet.extraMounts` section:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
extraMounts:
|
|
||||||
- destination: /var/lib/longhorn
|
|
||||||
    type: bind
|
|
||||||
source: /var/lib/longhorn
|
|
||||||
options:
|
|
||||||
- bind
|
|
||||||
- rshared
|
|
||||||
- rw
|
|
||||||
```
|
|
||||||
|
|
||||||
2) Change `machine.install.image` to image with extra modules:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
image: factory.talos.dev/metal-installer/88d1f7a5c4f1d3aba7df787c448c1d3d008ed29cfb34af53fa0df4336a56040b:v1.11.1
|
|
||||||
```
|
|
||||||
|
|
||||||
or you can use latest image generated at https://factory.talos.dev with following options:
|
|
||||||
|
|
||||||
- Bare-metal machine
|
|
||||||
- your Talos OS version
|
|
||||||
- amd64 architecture
|
|
||||||
- siderolabs/iscsi-tools
|
|
||||||
- siderolabs/util-linux-tools
|
|
||||||
- (Optionally) siderolabs/qemu-guest-agent
|
|
||||||
|
|
||||||
Then copy "Initial Installation" value and paste it to the image field.
|
|
||||||
|
|
||||||
3) Add docker registry mirror to `machine.registries.mirrors` section:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
registries:
|
|
||||||
mirrors:
|
|
||||||
docker.io:
|
|
||||||
endpoints:
|
|
||||||
- https://mirror.gcr.io
|
|
||||||
- https://registry-1.docker.io
|
|
||||||
```
|
|
||||||
|
|
||||||
8) Apply configs to the VMs
|
|
||||||
|
|
||||||
```bash
|
|
||||||
talosctl apply-config --insecure --nodes $CONTROL_PLANE_IP --file controlplane.yaml
|
|
||||||
talosctl apply-config --insecure --nodes $WORKER1_IP --file worker.yaml
|
|
||||||
talosctl apply-config --insecure --nodes $WORKER2_IP --file worker.yaml
|
|
||||||
talosctl apply-config --insecure --nodes $WORKER3_IP --file worker.yaml
|
|
||||||
talosctl apply-config --insecure --nodes $WORKER4_IP --file worker.yaml
|
|
||||||
```
|
|
||||||
|
|
||||||
9) Bootstrap the cluster and retrieve kubeconfig
|
|
||||||
|
|
||||||
```bash
|
|
||||||
export TALOSCONFIG=$(pwd)/talosconfig
|
|
||||||
talosctl config endpoint https://$CONTROL_PLANE_IP:6443
|
|
||||||
talosctl config node $CONTROL_PLANE_IP
|
|
||||||
|
|
||||||
talosctl bootstrap
|
|
||||||
|
|
||||||
talosctl kubeconfig .
|
|
||||||
```
|
|
||||||
|
|
||||||
You can now use k8s client like https://headlamp.dev/ with the generated kubeconfig file.
|
|
||||||
|
|
||||||
### Install base services to the cluster
|
|
||||||
|
|
||||||
1) Copy and edit variables
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cp terraform.tfvars.example terraform.tfvars
|
|
||||||
```
|
|
||||||
|
|
||||||
- `metallb_ip_range` - set to range available in your network for load balancer services
|
|
||||||
- `mariadb_password` - password for internal mariadb user
|
|
||||||
- `mariadb_root_password` - password for root user
|
|
||||||
- `mariadb_user_name` - username for admin user
|
|
||||||
- `mariadb_user_host` - allowed hosts for admin user
|
|
||||||
- `mariadb_user_password` - password for admin user
|
|
||||||
- `metallb_maxscale_ip`, `metallb_service_ip`, `metallb_primary_ip`, `metallb_secondary_ip` - IPs for database
|
|
||||||
cluster,
|
|
||||||
set them to static IPs from the `metallb_ip_range`
|
|
||||||
- `s3_enabled`, `s3_bucket`, `s3_region`, `s3_endpoint`, `s3_key_id`, `s3_key_secret` - S3 compatible storage for
|
|
||||||
backups (optional)
|
|
||||||
- `phpmyadmin_enabled` - set to false if you want to disable phpmyadmin
|
|
||||||
- `rabbitmq_password` - password for RabbitMQ
|
|
||||||
|
|
||||||
- `cloudflare_account_id` - your Cloudflare account ID
|
|
||||||
- `cloudflare_api_token` - your Cloudflare API token with permissions to manage tunnels and DNS
|
|
||||||
- `cloudflare_email` - your Cloudflare account email
|
|
||||||
- `cloudflare_tunnel_name` - name for the tunnel
|
|
||||||
- `cloudflare_domain` - your domain name managed in Cloudflare
|
|
||||||
|
|
||||||
2) Deploy without Cloudflare module first
|
|
||||||
|
|
||||||
```bash
|
|
||||||
tofu init
|
|
||||||
tofu apply -exclude=module.cloudflare
|
|
||||||
```
|
|
||||||
|
|
||||||
3) Deploy rest of the modules
|
|
||||||
|
|
||||||
```bash
|
|
||||||
tofu apply
|
|
||||||
```
|
|
||||||
|
|
||||||
### Configure deployment
|
|
||||||
|
|
||||||
1) Create self-hosted runner with access to the cluster or make cluster publicly accessible
|
|
||||||
2) Change `jobs.deploy.runs-on` in `.github/workflows/deploy-prod.yml` and in `.github/workflows/deploy-pr.yaml` to your
|
|
||||||
runner label
|
|
||||||
3) Add variables to GitHub in repository settings:
|
|
||||||
- `PROD_DOMAIN` - base domain for deployments (e.g. ltrk.cz)
|
|
||||||
- `DEV_FRONTEND_BASE_DOMAIN` - base domain for your cloudflare pages
|
|
||||||
4) Add secrets to GitHub in repository settings:
|
|
||||||
- CLOUDFLARE_ACCOUNT_ID - same as in tofu/terraform.tfvars
|
|
||||||
- CLOUDFLARE_API_TOKEN - same as in tofu/terraform.tfvars
|
|
||||||
- DOCKER_USER - your docker registry username
|
|
||||||
- DOCKER_PASSWORD - your docker registry password
|
|
||||||
- KUBE_CONFIG - content of your kubeconfig file for the cluster
|
|
||||||
- PROD_DB_PASSWORD - same as MARIADB_PASSWORD
|
|
||||||
    - PROD_RABBITMQ_PASSWORD - same as RABBITMQ_PASSWORD
|
|
||||||
- PROD_DB_ENCRYPTION_KEY - same as DB_ENCRYPTION_KEY
|
|
||||||
- MOJEID_CLIENT_ID
|
|
||||||
- MOJEID_CLIENT_SECRET
|
|
||||||
- BANKID_CLIENT_ID
|
|
||||||
- BANKID_CLIENT_SECRET
|
|
||||||
- CSAS_CLIENT_ID
|
|
||||||
- CSAS_CLIENT_SECRET
|
|
||||||
- SENTRY_DSN
|
|
||||||
- SMTP_HOST
|
|
||||||
- SMTP_PORT
|
|
||||||
- SMTP_USERNAME
|
|
||||||
- SMTP_PASSWORD
|
|
||||||
- SMTP_FROM
|
|
||||||
- UNIRATE_API_KEY
|
|
||||||
5) On Github open Actions tab, select "Deploy Prod" and run workflow manually
|
|
||||||
|
|
||||||
## Testing Instructions
|
|
||||||
|
|
||||||
The tests are located in 7project/backend/tests directory. All tests are run by GitHub actions on every pull request and
|
|
||||||
push to main.
|
|
||||||
See the workflow [here](../.github/workflows/run-tests.yml).
|
|
||||||
|
|
||||||
If you want to run the tests locally, the preferred way is to use a [bash script](backend/test_locally.sh)
|
|
||||||
that will start a test DB container with [docker compose](backend/docker-compose.test.yml) and remove it afterwards.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd 7project/src/backend
|
|
||||||
bash test_locally.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
### Unit Tests
|
|
||||||
|
|
||||||
There are only 5 basic unit tests, since our services logic is very simple
|
|
||||||
|
|
||||||
```bash
|
|
||||||
bash test_locally.sh --only-unit
|
|
||||||
```
|
|
||||||
|
|
||||||
### Integration Tests
|
|
||||||
|
|
||||||
There are 9 basic integration tests, testing the individual backend API logic
|
|
||||||
|
|
||||||
```bash
|
|
||||||
bash test_locally.sh --only-integration
|
|
||||||
```
|
|
||||||
|
|
||||||
### End-to-End Tests
|
|
||||||
|
|
||||||
There are 7 e2e tests, testing more complex app logic
|
|
||||||
|
|
||||||
```bash
|
|
||||||
bash test_locally.sh --only-e2e
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage Examples
|
|
||||||
|
|
||||||
All endpoints are documented at OpenAPI: http://127.0.0.1:8000/docs
|
|
||||||
|
|
||||||
### Auth: Register and Login (JWT)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Register
|
|
||||||
curl -X POST http://127.0.0.1:8000/auth/register \
|
|
||||||
-H 'Content-Type: application/json' \
|
|
||||||
-d '{
|
|
||||||
"email": "user@example.com",
|
|
||||||
"password": "StrongPassw0rd",
|
|
||||||
"first_name": "Jane",
|
|
||||||
"last_name": "Doe"
|
|
||||||
}'
|
|
||||||
|
|
||||||
# Login (JWT)
|
|
||||||
TOKEN=$(curl -s -X POST http://127.0.0.1:8000/auth/jwt/login \
|
|
||||||
-H 'Content-Type: application/x-www-form-urlencoded' \
|
|
||||||
-d 'username=user@example.com&password=StrongPassw0rd' | jq -r .access_token)
|
|
||||||
|
|
||||||
echo $TOKEN
|
|
||||||
|
|
||||||
# Call a protected route
|
|
||||||
curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
|
||||||
```
|
|
||||||
|
|
||||||
### Frontend
|
|
||||||
|
|
||||||
- Start with:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd 7project/src/frontend && npm run dev
|
|
||||||
```
|
|
||||||
|
|
||||||
- Ensure VITE_BACKEND_URL is set to the backend URL (e.g., http://127.0.0.1:8000)
|
|
||||||
- Open http://localhost:5173
|
|
||||||
- Login, view latest transactions, filter, and add new transactions from the UI.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Presentation Video
|
|
||||||
|
|
||||||
**YouTube Link**: https://youtu.be/FKR85AVN8bI
|
|
||||||
|
|
||||||
**Duration**: 9 minutes 43 seconds
|
|
||||||
|
|
||||||
**Video Includes**:
|
|
||||||
|
|
||||||
- [x] Project overview and architecture
|
|
||||||
- [x] Live demonstration of key features
|
|
||||||
- [x] Code walkthrough
|
|
||||||
- [x] Build and deployment showcase
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Common Issues
|
|
||||||
|
|
||||||
#### Issue 1: Unable to apply Cloudflare terraform module
|
|
||||||
|
|
||||||
**Symptoms**: Terraform/OpenTofu apply fails during Cloudflare module deployment.
|
|
||||||
This is caused by a variable whose value is not known before the first apply.
|
|
||||||
|
|
||||||
**Solution**: Apply first without Cloudflare module and then apply again.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
tofu apply -exclude=module.cloudflare
|
|
||||||
tofu apply
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Issue 2: Pods are unable to start
|
|
||||||
|
|
||||||
**Symptoms**: Pods are unable to start with ImagePullBackOff error. This could be caused
|
|
||||||
by either hitting docker hub rate limits or by docker hub being down.
|
|
||||||
|
|
||||||
**Solution**: Make sure you updated the cluster config to use registry mirror as described in
|
|
||||||
"Setup Cluster" section.
|
|
||||||
|
|
||||||
### Debug Commands
|
|
||||||
|
|
||||||
Get a detailed description of the Deployment:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
kubectl describe deployment finance-tracker -n prod
|
|
||||||
```
|
|
||||||
|
|
||||||
Get a list of pods in the Deployment:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
kubectl get pods -n prod
|
|
||||||
```
|
|
||||||
|
|
||||||
Check the logs of a specific pod copy value for <pod-name> from the command above (--previous flag shows logs of a
|
|
||||||
failing pod, remove it if the pod is not failing):
|
|
||||||
|
|
||||||
```bash
|
|
||||||
kubectl logs <pod-name> -n prod --previous
|
|
||||||
```
|
|
||||||
|
|
||||||
See the service description:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
kubectl describe service finance-tracker -n prod
|
|
||||||
```
|
|
||||||
|
|
||||||
Connect to the pod and run a bash shell:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
kubectl exec -it <pod-name> -n prod -- /bin/bash
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Progress Table
|
|
||||||
|
|
||||||
> Be honest and detailed in your assessments.
|
|
||||||
> This information is used for individual grading.
|
|
||||||
> Link to the specific commit on GitHub for each contribution.
|
|
||||||
|
|
||||||
| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|
|
||||||
|:----------------------------------------------------------------------------------------------------------|:------------|:-----------|:-----------|:-----------|:------|
|
|
||||||
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8/pull/1) | Both | ✅ Complete | 10 Hours | Medium | |
|
|
||||||
| [Design Document](https://github.com/dat515-2025/Group-8/commit/f09f9eaa82d0953afe41f33c57ff63e0933a81ef) | Both | ✅ Complete | 4 Hours | Easy | |
|
|
||||||
| [Cluster setup ](https://github.com/dat515-2025/Group-8/commit/c8048d940df00874c290d99cdb4ad366bca6e95d) | Lukas | ✅ Complete | 30 hours | Hard | |
|
|
||||||
| [Backend API Development](https://github.com/dat515-2025/Group-8/pull/26) | Dejan | ✅ Complete | 22 hours | Medium | |
|
|
||||||
| [Database Setup & Models](https://github.com/dat515-2025/Group-8/pull/19) | Lukas | ✅ Complete | 5 hours | Medium | |
|
|
||||||
| [Frontend Development](https://github.com/dat515-2025/Group-8/pull/28) | Dejan | ✅ Complete | 32 hours | Medium | |
|
|
||||||
| [Docker Configuration](https://github.com/dat515-2025/Group-8/pull/1) | Lukas | ✅ Complete | 3 hours | Easy | |
|
|
||||||
| [Authentification](https://github.com/dat515-2025/Group-8/pull/23) | Both | ✅ Complete | 11 hours | Medium | |
|
|
||||||
| [Transactions loading](https://github.com/dat515-2025/Group-8/pull/32) | Lukas | ✅ Complete | 7 hours | Medium | |
|
|
||||||
| [Monitoring](https://github.com/dat515-2025/Group-8/pull/42/) | Lukas | ✅ Complete | 9 hours | Medium | |
|
|
||||||
| [Cloud Deployment](https://github.com/dat515-2025/Group-8/pull/16) | Both | ✅ Complete | 21 hours | Hard | |
|
|
||||||
| [Testing Implementation](https://github.com/dat515-2025/Group-8/pull/31/) | Both | ✅ Complete | 21 hours | Medium | |
|
|
||||||
| [Documentation](https://github.com/dat515-2025/Group-8/commit/515106b238bc032d5f7d5dcae931b5cb7ee2a281) | Both | ✅ Complete | 14 hours | Medium | |
|
|
||||||
| [Presentation Video](https://github.com/dat515-2025/group-name) TODO | Both | Started | 3 hours | Medium | |
|
|
||||||
|
|
||||||
## Hour Sheet
|
|
||||||
|
|
||||||
### Lukáš
|
|
||||||
|
|
||||||
| Date | Activity | Hours | Description | Representative Commit / PR |
|
|
||||||
|:----------------|:----------------------------|:--------|:------------------------------------------------------------------------------------|:------------------------------------------------------|
|
|
||||||
| 18.9. - 19.9. | Initial Setup & Design | 10 | Repository init, system design diagrams, basic Terraform setup | `feat(infrastructure): add basic terraform resources` |
|
|
||||||
| 20.9. - 5.10. | Core Infrastructure & CI/CD | 12 | K8s setup (ArgoCD), CI/CD workflows, RabbitMQ, Redis, Celery workers, DB migrations | `PR #2`, `feat(infrastructure): add rabbitmq cluster` |
|
|
||||||
| 6.10. - 9.10. | Frontend Infra & DB | 5 | Deployed frontend to Cloudflare, setup metrics, created database models | `PR #16` (Cloudflare), `PR #19` (DB structure) |
|
|
||||||
| 10.10. - 11.10. | Backend | 5 | Implemented OAuth support (MojeID, BankID) | `feat(auth): add support for OAuth and MojeID` |
|
|
||||||
| 12.10. | Infrastructure | 2 | Added database backups | `feat(infrastructure): add backups` |
|
|
||||||
| 16.10. | Infrastructure | 4 | Implemented secrets management, fixed deployment/env variables | `PR #29` (Deployment envs) |
|
|
||||||
| 17.10. | Monitoring | 1 | Added Sentry logging | `feat(app): add sentry loging` |
|
|
||||||
| 21.10. - 22.10. | Backend | 8 | Added ČSAS bank connection | `PR #32` (Fix React OAuth) |
|
|
||||||
| 29.10. - 30.10. | Backend | 5 | Implemented transaction encryption, add bank scraping | `PR #39` (CSAS Scraping) |
|
|
||||||
| 30.10. | Monitoring | 6 | Implemented Loki logging and basic Prometheus metrics | `PR #42` (Prometheus metrics) |
|
|
||||||
| 9.11. | Monitoring | 2 | Added custom Prometheus metrics | `PR #46` (Prometheus custom metrics) |
|
|
||||||
| 11.11. | Tests | 1 | Investigated and fixed broken Pytest environment | `fix(tests): set pytest env` |
|
|
||||||
| 11.11. - 12.11. | Features & Deployment | 6 | Added cron support, email sender service, updated workers & image | `PR #49` (Email), `PR #50` (Update workers) |
|
|
||||||
| 18.9 - 14.11 | Documentation | 8 | Updated report.md, design docs, and tfvars.example | `Create design.md`, `update report` |
|
|
||||||
| **Total** | | **105** | | |
|
|
||||||
|
|
||||||
### Dejan
|
|
||||||
|
|
||||||
| Date | Activity | Hours | Description | Representative Commit / PR |
|
|
||||||
|:-----------------|:---------------------|:-------|:----------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------|
|
|
||||||
| 25.9. | Design | 2 | 6design | |
|
|
||||||
| 9.10. to 11.10. | Backend APIs | 14 | Implemented Backend APIs | `PR #26`, `20-create-a-controller-layer-on-backend-side` |
|
|
||||||
| 13.10. to 15.10. | Frontend Development | 8 | Created user interface mockups | `PR #28`, `frontend basics` |
|
|
||||||
| 21.10. to 23.10. | Tests, frontend | 10 | Test basics, balance charts, and frontend improvement | `PR #31`, `30 create tests and set up a GitHub pipeline` |
|
|
||||||
| 28.10. to 30.10. | CI/CD | 6 | Integrated tests with test database setup on github workflows | `PR #31`, `30 create tests and set up a GitHub pipeline` |
|
|
||||||
| 28.10. to 30.10. | Frontend | 8 | UI improvements and exchange rate API integration | `PR #35`, `34 improve frontend functionality` |
|
|
||||||
| 29.10. | Backend | 4 | Token invalidation, few fixes | `PR #38`, `fix(backend): implemented jwt token invalidation so users cannot use …` |
|
|
||||||
| 4.11. to 6.11. | Tests | 6 | Test fixes improvement, more integration and e2e | `PR #45`, `feat(test): added more tests ` |
|
|
||||||
| 4.11. to 6.11. | Frontend | 8 | Fixes, rates API, Improved UI, added support for mobile devices | `PR #41, #44`, `feat(frontend): added CNB API and moved management into a new tab`, `43 fix the UI layout in chrome ` |
|
|
||||||
| 11.11. | Backend APIs | 4 | Moved rates API, mock bank to Backend, few fixes | `feat(backend): Moved the unirate API to the backend `, `feat(backend): moved mock bank to backend` |
|
|
||||||
| 11.11. to 12.11. | Tests | 3 | Local testing DB container, few fixes | `PR #48`, `fix(tests): fixed test runtime errors regarding database connection ` |
|
|
||||||
| 12.11. | Frontend | 3 | Enabled multiple transaction edits at once, CSAS button state | `feat(frontend): implemented multiple transaction selections in UI` |
|
|
||||||
| 13.11. | Video | 3 | Video | |
|
|
||||||
| 25.9. to 14.11. | Documentation | 8 | Documenting the dev process | multiple `feat(docs): report.md update` |
|
|
||||||
| **Total** | | **87** | | |
|
|
||||||
|
|
||||||
### Group Total: 192 hours
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Final Reflection
|
|
||||||
|
|
||||||
### What We Learned
|
|
||||||
|
|
||||||
#### Technical
|
|
||||||
|
|
||||||
- We learned how to use AI to help us with our project.
|
|
||||||
- We learned how to use Copilot for PR reviews.
|
|
||||||
- We learned how to troubleshoot issues with our project in different areas.
|
|
||||||
|
|
||||||
#### Collaboration
|
|
||||||
|
|
||||||
- Weekly meetings with the TA were great for syncing up on progress, discussing issues, planning future work.
|
|
||||||
- Using GitHub issues and pull requests was very helpful for keeping track of progress.
|
|
||||||
|
|
||||||
### Challenges Faced
|
|
||||||
|
|
||||||
#### Slow cluster performance
|
|
||||||
|
|
||||||
This was caused by single SATA SSD disk running all VMs. This was solved by adding second NVMe disk just for Talos VMs.
|
|
||||||
|
|
||||||
#### Stuck IaC deployment
|
|
||||||
|
|
||||||
If the deployed module (helm chart for example) was not configured properly, it would get stuck and timeout resulting in
|
|
||||||
namespace that cannot be deleted.
|
|
||||||
This was solved by using snapshots in Proxmox and restoring if this happened.
|
|
||||||
|
|
||||||
#### Not enough time to implement all features
|
|
||||||
|
|
||||||
Since this course is worth only 5 credits, we often had to prioritize other courses we were attending over this project.
|
|
||||||
In the end, we were able to implement all necessary features.
|
|
||||||
|
|
||||||
### If We Did This Again
|
|
||||||
|
|
||||||
#### Different framework
|
|
||||||
|
|
||||||
FastAPI lacks usable build in support for database migrations and implementing Alembic was a bit tricky.
|
|
||||||
Tricky was also integrating FastAPI auth system with React frontend, since there is no official project template.
|
|
||||||
Using .NET (which we considered initially) would probably solve these issues.
|
|
||||||
|
|
||||||
#### Private container registry
|
|
||||||
|
|
||||||
Using private container registry would allow us to include environment variables directly in the image during build.
|
|
||||||
This would simplify deployment and CI/CD setup.
|
|
||||||
|
|
||||||
#### Start sooner
|
|
||||||
|
|
||||||
The weekly meetings helped us to start planning the project earlier and avoid spending too much time on details,
|
|
||||||
but we could have started earlier if we had more time.
|
|
||||||
|
|
||||||
[What would you do differently? What worked well that you'd keep?]
|
|
||||||
|
|
||||||
### Individual Growth
|
|
||||||
|
|
||||||
#### [Lukas]
|
|
||||||
|
|
||||||
This course finally forced me to learn Kubernetes (it has been on my TODO list for at least 3 years).
|
|
||||||
I had some prior experience with Terraform/OpenTofu from work, but this project improved my understanding of it.
|
|
||||||
|
|
||||||
The biggest challenge for me was time tracking since I am used to tracking to projects, not to tasks.
|
|
||||||
(I am bad even at that :) ).
|
|
||||||
|
|
||||||
It was also interesting experience to be the one responsible for the initial project structure/design/setup
|
|
||||||
used not only by myself.
|
|
||||||
|
|
||||||
#### [Dejan]
|
|
||||||
|
|
||||||
Since I do not have a job and I am more theoretically oriented student (I am more into math, algorithms, cryptography),
|
|
||||||
this project was probably the most complex one I have ever worked on.
|
|
||||||
For me, it was a great experience to work on an actually deployed fullstack app and not only local development, that I
|
|
||||||
was used to from the past.
|
|
||||||
|
|
||||||
It was also a great experience to collaborate with Lukas who has prior experience with app deployment and
|
|
||||||
infrastructure.
|
|
||||||
Thanks to this, I learned a lot of new technologies and how to work in a team (first time reviewing PRs).
|
|
||||||
|
|
||||||
It was challenging to wrap my head around the project structure and how everything was connected (And I still think I
|
|
||||||
have some gaps in my knowledge).
|
|
||||||
But I think that if I decide to create my own demo project in the future, I will definitely be able to work on it much
|
|
||||||
more efficiently.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Report Completion Date**: 15.11.2025
|
|
||||||
**Last Updated**: 15.11.2025
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
## Folder structure
|
|
||||||
|
|
||||||
- `src/`
|
|
||||||
- `backend/` - Python FastAPI backend application. Described in separate [README](./backend/README.md).
|
|
||||||
- `charts/`
|
|
||||||
- `myapp-chart/` - Helm chart for deploying the application, supports prod and dev environments. Described in
|
|
||||||
separate [README](./charts/README.md).
|
|
||||||
- `frontend/` - React frontend application. Described in separate
|
|
||||||
[README](./frontend/README.md).
|
|
||||||
- `tofu/` - Terraform/OpenTofu services deployment configurations. Described in separate
|
|
||||||
[README](./tofu/README.md).
|
|
||||||
- `compose.yaml` - Docker Compose file for local development
|
|
||||||
- `create_migration.sh` - script to create new Alembic database migration
|
|
||||||
- `upgrade_database.sh` - script to upgrade database to latest Alembic revision
|
|
||||||
8
7project/src/backend/.idea/.gitignore
generated
vendored
8
7project/src/backend/.idea/.gitignore
generated
vendored
@@ -1,8 +0,0 @@
|
|||||||
# Default ignored files
|
|
||||||
/shelf/
|
|
||||||
/workspace.xml
|
|
||||||
# Editor-based HTTP Client requests
|
|
||||||
/httpRequests/
|
|
||||||
# Datasource local storage ignored files
|
|
||||||
/dataSources/
|
|
||||||
/dataSources.local.xml
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
FROM python:3.11-trixie
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
COPY requirements.txt .
|
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
|
||||||
COPY . .
|
|
||||||
EXPOSE 8000
|
|
||||||
CMD ["sh", "-c", "alembic upgrade head && uvicorn app.app:fastApi --host 0.0.0.0 --port 8000"]
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
# Backend
|
|
||||||
|
|
||||||
This directory contains the backend code for the project. It is built using Python and FastAPI framework and with
|
|
||||||
database migrations support using Alembic.
|
|
||||||
|
|
||||||
## Directory structure
|
|
||||||
|
|
||||||
- `alembic/` - database migrations
|
|
||||||
- `app/` - main application code
|
|
||||||
- `api/` - API endpoints - routers/controllers with request handling logic
|
|
||||||
- `core/` - core application logic - database session management, security
|
|
||||||
- `models/` - database models
|
|
||||||
- `schemas/` - Endpoint schemas
|
|
||||||
- `services/` - utilities for various tasks
|
|
||||||
- `workers/` - background tasks
|
|
||||||
- `app.py` - FastAPI startup script
|
|
||||||
- `celery_app.py` - Celery startup script
|
|
||||||
- `tests/` - tests
|
|
||||||
- `docker-compose.test.yml` - docker compose for testing database
|
|
||||||
- `Dockerfile` - production Dockerfile
|
|
||||||
- `main.py` - App entrypoint
|
|
||||||
- `requirements.txt` - Python dependencies
|
|
||||||
- `test_locally.sh` - script to run tests with temporary database
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
"""update categories unique
|
|
||||||
|
|
||||||
Revision ID: 390041bd839e
|
|
||||||
Revises: 63e072f09836
|
|
||||||
Create Date: 2025-10-09 15:14:31.557686
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '390041bd839e'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '63e072f09836'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Upgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.drop_index(op.f('name'), table_name='categories')
|
|
||||||
op.create_unique_constraint('uix_name_user_id', 'categories', ['name', 'user_id'])
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Downgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.drop_constraint('uix_name_user_id', 'categories', type_='unique')
|
|
||||||
op.create_index(op.f('name'), 'categories', ['name'], unique=True)
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
"""add user oauth
|
|
||||||
|
|
||||||
Revision ID: 7af8f296d089
|
|
||||||
Revises: 390041bd839e
|
|
||||||
Create Date: 2025-10-10 14:05:00.153376
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
import fastapi_users_db_sqlalchemy
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '7af8f296d089'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '390041bd839e'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Upgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.create_table('oauth_account',
|
|
||||||
sa.Column('id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
|
||||||
sa.Column('user_id', fastapi_users_db_sqlalchemy.generics.GUID(), nullable=False),
|
|
||||||
sa.Column('oauth_name', sa.String(length=100), nullable=False),
|
|
||||||
sa.Column('access_token', sa.String(length=1024), nullable=False),
|
|
||||||
sa.Column('expires_at', sa.Integer(), nullable=True),
|
|
||||||
sa.Column('refresh_token', sa.String(length=1024), nullable=True),
|
|
||||||
sa.Column('account_id', sa.String(length=320), nullable=False),
|
|
||||||
sa.Column('account_email', sa.String(length=320), nullable=False),
|
|
||||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ondelete='cascade'),
|
|
||||||
sa.PrimaryKeyConstraint('id')
|
|
||||||
)
|
|
||||||
op.create_index(op.f('ix_oauth_account_account_id'), 'oauth_account', ['account_id'], unique=False)
|
|
||||||
op.create_index(op.f('ix_oauth_account_oauth_name'), 'oauth_account', ['oauth_name'], unique=False)
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Downgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.drop_index(op.f('ix_oauth_account_oauth_name'), table_name='oauth_account')
|
|
||||||
op.drop_index(op.f('ix_oauth_account_account_id'), table_name='oauth_account')
|
|
||||||
op.drop_table('oauth_account')
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
"""change token length
|
|
||||||
|
|
||||||
Revision ID: 5ab2e654c96e
|
|
||||||
Revises: 7af8f296d089
|
|
||||||
Create Date: 2025-10-11 21:07:41.930470
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import mysql
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '5ab2e654c96e'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '7af8f296d089'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Upgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.alter_column('oauth_account', 'access_token',
|
|
||||||
existing_type=mysql.VARCHAR(length=1024),
|
|
||||||
type_=sa.String(length=4096),
|
|
||||||
existing_nullable=False)
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Downgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.alter_column('oauth_account', 'access_token',
|
|
||||||
existing_type=sa.String(length=4096),
|
|
||||||
type_=mysql.VARCHAR(length=1024),
|
|
||||||
existing_nullable=False)
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
"""add config to user
|
|
||||||
|
|
||||||
Revision ID: eabec90a94fe
|
|
||||||
Revises: 5ab2e654c96e
|
|
||||||
Create Date: 2025-10-21 18:56:42.085973
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = 'eabec90a94fe'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '5ab2e654c96e'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Upgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.add_column('user', sa.Column('config', sa.JSON(), nullable=True))
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Downgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.drop_column('user', 'config')
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
"""add date to transaction
|
|
||||||
|
|
||||||
Revision ID: 1f2a3c4d5e6f
|
|
||||||
Revises: eabec90a94fe
|
|
||||||
Create Date: 2025-10-22 16:18:00
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.sql import func
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '1f2a3c4d5e6f'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = 'eabec90a94fe'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Upgrade schema by adding date column with server default current_date."""
|
|
||||||
op.add_column(
|
|
||||||
'transaction',
|
|
||||||
sa.Column('date', sa.Date(), nullable=False, server_default=sa.text('CURRENT_DATE'))
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Downgrade schema by removing date column."""
|
|
||||||
op.drop_column('transaction', 'date')
|
|
||||||
@@ -1,47 +0,0 @@
|
|||||||
"""Add encrypted type
|
|
||||||
|
|
||||||
Revision ID: 46b9e702e83f
|
|
||||||
Revises: 1f2a3c4d5e6f
|
|
||||||
Create Date: 2025-10-29 13:26:24.568523
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
import sqlalchemy_utils
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import mysql
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '46b9e702e83f'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '1f2a3c4d5e6f'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Upgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.alter_column('transaction', 'amount',
|
|
||||||
existing_type=mysql.FLOAT(),
|
|
||||||
type_=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
|
|
||||||
existing_nullable=False)
|
|
||||||
op.alter_column('transaction', 'description',
|
|
||||||
existing_type=mysql.VARCHAR(length=255),
|
|
||||||
type_=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
|
|
||||||
existing_nullable=True)
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Downgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.alter_column('transaction', 'description',
|
|
||||||
existing_type=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
|
|
||||||
type_=mysql.VARCHAR(length=255),
|
|
||||||
existing_nullable=True)
|
|
||||||
op.alter_column('transaction', 'amount',
|
|
||||||
existing_type=sqlalchemy_utils.types.encrypted.encrypted_type.EncryptedType(),
|
|
||||||
type_=mysql.FLOAT(),
|
|
||||||
existing_nullable=False)
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
@@ -1,46 +0,0 @@
|
|||||||
"""Cascade categories
|
|
||||||
|
|
||||||
Revision ID: 59cebf320c4a
|
|
||||||
Revises: 46b9e702e83f
|
|
||||||
Create Date: 2025-10-30 13:42:44.555284
|
|
||||||
|
|
||||||
"""
|
|
||||||
from typing import Sequence, Union
|
|
||||||
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import mysql
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision: str = '59cebf320c4a'
|
|
||||||
down_revision: Union[str, Sequence[str], None] = '46b9e702e83f'
|
|
||||||
branch_labels: Union[str, Sequence[str], None] = None
|
|
||||||
depends_on: Union[str, Sequence[str], None] = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
"""Upgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.add_column('category_transaction', sa.Column('category_id', sa.Integer(), nullable=False))
|
|
||||||
op.add_column('category_transaction', sa.Column('transaction_id', sa.Integer(), nullable=False))
|
|
||||||
op.drop_constraint(op.f('category_transaction_ibfk_2'), 'category_transaction', type_='foreignkey')
|
|
||||||
op.drop_constraint(op.f('category_transaction_ibfk_1'), 'category_transaction', type_='foreignkey')
|
|
||||||
op.create_foreign_key(None, 'category_transaction', 'transaction', ['transaction_id'], ['id'], ondelete='CASCADE')
|
|
||||||
op.create_foreign_key(None, 'category_transaction', 'categories', ['category_id'], ['id'], ondelete='CASCADE')
|
|
||||||
op.drop_column('category_transaction', 'id_category')
|
|
||||||
op.drop_column('category_transaction', 'id_transaction')
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
"""Downgrade schema."""
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.add_column('category_transaction', sa.Column('id_transaction', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
|
|
||||||
op.add_column('category_transaction', sa.Column('id_category', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
|
|
||||||
op.drop_constraint(None, 'category_transaction', type_='foreignkey')
|
|
||||||
op.drop_constraint(None, 'category_transaction', type_='foreignkey')
|
|
||||||
op.create_foreign_key(op.f('category_transaction_ibfk_1'), 'category_transaction', 'categories', ['id_category'], ['id'])
|
|
||||||
op.create_foreign_key(op.f('category_transaction_ibfk_2'), 'category_transaction', 'transaction', ['id_transaction'], ['id'])
|
|
||||||
op.drop_column('category_transaction', 'transaction_id')
|
|
||||||
op.drop_column('category_transaction', 'category_id')
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
from fastapi import APIRouter, Depends, status
|
|
||||||
from fastapi_users import models
|
|
||||||
from fastapi_users.manager import BaseUserManager
|
|
||||||
|
|
||||||
from app.schemas.user import UserCreate, UserRead, UserUpdate
|
|
||||||
from app.services.user_service import auth_backend, fastapi_users
|
|
||||||
|
|
||||||
router = APIRouter()
|
|
||||||
|
|
||||||
@router.delete(
|
|
||||||
"/users/me",
|
|
||||||
status_code=status.HTTP_204_NO_CONTENT,
|
|
||||||
tags=["users"],
|
|
||||||
summary="Delete current user",
|
|
||||||
response_description="The user has been successfully deleted.",
|
|
||||||
)
|
|
||||||
async def delete_me(
|
|
||||||
user: models.UserProtocol = Depends(fastapi_users.current_user(active=True)),
|
|
||||||
user_manager: BaseUserManager = Depends(fastapi_users.get_user_manager),
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Delete the currently authenticated user.
|
|
||||||
"""
|
|
||||||
await user_manager.delete(user)
|
|
||||||
|
|
||||||
# Keep existing paths as-is under /auth/* and /users/*
|
|
||||||
from fastapi import Request, Response
|
|
||||||
from app.core.security import revoke_token, extract_bearer_token
|
|
||||||
|
|
||||||
|
|
||||||
@router.post(
|
|
||||||
"/auth/jwt/logout",
|
|
||||||
status_code=status.HTTP_204_NO_CONTENT,
|
|
||||||
tags=["auth"],
|
|
||||||
summary="Log out and revoke current token",
|
|
||||||
)
|
|
||||||
async def custom_logout(request: Request) -> Response:
|
|
||||||
"""Revoke the current bearer token so it cannot be used anymore."""
|
|
||||||
token = extract_bearer_token(request)
|
|
||||||
if token:
|
|
||||||
revoke_token(token)
|
|
||||||
return Response(status_code=status.HTTP_204_NO_CONTENT)
|
|
||||||
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_auth_router(auth_backend), prefix="/auth/jwt", tags=["auth"]
|
|
||||||
)
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_register_router(UserRead, UserCreate),
|
|
||||||
prefix="/auth",
|
|
||||||
tags=["auth"],
|
|
||||||
)
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_reset_password_router(),
|
|
||||||
prefix="/auth",
|
|
||||||
tags=["auth"],
|
|
||||||
)
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_verify_router(UserRead),
|
|
||||||
prefix="/auth",
|
|
||||||
tags=["auth"],
|
|
||||||
)
|
|
||||||
router.include_router(
|
|
||||||
fastapi_users.get_users_router(UserRead, UserUpdate),
|
|
||||||
prefix="/users",
|
|
||||||
tags=["users"],
|
|
||||||
)
|
|
||||||
@@ -1,108 +0,0 @@
|
|||||||
from typing import List
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
|
||||||
from sqlalchemy import select, delete
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.models.categories import Category
|
|
||||||
from app.schemas.category import CategoryCreate, CategoryRead, CategoryUpdate
|
|
||||||
from app.services.db import get_async_session
|
|
||||||
from app.services.user_service import current_active_user
|
|
||||||
from app.models.user import User
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/categories", tags=["categories"])
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/create", response_model=CategoryRead, status_code=status.HTTP_201_CREATED)
|
|
||||||
async def create_category(
|
|
||||||
payload: CategoryCreate,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
# Enforce per-user unique name via query to provide 409 feedback
|
|
||||||
res = await session.execute(
|
|
||||||
select(Category).where(Category.user_id == user.id, Category.name == payload.name)
|
|
||||||
)
|
|
||||||
existing = res.scalar_one_or_none()
|
|
||||||
if existing:
|
|
||||||
raise HTTPException(status_code=409, detail="Category with this name already exists")
|
|
||||||
|
|
||||||
category = Category(name=payload.name, description=payload.description, user_id=user.id)
|
|
||||||
session.add(category)
|
|
||||||
await session.commit()
|
|
||||||
await session.refresh(category)
|
|
||||||
return category
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[CategoryRead])
|
|
||||||
async def list_categories(
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
res = await session.execute(select(Category).where(Category.user_id == user.id))
|
|
||||||
return list(res.scalars())
|
|
||||||
|
|
||||||
|
|
||||||
@router.patch("/{category_id}", response_model=CategoryRead)
|
|
||||||
async def update_category(
|
|
||||||
category_id: int,
|
|
||||||
payload: CategoryUpdate,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
res = await session.execute(
|
|
||||||
select(Category).where(Category.id == category_id, Category.user_id == user.id)
|
|
||||||
)
|
|
||||||
category = res.scalar_one_or_none()
|
|
||||||
if not category:
|
|
||||||
raise HTTPException(status_code=404, detail="Category not found")
|
|
||||||
|
|
||||||
# If name changed, check uniqueness per user
|
|
||||||
if payload.name is not None and payload.name != category.name:
|
|
||||||
dup = await session.execute(
|
|
||||||
select(Category.id).where(Category.user_id == user.id, Category.name == payload.name)
|
|
||||||
)
|
|
||||||
if dup.scalar_one_or_none() is not None:
|
|
||||||
raise HTTPException(status_code=409, detail="Category with this name already exists")
|
|
||||||
category.name = payload.name
|
|
||||||
|
|
||||||
if payload.description is not None:
|
|
||||||
category.description = payload.description
|
|
||||||
|
|
||||||
await session.commit()
|
|
||||||
await session.refresh(category)
|
|
||||||
return category
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{category_id}", response_model=CategoryRead)
|
|
||||||
async def get_category(
|
|
||||||
category_id: int,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
res = await session.execute(
|
|
||||||
select(Category).where(Category.id == category_id, Category.user_id == user.id)
|
|
||||||
)
|
|
||||||
category = res.scalar_one_or_none()
|
|
||||||
if not category:
|
|
||||||
raise HTTPException(status_code=404, detail="Category not found")
|
|
||||||
return category
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/{category_id}", status_code=status.HTTP_204_NO_CONTENT)
|
|
||||||
async def delete_category(
|
|
||||||
category_id: int,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
res = await session.execute(
|
|
||||||
select(Category.id).where(Category.id == category_id, Category.user_id == user.id)
|
|
||||||
)
|
|
||||||
if res.scalar_one_or_none() is None:
|
|
||||||
raise HTTPException(status_code=404, detail="Category not found")
|
|
||||||
|
|
||||||
await session.execute(
|
|
||||||
delete(Category).where(Category.id == category_id, Category.user_id == user.id)
|
|
||||||
)
|
|
||||||
await session.commit()
|
|
||||||
return None
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
import json
|
|
||||||
import os
|
|
||||||
|
|
||||||
from fastapi import APIRouter
|
|
||||||
from fastapi.params import Depends
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.models.user import User
|
|
||||||
from app.oauth.csas import CSASOAuth
|
|
||||||
from app.services.db import get_async_session
|
|
||||||
from app.services.user_service import current_active_user
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/auth/csas", tags=["csas"])
|
|
||||||
|
|
||||||
CLIENT_ID = os.getenv("CSAS_CLIENT_ID")
|
|
||||||
CLIENT_SECRET = os.getenv("CSAS_CLIENT_SECRET")
|
|
||||||
CSAS_OAUTH = CSASOAuth(CLIENT_ID, CLIENT_SECRET)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/authorize")
|
|
||||||
async def csas_authorize():
|
|
||||||
return {"authorization_url":
|
|
||||||
await CSAS_OAUTH.get_authorization_url(os.getenv("FRONTEND_DOMAIN_SCHEME") + "/auth/csas/callback")}
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/callback")
|
|
||||||
async def csas_callback(code: str, session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user)):
|
|
||||||
response = await CSAS_OAUTH.get_access_token(code, os.getenv("FRONTEND_DOMAIN_SCHEME") + "/auth/csas/callback")
|
|
||||||
|
|
||||||
if not user.config:
|
|
||||||
user.config = {}
|
|
||||||
|
|
||||||
new_dict = user.config.copy()
|
|
||||||
new_dict["csas"] = json.dumps(response)
|
|
||||||
|
|
||||||
user.config = new_dict
|
|
||||||
await session.commit()
|
|
||||||
|
|
||||||
return "OK"
|
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
import os
|
|
||||||
from typing import List
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
from fastapi import APIRouter, HTTPException, Query, status
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/exchange-rates", tags=["exchange-rates"])
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("", status_code=status.HTTP_200_OK)
|
|
||||||
async def get_exchange_rates(symbols: str = Query("EUR,USD,NOK", description="Comma-separated currency codes to fetch vs CZK")):
|
|
||||||
"""
|
|
||||||
Fetch exchange rates from UniRate API on the backend and return CZK-per-target rates.
|
|
||||||
- Always requests CZK in addition to requested symbols to compute conversion from USD-base.
|
|
||||||
- Returns a list of {currencyCode, rate} where rate is CZK per 1 unit of the target currency.
|
|
||||||
"""
|
|
||||||
api_key = os.getenv("UNIRATE_API_KEY")
|
|
||||||
if not api_key:
|
|
||||||
raise HTTPException(status_code=500, detail="Server is not configured with UNIRATE_API_KEY")
|
|
||||||
|
|
||||||
# Ensure CZK is included for conversion
|
|
||||||
requested = [s.strip().upper() for s in symbols.split(",") if s.strip()]
|
|
||||||
if "CZK" not in requested:
|
|
||||||
requested.append("CZK")
|
|
||||||
query_symbols = ",".join(sorted(set(requested)))
|
|
||||||
|
|
||||||
url = f"https://unirateapi.com/api/rates?api_key={api_key}&symbols={query_symbols}"
|
|
||||||
|
|
||||||
try:
|
|
||||||
async with httpx.AsyncClient(timeout=httpx.Timeout(15.0)) as client:
|
|
||||||
resp = await client.get(url)
|
|
||||||
if resp.status_code != httpx.codes.OK:
|
|
||||||
raise HTTPException(status_code=502, detail=f"Upstream UniRate error: HTTP {resp.status_code}")
|
|
||||||
data = resp.json()
|
|
||||||
except httpx.HTTPError as e:
|
|
||||||
raise HTTPException(status_code=502, detail=f"Failed to contact UniRate: {str(e)}")
|
|
||||||
|
|
||||||
# Validate response structure
|
|
||||||
rates = data.get("rates") if isinstance(data, dict) else None
|
|
||||||
base = data.get("base") if isinstance(data, dict) else None
|
|
||||||
if not rates or base != "USD" or "CZK" not in rates:
|
|
||||||
# Prefer upstream message when available
|
|
||||||
detail = data.get("message") if isinstance(data, dict) else None
|
|
||||||
if not detail and isinstance(data, dict):
|
|
||||||
err = data.get("error")
|
|
||||||
if isinstance(err, dict):
|
|
||||||
detail = err.get("info")
|
|
||||||
raise HTTPException(status_code=502, detail=detail or "Invalid response from UniRate API")
|
|
||||||
|
|
||||||
czk_per_usd = rates["CZK"]
|
|
||||||
|
|
||||||
# Build result excluding CZK itself
|
|
||||||
result = []
|
|
||||||
for code in requested:
|
|
||||||
if code == "CZK":
|
|
||||||
continue
|
|
||||||
target_per_usd = rates.get(code)
|
|
||||||
if target_per_usd in (None, 0):
|
|
||||||
# Skip unavailable or invalid
|
|
||||||
continue
|
|
||||||
czk_per_target = czk_per_usd / target_per_usd
|
|
||||||
result.append({"currencyCode": code, "rate": czk_per_target})
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,116 +0,0 @@
|
|||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import List, Optional
|
|
||||||
import random
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends
|
|
||||||
from pydantic import BaseModel, Field, conint, confloat, validator
|
|
||||||
from sqlalchemy import select
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.services.db import get_async_session
|
|
||||||
from app.services.user_service import current_active_user
|
|
||||||
from app.models.user import User
|
|
||||||
from app.models.transaction import Transaction
|
|
||||||
from app.models.categories import Category
|
|
||||||
from app.schemas.transaction import TransactionRead
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/mock-bank", tags=["mock-bank"])
|
|
||||||
|
|
||||||
|
|
||||||
class GenerateOptions(BaseModel):
|
|
||||||
count: conint(strict=True, gt=0) = Field(default=10, description="Number of transactions to generate")
|
|
||||||
minAmount: confloat(strict=True) = Field(default=-200.0, description="Minimum transaction amount")
|
|
||||||
maxAmount: confloat(strict=True) = Field(default=200.0, description="Maximum transaction amount")
|
|
||||||
startDate: Optional[str] = Field(None, description="Earliest date (YYYY-MM-DD)")
|
|
||||||
endDate: Optional[str] = Field(None, description="Latest date (YYYY-MM-DD)")
|
|
||||||
categoryIds: List[int] = Field(default_factory=list, description="Optional category IDs to assign randomly")
|
|
||||||
|
|
||||||
@validator("maxAmount")
|
|
||||||
def _validate_amounts(cls, v, values):
|
|
||||||
min_amt = values.get("minAmount")
|
|
||||||
if min_amt is not None and v < min_amt:
|
|
||||||
raise ValueError("maxAmount must be greater than or equal to minAmount")
|
|
||||||
return v
|
|
||||||
|
|
||||||
@validator("endDate")
|
|
||||||
def _validate_dates(cls, v, values):
|
|
||||||
sd = values.get("startDate")
|
|
||||||
if v and sd:
|
|
||||||
try:
|
|
||||||
ed = datetime.strptime(v, "%Y-%m-%d").date()
|
|
||||||
st = datetime.strptime(sd, "%Y-%m-%d").date()
|
|
||||||
except ValueError:
|
|
||||||
raise ValueError("Invalid date format, expected YYYY-MM-DD")
|
|
||||||
if ed < st:
|
|
||||||
raise ValueError("endDate must be greater than or equal to startDate")
|
|
||||||
return v
|
|
||||||
|
|
||||||
|
|
||||||
class GeneratedTransaction(BaseModel):
|
|
||||||
amount: float
|
|
||||||
date: str # YYYY-MM-DD
|
|
||||||
category_ids: List[int] = []
|
|
||||||
description: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/generate", response_model=List[GeneratedTransaction])
|
|
||||||
async def generate_mock_transactions(
|
|
||||||
options: GenerateOptions,
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
# Seed randomness per user to make results less erratic across multiple calls in quick succession
|
|
||||||
seed = int(datetime.utcnow().timestamp()) ^ int(user.id)
|
|
||||||
rnd = random.Random(seed)
|
|
||||||
|
|
||||||
# Determine date range
|
|
||||||
if options.startDate:
|
|
||||||
start_date = datetime.strptime(options.startDate, "%Y-%m-%d").date()
|
|
||||||
else:
|
|
||||||
start_date = (datetime.utcnow() - timedelta(days=365)).date()
|
|
||||||
if options.endDate:
|
|
||||||
end_date = datetime.strptime(options.endDate, "%Y-%m-%d").date()
|
|
||||||
else:
|
|
||||||
end_date = datetime.utcnow().date()
|
|
||||||
|
|
||||||
span_days = max(0, (end_date - start_date).days)
|
|
||||||
|
|
||||||
results: List[GeneratedTransaction] = []
|
|
||||||
for _ in range(options.count):
|
|
||||||
amount = round(rnd.uniform(options.minAmount, options.maxAmount), 2)
|
|
||||||
# Pick a random date in the inclusive range
|
|
||||||
rand_day = rnd.randint(0, span_days) if span_days > 0 else 0
|
|
||||||
tx_date = start_date + timedelta(days=rand_day)
|
|
||||||
# Pick category randomly from provided list, or empty
|
|
||||||
if options.categoryIds:
|
|
||||||
cat = [rnd.choice(options.categoryIds)]
|
|
||||||
else:
|
|
||||||
cat = []
|
|
||||||
# Optional simple description for flavor
|
|
||||||
desc = None
|
|
||||||
# Assemble
|
|
||||||
results.append(GeneratedTransaction(
|
|
||||||
amount=amount,
|
|
||||||
date=tx_date.isoformat(),
|
|
||||||
category_ids=cat,
|
|
||||||
description=desc,
|
|
||||||
))
|
|
||||||
|
|
||||||
return results
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/scrape")
|
|
||||||
async def scrape_mock_bank():
|
|
||||||
# 80% of the time: nothing to scrape
|
|
||||||
if random.random() < 0.8:
|
|
||||||
return []
|
|
||||||
|
|
||||||
transactions = []
|
|
||||||
count = random.randint(1, 10)
|
|
||||||
for _ in range(count):
|
|
||||||
transactions.append({
|
|
||||||
"amount": round(random.uniform(-200.0, 200.0), 2),
|
|
||||||
"date": (datetime.utcnow().date() - timedelta(days=random.randint(0, 30))).isoformat(),
|
|
||||||
"description": "Mock transaction",
|
|
||||||
})
|
|
||||||
|
|
||||||
return transactions
|
|
||||||
@@ -1,280 +0,0 @@
|
|||||||
from typing import List, Optional
|
|
||||||
from datetime import date
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
|
||||||
from sqlalchemy import select, and_, func
|
|
||||||
from sqlalchemy.ext.asyncio import AsyncSession
|
|
||||||
|
|
||||||
from app.models.transaction import Transaction
|
|
||||||
from app.models.categories import Category
|
|
||||||
from app.schemas.transaction import (
|
|
||||||
TransactionCreate,
|
|
||||||
TransactionRead,
|
|
||||||
TransactionUpdate,
|
|
||||||
)
|
|
||||||
from app.services.db import get_async_session
|
|
||||||
from app.services.user_service import current_active_user
|
|
||||||
from app.models.user import User
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/transactions", tags=["transactions"])
|
|
||||||
|
|
||||||
|
|
||||||
def _to_read_model(tx: Transaction) -> TransactionRead:
|
|
||||||
return TransactionRead(
|
|
||||||
id=tx.id,
|
|
||||||
amount=tx.amount,
|
|
||||||
description=tx.description,
|
|
||||||
date=tx.date,
|
|
||||||
category_ids=[c.id for c in (tx.categories or [])],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/create", response_model=TransactionRead, status_code=status.HTTP_201_CREATED)
|
|
||||||
async def create_transaction(
|
|
||||||
payload: TransactionCreate,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
# Build transaction; set `date` only if provided to let DB default apply otherwise
|
|
||||||
tx_kwargs = dict(
|
|
||||||
amount=payload.amount,
|
|
||||||
description=payload.description,
|
|
||||||
user_id=user.id,
|
|
||||||
)
|
|
||||||
if payload.date is not None:
|
|
||||||
parsed_date = payload.date
|
|
||||||
if isinstance(parsed_date, str):
|
|
||||||
try:
|
|
||||||
parsed_date = date.fromisoformat(parsed_date)
|
|
||||||
except ValueError:
|
|
||||||
raise HTTPException(status_code=400, detail="Invalid date format, expected YYYY-MM-DD")
|
|
||||||
tx_kwargs["date"] = parsed_date
|
|
||||||
tx = Transaction(**tx_kwargs)
|
|
||||||
|
|
||||||
# Attach categories if provided (and owned by user)
|
|
||||||
if payload.category_ids:
|
|
||||||
res = await session.execute(
|
|
||||||
select(Category).where(
|
|
||||||
Category.user_id == user.id, Category.id.in_(payload.category_ids)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
categories = list(res.scalars())
|
|
||||||
if len(categories) != len(set(payload.category_ids)):
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=400,
|
|
||||||
detail="Duplicate category IDs provided or one or more categories not found"
|
|
||||||
)
|
|
||||||
tx.categories = categories
|
|
||||||
|
|
||||||
session.add(tx)
|
|
||||||
await session.commit()
|
|
||||||
await session.refresh(tx)
|
|
||||||
# Ensure categories are loaded
|
|
||||||
await session.refresh(tx, attribute_names=["categories"])
|
|
||||||
return _to_read_model(tx)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/", response_model=List[TransactionRead])
|
|
||||||
async def list_transactions(
|
|
||||||
start_date: Optional[date] = None,
|
|
||||||
end_date: Optional[date] = None,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
cond = [Transaction.user_id == user.id]
|
|
||||||
if start_date is not None:
|
|
||||||
cond.append(Transaction.date >= start_date)
|
|
||||||
if end_date is not None:
|
|
||||||
cond.append(Transaction.date <= end_date)
|
|
||||||
res = await session.execute(
|
|
||||||
select(Transaction).where(and_(*cond)).order_by(Transaction.date, Transaction.id)
|
|
||||||
)
|
|
||||||
txs = list(res.scalars())
|
|
||||||
# Eagerly load categories for each transaction
|
|
||||||
for tx in txs:
|
|
||||||
await session.refresh(tx, attribute_names=["categories"])
|
|
||||||
return [_to_read_model(tx) for tx in txs]
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/balance_series")
|
|
||||||
async def get_balance_series(
|
|
||||||
start_date: Optional[date] = None,
|
|
||||||
end_date: Optional[date] = None,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
cond = [Transaction.user_id == user.id]
|
|
||||||
if start_date is not None:
|
|
||||||
cond.append(Transaction.date >= start_date)
|
|
||||||
if end_date is not None:
|
|
||||||
cond.append(Transaction.date <= end_date)
|
|
||||||
res = await session.execute(
|
|
||||||
select(Transaction).where(and_(*cond)).order_by(Transaction.date, Transaction.id)
|
|
||||||
)
|
|
||||||
txs = list(res.scalars())
|
|
||||||
# Group by date and accumulate
|
|
||||||
daily = {}
|
|
||||||
for tx in txs:
|
|
||||||
key = tx.date.isoformat() if hasattr(tx.date, 'isoformat') else str(tx.date)
|
|
||||||
daily[key] = daily.get(key, 0.0) + float(tx.amount)
|
|
||||||
# Build cumulative series sorted by date
|
|
||||||
series = []
|
|
||||||
running = 0.0
|
|
||||||
for d in sorted(daily.keys()):
|
|
||||||
running += daily[d]
|
|
||||||
series.append({"date": d, "balance": running})
|
|
||||||
return series
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{transaction_id}", response_model=TransactionRead)
|
|
||||||
async def get_transaction(
|
|
||||||
transaction_id: int,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
res = await session.execute(
|
|
||||||
select(Transaction).where(
|
|
||||||
Transaction.id == transaction_id, Transaction.user_id == user.id
|
|
||||||
)
|
|
||||||
)
|
|
||||||
tx: Optional[Transaction] = res.scalar_one_or_none()
|
|
||||||
if not tx:
|
|
||||||
raise HTTPException(status_code=404, detail="Transaction not found")
|
|
||||||
await session.refresh(tx, attribute_names=["categories"])
|
|
||||||
return _to_read_model(tx)
|
|
||||||
|
|
||||||
|
|
||||||
@router.patch("/{transaction_id}/edit", response_model=TransactionRead)
|
|
||||||
async def update_transaction(
|
|
||||||
transaction_id: int,
|
|
||||||
payload: TransactionUpdate,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
res = await session.execute(
|
|
||||||
select(Transaction).where(
|
|
||||||
Transaction.id == transaction_id, Transaction.user_id == user.id
|
|
||||||
)
|
|
||||||
)
|
|
||||||
tx: Optional[Transaction] = res.scalar_one_or_none()
|
|
||||||
if not tx:
|
|
||||||
raise HTTPException(status_code=404, detail="Transaction not found")
|
|
||||||
|
|
||||||
if payload.amount is not None:
|
|
||||||
tx.amount = payload.amount
|
|
||||||
if payload.description is not None:
|
|
||||||
tx.description = payload.description
|
|
||||||
if payload.date is not None:
|
|
||||||
new_date = payload.date
|
|
||||||
if isinstance(new_date, str):
|
|
||||||
try:
|
|
||||||
new_date = date.fromisoformat(new_date)
|
|
||||||
except ValueError:
|
|
||||||
raise HTTPException(status_code=400, detail="Invalid date format, expected YYYY-MM-DD")
|
|
||||||
tx.date = new_date
|
|
||||||
|
|
||||||
if payload.category_ids is not None:
|
|
||||||
# Preload categories to avoid async lazy-load during assignment
|
|
||||||
await session.refresh(tx, attribute_names=["categories"])
|
|
||||||
if payload.category_ids:
|
|
||||||
# Check for duplicate category IDs in the payload
|
|
||||||
if len(payload.category_ids) != len(set(payload.category_ids)):
|
|
||||||
raise HTTPException(status_code=400, detail="Duplicate category IDs in payload")
|
|
||||||
res = await session.execute(
|
|
||||||
select(Category).where(
|
|
||||||
Category.user_id == user.id, Category.id.in_(payload.category_ids)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
categories = list(res.scalars())
|
|
||||||
if len(categories) != len(payload.category_ids):
|
|
||||||
raise HTTPException(status_code=400, detail="One or more categories not found")
|
|
||||||
tx.categories = categories
|
|
||||||
else:
|
|
||||||
tx.categories = []
|
|
||||||
|
|
||||||
await session.commit()
|
|
||||||
await session.refresh(tx, attribute_names=["categories"])
|
|
||||||
return _to_read_model(tx)
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/{transaction_id}/delete", status_code=status.HTTP_204_NO_CONTENT)
|
|
||||||
async def delete_transaction(
|
|
||||||
transaction_id: int,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
res = await session.execute(
|
|
||||||
select(Transaction).where(
|
|
||||||
Transaction.id == transaction_id, Transaction.user_id == user.id
|
|
||||||
)
|
|
||||||
)
|
|
||||||
tx = res.scalar_one_or_none()
|
|
||||||
if not tx:
|
|
||||||
raise HTTPException(status_code=404, detail="Transaction not found")
|
|
||||||
|
|
||||||
await session.delete(tx)
|
|
||||||
await session.commit()
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/{transaction_id}/categories/{category_id}", response_model=TransactionRead)
|
|
||||||
async def assign_category(
|
|
||||||
transaction_id: int,
|
|
||||||
category_id: int,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
# Load transaction and category ensuring ownership
|
|
||||||
res_tx = await session.execute(
|
|
||||||
select(Transaction).where(
|
|
||||||
Transaction.id == transaction_id, Transaction.user_id == user.id
|
|
||||||
)
|
|
||||||
)
|
|
||||||
tx: Optional[Transaction] = res_tx.scalar_one_or_none()
|
|
||||||
if not tx:
|
|
||||||
raise HTTPException(status_code=404, detail="Transaction not found")
|
|
||||||
|
|
||||||
res_cat = await session.execute(
|
|
||||||
select(Category).where(Category.id == category_id, Category.user_id == user.id)
|
|
||||||
)
|
|
||||||
cat: Optional[Category] = res_cat.scalar_one_or_none()
|
|
||||||
if not cat:
|
|
||||||
raise HTTPException(status_code=404, detail="Category not found")
|
|
||||||
|
|
||||||
await session.refresh(tx, attribute_names=["categories"])
|
|
||||||
if cat not in tx.categories:
|
|
||||||
tx.categories.append(cat)
|
|
||||||
await session.commit()
|
|
||||||
await session.refresh(tx, attribute_names=["categories"])
|
|
||||||
return _to_read_model(tx)
|
|
||||||
|
|
||||||
|
|
||||||
@router.delete("/{transaction_id}/categories/{category_id}", response_model=TransactionRead)
|
|
||||||
async def unassign_category(
|
|
||||||
transaction_id: int,
|
|
||||||
category_id: int,
|
|
||||||
session: AsyncSession = Depends(get_async_session),
|
|
||||||
user: User = Depends(current_active_user),
|
|
||||||
):
|
|
||||||
res_tx = await session.execute(
|
|
||||||
select(Transaction).where(
|
|
||||||
Transaction.id == transaction_id, Transaction.user_id == user.id
|
|
||||||
)
|
|
||||||
)
|
|
||||||
tx: Optional[Transaction] = res_tx.scalar_one_or_none()
|
|
||||||
if not tx:
|
|
||||||
raise HTTPException(status_code=404, detail="Transaction not found")
|
|
||||||
|
|
||||||
res_cat = await session.execute(
|
|
||||||
select(Category).where(Category.id == category_id, Category.user_id == user.id)
|
|
||||||
)
|
|
||||||
cat: Optional[Category] = res_cat.scalar_one_or_none()
|
|
||||||
if not cat:
|
|
||||||
raise HTTPException(status_code=404, detail="Category not found")
|
|
||||||
|
|
||||||
await session.refresh(tx, attribute_names=["categories"])
|
|
||||||
if cat in tx.categories:
|
|
||||||
tx.categories.remove(cat)
|
|
||||||
await session.commit()
|
|
||||||
await session.refresh(tx, attribute_names=["categories"])
|
|
||||||
return _to_read_model(tx)
|
|
||||||
@@ -1,176 +0,0 @@
|
|||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from datetime import datetime
|
|
||||||
from pythonjsonlogger import jsonlogger
|
|
||||||
|
|
||||||
from fastapi import Depends, FastAPI, HTTPException
|
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
|
||||||
from prometheus_fastapi_instrumentator import Instrumentator, metrics
|
|
||||||
from starlette.requests import Request
|
|
||||||
|
|
||||||
from app.services.prometheus import number_of_users, number_of_transactions
|
|
||||||
|
|
||||||
from app.services import bank_scraper
|
|
||||||
from app.workers.celery_tasks import load_transactions, load_all_transactions
|
|
||||||
from app.models.user import User, OAuthAccount
|
|
||||||
|
|
||||||
from app.services.user_service import current_active_verified_user
|
|
||||||
from app.api.auth import router as auth_router
|
|
||||||
from app.api.csas import router as csas_router
|
|
||||||
from app.api.categories import router as categories_router
|
|
||||||
from app.api.transactions import router as transactions_router
|
|
||||||
from app.api.exchange_rates import router as exchange_rates_router
|
|
||||||
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users, get_oauth_provider, \
|
|
||||||
UserManager, get_jwt_strategy
|
|
||||||
from app.core.security import extract_bearer_token, is_token_revoked, decode_and_verify_jwt
|
|
||||||
from app.services.user_service import SECRET
|
|
||||||
|
|
||||||
from fastapi import FastAPI
|
|
||||||
import sentry_sdk
|
|
||||||
from fastapi_users.db import SQLAlchemyUserDatabase
|
|
||||||
from app.core.db import async_session_maker, engine
|
|
||||||
from app.core.base import Base
|
|
||||||
|
|
||||||
sentry_sdk.init(
|
|
||||||
dsn=os.getenv("SENTRY_DSN"),
|
|
||||||
send_default_pii=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
fastApi = FastAPI()
|
|
||||||
|
|
||||||
# CORS for frontend dev server
|
|
||||||
fastApi.add_middleware(
|
|
||||||
CORSMiddleware,
|
|
||||||
allow_origins=[
|
|
||||||
"http://localhost:5173",
|
|
||||||
"http://127.0.0.1:5173",
|
|
||||||
os.getenv("FRONTEND_DOMAIN_SCHEME", "")
|
|
||||||
],
|
|
||||||
allow_credentials=True,
|
|
||||||
allow_methods=["*"],
|
|
||||||
allow_headers=["*"],
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
if not os.getenv("PYTEST_RUN_CONFIG"):
|
|
||||||
prometheus = Instrumentator().instrument(fastApi)
|
|
||||||
# Register custom metrics
|
|
||||||
prometheus.add(number_of_users()).add(number_of_transactions())
|
|
||||||
prometheus.expose(
|
|
||||||
fastApi,
|
|
||||||
endpoint="/metrics",
|
|
||||||
include_in_schema=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
fastApi.include_router(auth_router)
|
|
||||||
fastApi.include_router(categories_router)
|
|
||||||
fastApi.include_router(transactions_router)
|
|
||||||
fastApi.include_router(exchange_rates_router)
|
|
||||||
from app.api.mock_bank import router as mock_bank_router
|
|
||||||
fastApi.include_router(mock_bank_router)
|
|
||||||
|
|
||||||
for h in list(logging.root.handlers):
|
|
||||||
logging.root.removeHandler(h)
|
|
||||||
|
|
||||||
_log_handler = logging.StreamHandler(sys.stdout)
|
|
||||||
_formatter = jsonlogger.JsonFormatter(
|
|
||||||
fmt='%(asctime)s %(levelname)s %(name)s %(message)s %(pathname)s %(lineno)d %(process)d %(thread)d'
|
|
||||||
)
|
|
||||||
_log_handler.setFormatter(_formatter)
|
|
||||||
|
|
||||||
logging.root.setLevel(logging.INFO)
|
|
||||||
logging.root.addHandler(_log_handler)
|
|
||||||
|
|
||||||
for _name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
|
|
||||||
_logger = logging.getLogger(_name)
|
|
||||||
_logger.handlers = [_log_handler]
|
|
||||||
_logger.propagate = True
|
|
||||||
|
|
||||||
|
|
||||||
@fastApi.middleware("http")
|
|
||||||
async def auth_guard(request: Request, call_next):
|
|
||||||
# Enforce revoked/expired JWTs are rejected globally
|
|
||||||
token = extract_bearer_token(request)
|
|
||||||
if token:
|
|
||||||
from fastapi import Response, status as _status
|
|
||||||
# Deny if token is revoked
|
|
||||||
if is_token_revoked(token):
|
|
||||||
return Response(status_code=_status.HTTP_401_UNAUTHORIZED)
|
|
||||||
# Deny if token is expired or invalid
|
|
||||||
try:
|
|
||||||
decode_and_verify_jwt(token, SECRET)
|
|
||||||
except Exception:
|
|
||||||
return Response(status_code=_status.HTTP_401_UNAUTHORIZED)
|
|
||||||
return await call_next(request)
|
|
||||||
|
|
||||||
|
|
||||||
@fastApi.middleware("http")
|
|
||||||
async def log_traffic(request: Request, call_next):
|
|
||||||
start_time = datetime.now()
|
|
||||||
response = await call_next(request)
|
|
||||||
process_time = (datetime.now() - start_time).total_seconds()
|
|
||||||
client_host = request.client.host
|
|
||||||
log_params = {
|
|
||||||
"request_method": request.method,
|
|
||||||
"request_url": str(request.url),
|
|
||||||
"request_size": request.headers.get("content-length"),
|
|
||||||
"request_headers": dict(request.headers),
|
|
||||||
"response_status": response.status_code,
|
|
||||||
"response_size": response.headers.get("content-length"),
|
|
||||||
"response_headers": dict(response.headers),
|
|
||||||
"process_time": process_time,
|
|
||||||
"client_host": client_host
|
|
||||||
}
|
|
||||||
logging.getLogger(__name__).info("http_request", extra=log_params)
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
fastApi.include_router(
|
|
||||||
fastapi_users.get_oauth_router(
|
|
||||||
get_oauth_provider("MojeID"),
|
|
||||||
auth_backend,
|
|
||||||
"SECRET",
|
|
||||||
associate_by_email=True,
|
|
||||||
redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000") + "/auth/mojeid/callback",
|
|
||||||
),
|
|
||||||
prefix="/auth/mojeid",
|
|
||||||
tags=["auth"],
|
|
||||||
)
|
|
||||||
|
|
||||||
fastApi.include_router(
|
|
||||||
fastapi_users.get_oauth_router(
|
|
||||||
get_oauth_provider("BankID"),
|
|
||||||
auth_backend,
|
|
||||||
"SECRET",
|
|
||||||
associate_by_email=True,
|
|
||||||
redirect_url=os.getenv("FRONTEND_DOMAIN_SCHEME", "http://localhost:3000") + "/auth/bankid/callback",
|
|
||||||
),
|
|
||||||
prefix="/auth/bankid",
|
|
||||||
tags=["auth"],
|
|
||||||
)
|
|
||||||
|
|
||||||
fastApi.include_router(csas_router)
|
|
||||||
|
|
||||||
|
|
||||||
# Liveness/root endpoint
|
|
||||||
@fastApi.get("/", include_in_schema=False)
|
|
||||||
async def root():
|
|
||||||
return {"status": "ok"}
|
|
||||||
|
|
||||||
|
|
||||||
@fastApi.get("/authenticated-route")
|
|
||||||
async def authenticated_route(user: User = Depends(current_active_verified_user)):
|
|
||||||
return {"message": f"Hello {user.email}!"}
|
|
||||||
|
|
||||||
|
|
||||||
@fastApi.get("/_cron", include_in_schema=False)
|
|
||||||
async def handle_cron(request: Request):
|
|
||||||
# endpoint accessed by Clodflare => return 404
|
|
||||||
if request.headers.get("cf-connecting-ip"):
|
|
||||||
raise HTTPException(status_code=404)
|
|
||||||
|
|
||||||
logging.info("[Cron] Triggering scheduled tasks via HTTP endpoint")
|
|
||||||
task = load_all_transactions.delay()
|
|
||||||
return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
from typing import Optional
|
|
||||||
import re
|
|
||||||
import jwt
|
|
||||||
from fastapi import Request
|
|
||||||
|
|
||||||
# Simple in-memory revocation store for revoked JWT tokens.
|
|
||||||
#
|
|
||||||
# Limitations:
|
|
||||||
# - All revoked tokens will be lost if the process restarts (data loss on restart).
|
|
||||||
# - Not suitable for multi-instance deployments: the revocation list is not shared between instances.
|
|
||||||
# A token revoked in one instance will not be recognized as revoked in others.
|
|
||||||
#
|
|
||||||
# For production, use a persistent and shared store (e.g., Redis or a database).
|
|
||||||
_REVOKED_TOKENS: set[str] = set()
|
|
||||||
|
|
||||||
# Bearer token regex
|
|
||||||
_BEARER_RE = re.compile(r"^[Bb]earer\s+(.+)$")
|
|
||||||
|
|
||||||
|
|
||||||
def extract_bearer_token(request: Request) -> Optional[str]:
|
|
||||||
auth = request.headers.get("authorization")
|
|
||||||
if not auth:
|
|
||||||
return None
|
|
||||||
m = _BEARER_RE.match(auth)
|
|
||||||
if not m:
|
|
||||||
return None
|
|
||||||
return m.group(1).strip()
|
|
||||||
|
|
||||||
|
|
||||||
def revoke_token(token: str) -> None:
|
|
||||||
if token:
|
|
||||||
_REVOKED_TOKENS.add(token)
|
|
||||||
|
|
||||||
|
|
||||||
def is_token_revoked(token: str) -> bool:
|
|
||||||
return token in _REVOKED_TOKENS
|
|
||||||
|
|
||||||
|
|
||||||
def decode_and_verify_jwt(token: str, secret: str) -> dict:
|
|
||||||
"""
|
|
||||||
Decode the JWT using the shared secret, verifying expiration and signature.
|
|
||||||
Audience is not verified here to be compatible with fastapi-users default tokens.
|
|
||||||
Raises jwt.ExpiredSignatureError if expired.
|
|
||||||
Raises jwt.InvalidTokenError for other issues.
|
|
||||||
Returns the decoded payload dict on success.
|
|
||||||
"""
|
|
||||||
return jwt.decode(
|
|
||||||
token,
|
|
||||||
secret,
|
|
||||||
algorithms=["HS256"],
|
|
||||||
options={"verify_aud": False},
|
|
||||||
) # verify_exp is True by default
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
from fastapi_users_db_sqlalchemy import GUID
|
|
||||||
from sqlalchemy import Column, Integer, String, ForeignKey, Table, UniqueConstraint
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
|
|
||||||
from app.core.base import Base
|
|
||||||
|
|
||||||
association_table = Table(
|
|
||||||
"category_transaction",
|
|
||||||
Base.metadata,
|
|
||||||
Column("category_id", Integer, ForeignKey("categories.id", ondelete="CASCADE"), primary_key=True),
|
|
||||||
Column("transaction_id", Integer, ForeignKey("transaction.id", ondelete="CASCADE"), primary_key=True)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Category(Base):
|
|
||||||
__tablename__ = "categories"
|
|
||||||
__table_args__ = (
|
|
||||||
UniqueConstraint("name", "user_id", name="uix_name_user_id"),
|
|
||||||
)
|
|
||||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
|
||||||
name = Column(String(length=100), nullable=False)
|
|
||||||
description = Column(String(length=255), nullable=True)
|
|
||||||
user_id = Column(GUID, ForeignKey("user.id"), nullable=False)
|
|
||||||
user = relationship("User", back_populates="categories")
|
|
||||||
transactions = relationship("Transaction", secondary=association_table, back_populates="categories")
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
import os
|
|
||||||
from fastapi_users_db_sqlalchemy import GUID
|
|
||||||
from sqlalchemy import Column, Integer, String, Float, ForeignKey, Date, func
|
|
||||||
from sqlalchemy.orm import relationship
|
|
||||||
from sqlalchemy_utils import EncryptedType
|
|
||||||
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
|
|
||||||
|
|
||||||
from app.core.base import Base
|
|
||||||
from app.models.categories import association_table
|
|
||||||
|
|
||||||
SECRET_KEY = os.environ.get("DB_ENCRYPTION_KEY", "localdev")
|
|
||||||
|
|
||||||
|
|
||||||
class Transaction(Base):
|
|
||||||
__tablename__ = "transaction"
|
|
||||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
|
||||||
amount = Column(EncryptedType(Float, SECRET_KEY, engine=FernetEngine), nullable=False)
|
|
||||||
description = Column(EncryptedType(String(length=255), SECRET_KEY, engine=FernetEngine), nullable=True)
|
|
||||||
date = Column(Date, nullable=False, server_default=func.current_date())
|
|
||||||
user_id = Column(GUID, ForeignKey("user.id"), nullable=False)
|
|
||||||
|
|
||||||
# Relationship
|
|
||||||
user = relationship("User", back_populates="transactions")
|
|
||||||
categories = relationship("Category", secondary=association_table, back_populates="transactions", passive_deletes=True)
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
from sqlalchemy import Column, String
|
|
||||||
from sqlalchemy.orm import relationship, mapped_column, Mapped
|
|
||||||
from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyBaseOAuthAccountTableUUID
|
|
||||||
from sqlalchemy.sql.sqltypes import JSON
|
|
||||||
|
|
||||||
from app.core.base import Base
|
|
||||||
|
|
||||||
|
|
||||||
class OAuthAccount(SQLAlchemyBaseOAuthAccountTableUUID, Base):
|
|
||||||
# BankID token is longer than default
|
|
||||||
access_token: Mapped[str] = mapped_column(String(length=4096), nullable=False)
|
|
||||||
|
|
||||||
|
|
||||||
class User(SQLAlchemyBaseUserTableUUID, Base):
|
|
||||||
first_name = Column(String(length=100), nullable=True)
|
|
||||||
last_name = Column(String(length=100), nullable=True)
|
|
||||||
oauth_accounts = relationship("OAuthAccount", lazy="joined")
|
|
||||||
config = Column(JSON, default={})
|
|
||||||
|
|
||||||
# Relationship
|
|
||||||
transactions = relationship("Transaction", back_populates="user")
|
|
||||||
categories = relationship("Category", back_populates="user")
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
import secrets
|
|
||||||
from typing import Optional, Literal
|
|
||||||
|
|
||||||
from httpx_oauth.oauth2 import T
|
|
||||||
|
|
||||||
from app.oauth.custom_openid import CustomOpenID
|
|
||||||
|
|
||||||
|
|
||||||
class BankID(CustomOpenID):
|
|
||||||
def __init__(self, client_id: str, client_secret: str):
|
|
||||||
super().__init__(
|
|
||||||
client_id,
|
|
||||||
client_secret,
|
|
||||||
"https://oidc.sandbox.bankid.cz/.well-known/openid-configuration",
|
|
||||||
"BankID",
|
|
||||||
base_scopes=["openid", "profile.email", "profile.name"],
|
|
||||||
)
|
|
||||||
|
|
||||||
async def get_user_info(self, token: str) -> dict:
|
|
||||||
info = await self.get_profile(token)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"first_name": info.get("given_name"),
|
|
||||||
"last_name": info.get("family_name"),
|
|
||||||
}
|
|
||||||
|
|
||||||
async def get_authorization_url(
|
|
||||||
self,
|
|
||||||
redirect_uri: str,
|
|
||||||
state: Optional[str] = None,
|
|
||||||
scope: Optional[list[str]] = None,
|
|
||||||
code_challenge: Optional[str] = None,
|
|
||||||
code_challenge_method: Optional[Literal["plain", "S256"]] = None,
|
|
||||||
extras_params: Optional[T] = None,
|
|
||||||
) -> str:
|
|
||||||
if extras_params is None:
|
|
||||||
extras_params = {}
|
|
||||||
|
|
||||||
# BankID requires random nonce parameter for security
|
|
||||||
# https://developer.bankid.cz/docs/security_sep
|
|
||||||
extras_params["nonce"] = secrets.token_urlsafe()
|
|
||||||
|
|
||||||
return await super().get_authorization_url(
|
|
||||||
redirect_uri,
|
|
||||||
state,
|
|
||||||
scope,
|
|
||||||
code_challenge,
|
|
||||||
code_challenge_method,
|
|
||||||
extras_params,
|
|
||||||
)
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
import os
|
|
||||||
from os.path import dirname, join
|
|
||||||
from typing import Optional, Any
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
from httpx_oauth.exceptions import GetProfileError
|
|
||||||
from httpx_oauth.oauth2 import BaseOAuth2
|
|
||||||
|
|
||||||
import app.services.db
|
|
||||||
|
|
||||||
BASE_DIR = dirname(__file__)
|
|
||||||
certs = (
|
|
||||||
join(BASE_DIR, "public_key.pem"),
|
|
||||||
join(BASE_DIR, "private_key.key")
|
|
||||||
)
|
|
||||||
|
|
||||||
class CSASOAuth(BaseOAuth2):
|
|
||||||
|
|
||||||
def __init__(self, client_id: str, client_secret: str):
|
|
||||||
super().__init__(
|
|
||||||
client_id,
|
|
||||||
client_secret,
|
|
||||||
base_scopes=["aisp"],
|
|
||||||
authorize_endpoint="https://webapi.developers.erstegroup.com/api/csas/sandbox/v1/sandbox-idp/auth",
|
|
||||||
access_token_endpoint="https://webapi.developers.erstegroup.com/api/csas/sandbox/v1/sandbox-idp/token",
|
|
||||||
refresh_token_endpoint="https://webapi.developers.erstegroup.com/api/csas/sandbox/v1/sandbox-idp/token"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
from httpx_oauth.clients.openid import OpenID
|
|
||||||
|
|
||||||
|
|
||||||
class CustomOpenID(OpenID):
|
|
||||||
async def get_user_info(self, token: str) -> dict:
|
|
||||||
raise NotImplementedError()
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
import json
|
|
||||||
from typing import Optional, Literal, Any
|
|
||||||
|
|
||||||
from httpx_oauth.oauth2 import T
|
|
||||||
|
|
||||||
from app.oauth.custom_openid import CustomOpenID
|
|
||||||
|
|
||||||
|
|
||||||
class MojeIDOAuth(CustomOpenID):
|
|
||||||
def __init__(self, client_id: str, client_secret: str):
|
|
||||||
super().__init__(
|
|
||||||
client_id,
|
|
||||||
client_secret,
|
|
||||||
"https://mojeid.cz/.well-known/openid-configuration/",
|
|
||||||
"MojeID",
|
|
||||||
base_scopes=["openid", "email", "profile"],
|
|
||||||
)
|
|
||||||
|
|
||||||
async def get_user_info(self, token: str) -> Optional[Any]:
|
|
||||||
info = await self.get_profile(token)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"first_name": info.get("given_name"),
|
|
||||||
"last_name": info.get("family_name"),
|
|
||||||
}
|
|
||||||
|
|
||||||
async def get_authorization_url(
|
|
||||||
self,
|
|
||||||
redirect_uri: str,
|
|
||||||
state: Optional[str] = None,
|
|
||||||
scope: Optional[list[str]] = None,
|
|
||||||
code_challenge: Optional[str] = None,
|
|
||||||
code_challenge_method: Optional[Literal["plain", "S256"]] = None,
|
|
||||||
extras_params: Optional[T] = None,
|
|
||||||
) -> str:
|
|
||||||
required_fields = {
|
|
||||||
'id_token': {
|
|
||||||
'name': {'essential': True},
|
|
||||||
'given_name': {'essential': True},
|
|
||||||
'family_name': {'essential': True},
|
|
||||||
'email': {'essential': True},
|
|
||||||
'mojeid_valid': {'essential': True},
|
|
||||||
}}
|
|
||||||
|
|
||||||
if extras_params is None:
|
|
||||||
extras_params = {}
|
|
||||||
extras_params["claims"] = json.dumps(required_fields)
|
|
||||||
|
|
||||||
return await super().get_authorization_url(
|
|
||||||
redirect_uri,
|
|
||||||
state,
|
|
||||||
scope,
|
|
||||||
code_challenge,
|
|
||||||
code_challenge_method,
|
|
||||||
extras_params,
|
|
||||||
)
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
-----BEGIN RSA PRIVATE KEY-----
|
|
||||||
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDcr/oxgV074ETd
|
|
||||||
DkP/0l8LFnRofru+m2wNNG/ttVCioTqwnvR4oYxwq3U9qIBsT0D+Rx/Ef7qcpzqf
|
|
||||||
/w9xt6Hosdv6I5jMHGaVQqLiPuV26/a7WvcmU+PpYuEBmbBHjGVJRBwgPtlUW1VL
|
|
||||||
M8Pht9YiaagEKvFa6SUidZLfPv+ECohqgH4mgMrEcG/BTnry0/5xQdadRC9o25cl
|
|
||||||
NtZIesS5GPeelhggFTkbh/FaxvMXhIAaRXT61cnxgxtfM71h5ObX5Lwle9z5a+Tw
|
|
||||||
xgQhSQq1jbHALYvTwsc4Q/NQGXpGNWy599sb7dg5AkPFSSF4ceXBo/2jOaZCqWrt
|
|
||||||
FVONZ+blAgMBAAECggEBAJwQbrRXsaFIRiq1jez5znC+3m+PQCHZM55a+NR3pqB7
|
|
||||||
uE9y+ZvdUr3S4sRJxxfRLDsl/Rcu5L8nm9PNwhQ/MmamcNQCHGoro3fmed3ZcNia
|
|
||||||
og94ktMt/DztygUhtIHEjVQ0sFc1WufG9xiJcPrM0MfhRAo+fBQ4UCSAVO8/U98B
|
|
||||||
a4yukrPNeEA03hyjLB9W41pNQfyOtAHqzwDg9Q5XVaGMCLZT1bjCIquUcht5iMva
|
|
||||||
tiw3cwdiYIklLTzTCsPPK9A/AlWZyUXL8KxtN0mU0kkwlXqASoXZ2nqdkhjRye/V
|
|
||||||
3JXOmlDtDaJCqWDpH2gHLxMCl7OjfPvuD66bAT3H63kCgYEA5zxW/l6oI3gwYW7+
|
|
||||||
j6rEjA2n8LikVnyW2e/PZ7pxBH3iBFe2DHx/imeqd/0IzixcM1zZT/V+PTFPQizG
|
|
||||||
lOU7stN6Zg/LuRdxneHPyLWCimJP7BBJCWyJkuxKy9psokyBhGSLR/phL3fP7UkB
|
|
||||||
o2I3vGmTFu5A0FzXcNH/cXPMdy8CgYEA9FJw3kyzXlInhJ6Cd63mckLPLYDArUsm
|
|
||||||
THBoeH2CVTBS5g0bCbl7N1ZxUoYwZPD4lg5V0nWhZALGf+85ULSjX03PMf1cc6WW
|
|
||||||
EIbZIo9hX+mGRa/FudDd+TlbtBnn0jucwABuLQi9mIepE55Hu9tw5/FT3cHeZVQc
|
|
||||||
cC0T6ulVvisCgYBCzFeFG+sOdAXl356B+h7VJozBKVWv9kXNp00O9fj4BzVnc78P
|
|
||||||
VFezr8a66snEZWQtIkFUq+JP4xK2VyD2mlHoktbk7OM5EOCtbzILFQQk3cmgtAOl
|
|
||||||
SUlkvAXPZcXEDL3NdQ4XOOkiQUY7kb97Z0AamZT4JtNqXaeO29si9wS12QKBgHYg
|
|
||||||
Hd3864Qg6GZgVOgUNiTsVErFw2KFwQCYIIqQ9CDH+myrzXTILuC0dJnXszI6p5W1
|
|
||||||
XJ0irmMyTFKykN2KWKrNbe3Xd4mad5GKARWKiSPcPkUXFNwgNhI3PzU2iTTGCaVz
|
|
||||||
D9HKNhC3FnIbxsb29AHQViITh7kqD43U3ZpoMkJ9AoGAZ+sg+CPfuo3ZMpbcdb3B
|
|
||||||
ZX2UhAvNKxgHvNnHOjO+pvaM7HiH+BT0650brfBWQ0nTG1dt18mCevVk1UM/5hO9
|
|
||||||
AtZw06vCLOJ3p3qpgkSlRZ1H7VokG9M8Od0zXqtJrmeLeBq7dfuDisYOuA+NUEbJ
|
|
||||||
UM/UHByieS6ywetruz0LpM0=
|
|
||||||
-----END RSA PRIVATE KEY-----
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user