mirror of
https://github.com/dat515-2025/Group-8.git
synced 2026-03-22 15:12:08 +01:00
Compare commits
42 Commits
add_more_t
...
573404dead
| Author | SHA1 | Date | |
|---|---|---|---|
| 573404dead | |||
| d57dd82a64 | |||
| 50f37c1161 | |||
| ae22d2ee5f | |||
| 509608f8c9 | |||
| ed723d1d13 | |||
| b0dee5e289 | |||
| 640da2ee04 | |||
| ab9aefd140 | |||
|
|
4eaf46e77e | ||
|
|
a30ae4d010 | ||
|
|
ef26e88713 | ||
|
|
2e1dddb4f8 | ||
|
|
25e587cea8 | ||
|
|
3cdefc33fc | ||
|
|
5954e56956 | ||
|
|
8575ef8ff5 | ||
| c53e314b2a | |||
| c0bc44622f | |||
| 3d31ff4631 | |||
|
|
8b92b9bd18 | ||
|
|
3d26ed6a62 | ||
|
|
67b44539f2 | ||
|
|
ff9cc712db | ||
| dc7ce9e6a1 | |||
| 188cdf5727 | |||
| 4cf0d2a981 | |||
| 9986cce8f9 | |||
| b3b5717e9e | |||
|
|
1da927dc07 | ||
| 537d050080 | |||
| 1e4f342176 | |||
| c62e0adcf3 | |||
| 24d86abfc4 | |||
| 21305f18e2 | |||
| e708f7b18b | |||
| f58083870f | |||
| ca8287cd8b | |||
|
|
ed3e6329dd | ||
|
|
a214e2cd8b | ||
| 6c8d2202b5 | |||
|
|
b480734fee |
4
.github/workflows/deploy-pr.yaml
vendored
4
.github/workflows/deploy-pr.yaml
vendored
@@ -85,6 +85,7 @@ jobs:
|
|||||||
DOMAIN_SCHEME: "${{ needs.get_urls.outputs.backend_url_scheme }}"
|
DOMAIN_SCHEME: "${{ needs.get_urls.outputs.backend_url_scheme }}"
|
||||||
FRONTEND_DOMAIN: "${{ needs.get_urls.outputs.frontend_url }}"
|
FRONTEND_DOMAIN: "${{ needs.get_urls.outputs.frontend_url }}"
|
||||||
FRONTEND_DOMAIN_SCHEME: "${{ needs.get_urls.outputs.frontend_url_scheme }}"
|
FRONTEND_DOMAIN_SCHEME: "${{ needs.get_urls.outputs.frontend_url_scheme }}"
|
||||||
|
UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
|
||||||
run: |
|
run: |
|
||||||
PR=${{ github.event.pull_request.number }}
|
PR=${{ github.event.pull_request.number }}
|
||||||
RELEASE=myapp-pr-$PR
|
RELEASE=myapp-pr-$PR
|
||||||
@@ -102,7 +103,8 @@ jobs:
|
|||||||
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
||||||
--set-string database.password="$DB_PASSWORD" \
|
--set-string database.password="$DB_PASSWORD" \
|
||||||
--set-string database.encryptionSecret="$PR" \
|
--set-string database.encryptionSecret="$PR" \
|
||||||
--set-string app.name="finance-tracker-pr-$PR"
|
--set-string app.name="finance-tracker-pr-$PR" \
|
||||||
|
--set-string unirate.key="$UNIRATE_API_KEY"
|
||||||
|
|
||||||
- name: Post preview URLs as PR comment
|
- name: Post preview URLs as PR comment
|
||||||
uses: actions/github-script@v7
|
uses: actions/github-script@v7
|
||||||
|
|||||||
20
.github/workflows/deploy-prod.yaml
vendored
20
.github/workflows/deploy-prod.yaml
vendored
@@ -27,6 +27,7 @@ jobs:
|
|||||||
|
|
||||||
build:
|
build:
|
||||||
name: Build and push image (reusable)
|
name: Build and push image (reusable)
|
||||||
|
needs: [test]
|
||||||
uses: ./.github/workflows/build-image.yaml
|
uses: ./.github/workflows/build-image.yaml
|
||||||
with:
|
with:
|
||||||
mode: prod
|
mode: prod
|
||||||
@@ -36,6 +37,7 @@ jobs:
|
|||||||
|
|
||||||
get_urls:
|
get_urls:
|
||||||
name: Generate Production URLs
|
name: Generate Production URLs
|
||||||
|
needs: [test]
|
||||||
uses: ./.github/workflows/url_generator.yml
|
uses: ./.github/workflows/url_generator.yml
|
||||||
with:
|
with:
|
||||||
mode: prod
|
mode: prod
|
||||||
@@ -92,6 +94,14 @@ jobs:
|
|||||||
CSAS_CLIENT_ID: ${{ secrets.CSAS_CLIENT_ID }}
|
CSAS_CLIENT_ID: ${{ secrets.CSAS_CLIENT_ID }}
|
||||||
CSAS_CLIENT_SECRET: ${{ secrets.CSAS_CLIENT_SECRET }}
|
CSAS_CLIENT_SECRET: ${{ secrets.CSAS_CLIENT_SECRET }}
|
||||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||||
|
SMTP_HOST: ${{ secrets.SMTP_HOST }}
|
||||||
|
SMTP_PORT: ${{ secrets.SMTP_PORT }}
|
||||||
|
SMTP_USERNAME: ${{ secrets.SMTP_USERNAME }}
|
||||||
|
SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
|
||||||
|
SMTP_USE_TLS: ${{ secrets.SMTP_USE_TLS }}
|
||||||
|
SMTP_USE_SSL: ${{ secrets.SMTP_USE_SSL }}
|
||||||
|
SMTP_FROM: ${{ secrets.SMTP_FROM }}
|
||||||
|
UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
|
||||||
run: |
|
run: |
|
||||||
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
||||||
-n prod --create-namespace \
|
-n prod --create-namespace \
|
||||||
@@ -111,4 +121,12 @@ jobs:
|
|||||||
--set-string oauth.csas.clientId="$CSAS_CLIENT_ID" \
|
--set-string oauth.csas.clientId="$CSAS_CLIENT_ID" \
|
||||||
--set-string oauth.csas.clientSecret="$CSAS_CLIENT_SECRET" \
|
--set-string oauth.csas.clientSecret="$CSAS_CLIENT_SECRET" \
|
||||||
--set-string sentry_dsn="$SENTRY_DSN" \
|
--set-string sentry_dsn="$SENTRY_DSN" \
|
||||||
--set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}"
|
--set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}" \
|
||||||
|
--set-string smtp.host="$SMTP_HOST" \
|
||||||
|
--set smtp.port="$SMTP_PORT" \
|
||||||
|
--set-string smtp.username="$SMTP_USERNAME" \
|
||||||
|
--set-string smtp.password="$SMTP_PASSWORD" \
|
||||||
|
--set-string smtp.tls="$SMTP_USE_TLS" \
|
||||||
|
--set-string smtp.ssl="$SMTP_USE_SSL" \
|
||||||
|
--set-string smtp.from="$SMTP_FROM" \
|
||||||
|
--set-string unirate.key="$UNIRATE_API_KEY"
|
||||||
5
.github/workflows/run-tests.yml
vendored
5
.github/workflows/run-tests.yml
vendored
@@ -31,6 +31,9 @@ jobs:
|
|||||||
MARIADB_DB: group_project
|
MARIADB_DB: group_project
|
||||||
MARIADB_USER: appuser
|
MARIADB_USER: appuser
|
||||||
MARIADB_PASSWORD: apppass
|
MARIADB_PASSWORD: apppass
|
||||||
|
# Ensure the application uses MariaDB (async) during tests
|
||||||
|
DATABASE_URL: mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project
|
||||||
|
DISABLE_METRICS: "1"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository code
|
- name: Check out repository code
|
||||||
@@ -57,5 +60,7 @@ jobs:
|
|||||||
working-directory: ./7project/backend
|
working-directory: ./7project/backend
|
||||||
|
|
||||||
- name: Run tests with pytest
|
- name: Run tests with pytest
|
||||||
|
env:
|
||||||
|
PYTEST_RUN_CONFIG: "True"
|
||||||
run: pytest
|
run: pytest
|
||||||
working-directory: ./7project/backend
|
working-directory: ./7project/backend
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM python:3.11-slim
|
FROM python:3.11-trixie
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY requirements.txt .
|
COPY requirements.txt .
|
||||||
|
|||||||
66
7project/backend/app/api/exchange_rates.py
Normal file
66
7project/backend/app/api/exchange_rates.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import os
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from fastapi import APIRouter, HTTPException, Query, status
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/exchange-rates", tags=["exchange-rates"])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("", status_code=status.HTTP_200_OK)
|
||||||
|
async def get_exchange_rates(symbols: str = Query("EUR,USD,NOK", description="Comma-separated currency codes to fetch vs CZK")):
|
||||||
|
"""
|
||||||
|
Fetch exchange rates from UniRate API on the backend and return CZK-per-target rates.
|
||||||
|
- Always requests CZK in addition to requested symbols to compute conversion from USD-base.
|
||||||
|
- Returns a list of {currencyCode, rate} where rate is CZK per 1 unit of the target currency.
|
||||||
|
"""
|
||||||
|
api_key = os.getenv("UNIRATE_API_KEY")
|
||||||
|
if not api_key:
|
||||||
|
raise HTTPException(status_code=500, detail="Server is not configured with UNIRATE_API_KEY")
|
||||||
|
|
||||||
|
# Ensure CZK is included for conversion
|
||||||
|
requested = [s.strip().upper() for s in symbols.split(",") if s.strip()]
|
||||||
|
if "CZK" not in requested:
|
||||||
|
requested.append("CZK")
|
||||||
|
query_symbols = ",".join(sorted(set(requested)))
|
||||||
|
|
||||||
|
url = f"https://unirateapi.com/api/rates?api_key={api_key}&symbols={query_symbols}"
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with httpx.AsyncClient(timeout=httpx.Timeout(15.0)) as client:
|
||||||
|
resp = await client.get(url)
|
||||||
|
if resp.status_code != httpx.codes.OK:
|
||||||
|
raise HTTPException(status_code=502, detail=f"Upstream UniRate error: HTTP {resp.status_code}")
|
||||||
|
data = resp.json()
|
||||||
|
except httpx.HTTPError as e:
|
||||||
|
raise HTTPException(status_code=502, detail=f"Failed to contact UniRate: {str(e)}")
|
||||||
|
|
||||||
|
# Validate response structure
|
||||||
|
rates = data.get("rates") if isinstance(data, dict) else None
|
||||||
|
base = data.get("base") if isinstance(data, dict) else None
|
||||||
|
if not rates or base != "USD" or "CZK" not in rates:
|
||||||
|
# Prefer upstream message when available
|
||||||
|
detail = data.get("message") if isinstance(data, dict) else None
|
||||||
|
if not detail and isinstance(data, dict):
|
||||||
|
err = data.get("error")
|
||||||
|
if isinstance(err, dict):
|
||||||
|
detail = err.get("info")
|
||||||
|
raise HTTPException(status_code=502, detail=detail or "Invalid response from UniRate API")
|
||||||
|
|
||||||
|
czk_per_usd = rates["CZK"]
|
||||||
|
|
||||||
|
# Build result excluding CZK itself
|
||||||
|
result = []
|
||||||
|
for code in requested:
|
||||||
|
if code == "CZK":
|
||||||
|
continue
|
||||||
|
target_per_usd = rates.get(code)
|
||||||
|
if target_per_usd in (None, 0):
|
||||||
|
# Skip unavailable or invalid
|
||||||
|
continue
|
||||||
|
czk_per_target = czk_per_usd / target_per_usd
|
||||||
|
result.append({"currencyCode": code, "rate": czk_per_target})
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
116
7project/backend/app/api/mock_bank.py
Normal file
116
7project/backend/app/api/mock_bank.py
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import List, Optional
|
||||||
|
import random
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends
|
||||||
|
from pydantic import BaseModel, Field, conint, confloat, validator
|
||||||
|
from sqlalchemy import select
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from app.services.db import get_async_session
|
||||||
|
from app.services.user_service import current_active_user
|
||||||
|
from app.models.user import User
|
||||||
|
from app.models.transaction import Transaction
|
||||||
|
from app.models.categories import Category
|
||||||
|
from app.schemas.transaction import TransactionRead
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/mock-bank", tags=["mock-bank"])
|
||||||
|
|
||||||
|
|
||||||
|
class GenerateOptions(BaseModel):
|
||||||
|
count: conint(strict=True, gt=0) = Field(default=10, description="Number of transactions to generate")
|
||||||
|
minAmount: confloat(strict=True) = Field(default=-200.0, description="Minimum transaction amount")
|
||||||
|
maxAmount: confloat(strict=True) = Field(default=200.0, description="Maximum transaction amount")
|
||||||
|
startDate: Optional[str] = Field(None, description="Earliest date (YYYY-MM-DD)")
|
||||||
|
endDate: Optional[str] = Field(None, description="Latest date (YYYY-MM-DD)")
|
||||||
|
categoryIds: List[int] = Field(default_factory=list, description="Optional category IDs to assign randomly")
|
||||||
|
|
||||||
|
@validator("maxAmount")
|
||||||
|
def _validate_amounts(cls, v, values):
|
||||||
|
min_amt = values.get("minAmount")
|
||||||
|
if min_amt is not None and v < min_amt:
|
||||||
|
raise ValueError("maxAmount must be greater than or equal to minAmount")
|
||||||
|
return v
|
||||||
|
|
||||||
|
@validator("endDate")
|
||||||
|
def _validate_dates(cls, v, values):
|
||||||
|
sd = values.get("startDate")
|
||||||
|
if v and sd:
|
||||||
|
try:
|
||||||
|
ed = datetime.strptime(v, "%Y-%m-%d").date()
|
||||||
|
st = datetime.strptime(sd, "%Y-%m-%d").date()
|
||||||
|
except ValueError:
|
||||||
|
raise ValueError("Invalid date format, expected YYYY-MM-DD")
|
||||||
|
if ed < st:
|
||||||
|
raise ValueError("endDate must be greater than or equal to startDate")
|
||||||
|
return v
|
||||||
|
|
||||||
|
|
||||||
|
class GeneratedTransaction(BaseModel):
|
||||||
|
amount: float
|
||||||
|
date: str # YYYY-MM-DD
|
||||||
|
category_ids: List[int] = []
|
||||||
|
description: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/generate", response_model=List[GeneratedTransaction])
|
||||||
|
async def generate_mock_transactions(
|
||||||
|
options: GenerateOptions,
|
||||||
|
user: User = Depends(current_active_user),
|
||||||
|
):
|
||||||
|
# Seed randomness per user to make results less erratic across multiple calls in quick succession
|
||||||
|
seed = int(datetime.utcnow().timestamp()) ^ int(user.id)
|
||||||
|
rnd = random.Random(seed)
|
||||||
|
|
||||||
|
# Determine date range
|
||||||
|
if options.startDate:
|
||||||
|
start_date = datetime.strptime(options.startDate, "%Y-%m-%d").date()
|
||||||
|
else:
|
||||||
|
start_date = (datetime.utcnow() - timedelta(days=365)).date()
|
||||||
|
if options.endDate:
|
||||||
|
end_date = datetime.strptime(options.endDate, "%Y-%m-%d").date()
|
||||||
|
else:
|
||||||
|
end_date = datetime.utcnow().date()
|
||||||
|
|
||||||
|
span_days = max(0, (end_date - start_date).days)
|
||||||
|
|
||||||
|
results: List[GeneratedTransaction] = []
|
||||||
|
for _ in range(options.count):
|
||||||
|
amount = round(rnd.uniform(options.minAmount, options.maxAmount), 2)
|
||||||
|
# Pick a random date in the inclusive range
|
||||||
|
rand_day = rnd.randint(0, span_days) if span_days > 0 else 0
|
||||||
|
tx_date = start_date + timedelta(days=rand_day)
|
||||||
|
# Pick category randomly from provided list, or empty
|
||||||
|
if options.categoryIds:
|
||||||
|
cat = [rnd.choice(options.categoryIds)]
|
||||||
|
else:
|
||||||
|
cat = []
|
||||||
|
# Optional simple description for flavor
|
||||||
|
desc = None
|
||||||
|
# Assemble
|
||||||
|
results.append(GeneratedTransaction(
|
||||||
|
amount=amount,
|
||||||
|
date=tx_date.isoformat(),
|
||||||
|
category_ids=cat,
|
||||||
|
description=desc,
|
||||||
|
))
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/scrape")
|
||||||
|
async def scrape_mock_bank():
|
||||||
|
# 80% of the time: nothing to scrape
|
||||||
|
if random.random() < 0.8:
|
||||||
|
return []
|
||||||
|
|
||||||
|
transactions = []
|
||||||
|
count = random.randint(1, 10)
|
||||||
|
for _ in range(count):
|
||||||
|
transactions.append({
|
||||||
|
"amount": round(random.uniform(-200.0, 200.0), 2),
|
||||||
|
"date": (datetime.utcnow().date() - timedelta(days=random.randint(0, 30))).isoformat(),
|
||||||
|
"description": "Mock transaction",
|
||||||
|
})
|
||||||
|
|
||||||
|
return transactions
|
||||||
@@ -1,14 +1,17 @@
|
|||||||
|
import json
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from pythonjsonlogger import jsonlogger
|
from pythonjsonlogger import jsonlogger
|
||||||
|
|
||||||
from fastapi import Depends, FastAPI
|
from fastapi import Depends, FastAPI, HTTPException
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
from prometheus_fastapi_instrumentator import Instrumentator, metrics
|
from prometheus_fastapi_instrumentator import Instrumentator, metrics
|
||||||
from starlette.requests import Request
|
from starlette.requests import Request
|
||||||
|
|
||||||
|
from app.services.prometheus import number_of_users, number_of_transactions
|
||||||
|
|
||||||
from app.services import bank_scraper
|
from app.services import bank_scraper
|
||||||
from app.workers.celery_tasks import load_transactions, load_all_transactions
|
from app.workers.celery_tasks import load_transactions, load_all_transactions
|
||||||
from app.models.user import User, OAuthAccount
|
from app.models.user import User, OAuthAccount
|
||||||
@@ -18,6 +21,7 @@ from app.api.auth import router as auth_router
|
|||||||
from app.api.csas import router as csas_router
|
from app.api.csas import router as csas_router
|
||||||
from app.api.categories import router as categories_router
|
from app.api.categories import router as categories_router
|
||||||
from app.api.transactions import router as transactions_router
|
from app.api.transactions import router as transactions_router
|
||||||
|
from app.api.exchange_rates import router as exchange_rates_router
|
||||||
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users, get_oauth_provider, \
|
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users, get_oauth_provider, \
|
||||||
UserManager, get_jwt_strategy
|
UserManager, get_jwt_strategy
|
||||||
from app.core.security import extract_bearer_token, is_token_revoked, decode_and_verify_jwt
|
from app.core.security import extract_bearer_token, is_token_revoked, decode_and_verify_jwt
|
||||||
@@ -26,7 +30,8 @@ from app.services.user_service import SECRET
|
|||||||
from fastapi import FastAPI
|
from fastapi import FastAPI
|
||||||
import sentry_sdk
|
import sentry_sdk
|
||||||
from fastapi_users.db import SQLAlchemyUserDatabase
|
from fastapi_users.db import SQLAlchemyUserDatabase
|
||||||
from app.core.db import async_session_maker
|
from app.core.db import async_session_maker, engine
|
||||||
|
from app.core.base import Base
|
||||||
|
|
||||||
sentry_sdk.init(
|
sentry_sdk.init(
|
||||||
dsn=os.getenv("SENTRY_DSN"),
|
dsn=os.getenv("SENTRY_DSN"),
|
||||||
@@ -48,18 +53,23 @@ fastApi.add_middleware(
|
|||||||
allow_headers=["*"],
|
allow_headers=["*"],
|
||||||
)
|
)
|
||||||
|
|
||||||
prometheus = Instrumentator().instrument(fastApi)
|
|
||||||
|
|
||||||
prometheus.expose(
|
if not os.getenv("PYTEST_RUN_CONFIG"):
|
||||||
fastApi,
|
prometheus = Instrumentator().instrument(fastApi)
|
||||||
endpoint="/metrics",
|
# Register custom metrics
|
||||||
include_in_schema=True,
|
prometheus.add(number_of_users()).add(number_of_transactions())
|
||||||
)
|
prometheus.expose(
|
||||||
|
fastApi,
|
||||||
|
endpoint="/metrics",
|
||||||
|
include_in_schema=True,
|
||||||
|
)
|
||||||
|
|
||||||
fastApi.include_router(auth_router)
|
fastApi.include_router(auth_router)
|
||||||
fastApi.include_router(categories_router)
|
fastApi.include_router(categories_router)
|
||||||
fastApi.include_router(transactions_router)
|
fastApi.include_router(transactions_router)
|
||||||
|
fastApi.include_router(exchange_rates_router)
|
||||||
|
from app.api.mock_bank import router as mock_bank_router
|
||||||
|
fastApi.include_router(mock_bank_router)
|
||||||
|
|
||||||
for h in list(logging.root.handlers):
|
for h in list(logging.root.handlers):
|
||||||
logging.root.removeHandler(h)
|
logging.root.removeHandler(h)
|
||||||
@@ -73,7 +83,6 @@ _log_handler.setFormatter(_formatter)
|
|||||||
logging.root.setLevel(logging.INFO)
|
logging.root.setLevel(logging.INFO)
|
||||||
logging.root.addHandler(_log_handler)
|
logging.root.addHandler(_log_handler)
|
||||||
|
|
||||||
|
|
||||||
for _name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
|
for _name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
|
||||||
_logger = logging.getLogger(_name)
|
_logger = logging.getLogger(_name)
|
||||||
_logger.handlers = [_log_handler]
|
_logger.handlers = [_log_handler]
|
||||||
@@ -156,16 +165,12 @@ async def authenticated_route(user: User = Depends(current_active_verified_user)
|
|||||||
return {"message": f"Hello {user.email}!"}
|
return {"message": f"Hello {user.email}!"}
|
||||||
|
|
||||||
|
|
||||||
@fastApi.get("/debug/scrape/csas/all", tags=["debug"])
|
@fastApi.get("/_cron", include_in_schema=False)
|
||||||
async def debug_scrape_csas_all():
|
async def handle_cron(request: Request):
|
||||||
logging.info("[Debug] Queueing CSAS scrape for all users via HTTP endpoint (Celery)")
|
# endpoint accessed by Clodflare => return 404
|
||||||
|
if request.headers.get("cf-connecting-ip"):
|
||||||
|
raise HTTPException(status_code=404)
|
||||||
|
|
||||||
|
logging.info("[Cron] Triggering scheduled tasks via HTTP endpoint")
|
||||||
task = load_all_transactions.delay()
|
task = load_all_transactions.delay()
|
||||||
return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
|
return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
|
||||||
|
|
||||||
|
|
||||||
@fastApi.post("/debug/scrape/csas/{user_id}", tags=["debug"])
|
|
||||||
async def debug_scrape_csas_user(user_id: str, user: User = Depends(current_active_verified_user)):
|
|
||||||
logging.info("[Debug] Queueing CSAS scrape for single user via HTTP endpoint (Celery) | user_id=%s", user_id)
|
|
||||||
task = load_transactions.delay(user_id)
|
|
||||||
return {"status": "queued", "action": "csas_scrape_single", "user_id": user_id,
|
|
||||||
"task_id": getattr(task, 'id', None)}
|
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
import os
|
import os
|
||||||
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
|
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
|
||||||
|
from sqlalchemy import create_engine
|
||||||
|
from sqlalchemy.orm import sessionmaker
|
||||||
from app.core.base import Base
|
from app.core.base import Base
|
||||||
|
|
||||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||||
@@ -23,6 +25,7 @@ host_env = os.getenv("MARIADB_HOST", "localhost")
|
|||||||
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
|
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
|
||||||
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
||||||
|
|
||||||
|
# Async engine/session for the async parts of the app
|
||||||
engine = create_async_engine(
|
engine = create_async_engine(
|
||||||
DATABASE_URL,
|
DATABASE_URL,
|
||||||
pool_pre_ping=True,
|
pool_pre_ping=True,
|
||||||
@@ -30,3 +33,13 @@ engine = create_async_engine(
|
|||||||
connect_args=connect_args,
|
connect_args=connect_args,
|
||||||
)
|
)
|
||||||
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
|
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
|
||||||
|
|
||||||
|
# Synchronous engine/session for sync utilities (e.g., bank_scraper)
|
||||||
|
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
||||||
|
engine_sync = create_engine(
|
||||||
|
SYNC_DATABASE_URL,
|
||||||
|
pool_pre_ping=True,
|
||||||
|
echo=os.getenv("SQL_ECHO", "0") == "1",
|
||||||
|
connect_args=connect_args,
|
||||||
|
)
|
||||||
|
sync_session_maker = sessionmaker(bind=engine_sync, expire_on_commit=False)
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
import os
|
||||||
from os.path import dirname, join
|
from os.path import dirname, join
|
||||||
from time import strptime
|
from time import strptime
|
||||||
from uuid import UUID
|
from uuid import UUID
|
||||||
@@ -7,7 +8,7 @@ from uuid import UUID
|
|||||||
import httpx
|
import httpx
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
|
|
||||||
from app.core.db import async_session_maker
|
from app.core.db import sync_session_maker
|
||||||
from app.models.transaction import Transaction
|
from app.models.transaction import Transaction
|
||||||
from app.models.user import User
|
from app.models.user import User
|
||||||
|
|
||||||
@@ -20,26 +21,78 @@ CERTS = (
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
async def aload_ceska_sporitelna_transactions(user_id: str) -> None:
|
def load_mock_bank_transactions(user_id: str) -> None:
|
||||||
try:
|
try:
|
||||||
uid = UUID(str(user_id))
|
uid = UUID(str(user_id))
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.error("Invalid user_id provided to bank_scraper (async): %r", user_id)
|
logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
|
||||||
return
|
return
|
||||||
|
|
||||||
await _aload_ceska_sporitelna_transactions(uid)
|
_load_mock_bank_transactions(uid)
|
||||||
|
|
||||||
|
|
||||||
async def aload_all_ceska_sporitelna_transactions() -> None:
|
def load_all_mock_bank_transactions() -> None:
|
||||||
async with async_session_maker() as session:
|
with sync_session_maker() as session:
|
||||||
result = await session.execute(select(User))
|
users = session.execute(select(User)).unique().scalars().all()
|
||||||
users = result.unique().scalars().all()
|
logger.info("[BankScraper] Starting Mock Bank scrape for all users | count=%d", len(users))
|
||||||
|
|
||||||
|
processed = 0
|
||||||
|
for user in users:
|
||||||
|
try:
|
||||||
|
_load_mock_bank_transactions(user.id)
|
||||||
|
processed += 1
|
||||||
|
except Exception:
|
||||||
|
logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
|
||||||
|
getattr(user, 'email', None))
|
||||||
|
logger.info("[BankScraper] Finished Mock Bank scrape for all users | processed=%d", processed)
|
||||||
|
|
||||||
|
|
||||||
|
def _load_mock_bank_transactions(user_id: UUID) -> None:
|
||||||
|
with sync_session_maker() as session:
|
||||||
|
user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
|
||||||
|
if user is None:
|
||||||
|
logger.warning("User not found for id=%s", user_id)
|
||||||
|
return
|
||||||
|
|
||||||
|
transactions = []
|
||||||
|
with httpx.Client() as client:
|
||||||
|
response = client.get(f"{os.getenv('APP_POD_URL')}/mock-bank/scrape")
|
||||||
|
if response.status_code != httpx.codes.OK:
|
||||||
|
return
|
||||||
|
for transaction in response.json():
|
||||||
|
transactions.append(
|
||||||
|
Transaction(
|
||||||
|
amount=transaction["amount"],
|
||||||
|
description=transaction.get("description"),
|
||||||
|
date=strptime(transaction["date"], "%Y-%m-%d"),
|
||||||
|
user_id=user_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
for transaction in transactions:
|
||||||
|
session.add(transaction)
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def load_ceska_sporitelna_transactions(user_id: str) -> None:
|
||||||
|
try:
|
||||||
|
uid = UUID(str(user_id))
|
||||||
|
except Exception:
|
||||||
|
logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
|
||||||
|
return
|
||||||
|
|
||||||
|
_load_ceska_sporitelna_transactions(uid)
|
||||||
|
|
||||||
|
|
||||||
|
def load_all_ceska_sporitelna_transactions() -> None:
|
||||||
|
with sync_session_maker() as session:
|
||||||
|
users = session.execute(select(User)).unique().scalars().all()
|
||||||
logger.info("[BankScraper] Starting CSAS scrape for all users | count=%d", len(users))
|
logger.info("[BankScraper] Starting CSAS scrape for all users | count=%d", len(users))
|
||||||
|
|
||||||
processed = 0
|
processed = 0
|
||||||
for user in users:
|
for user in users:
|
||||||
try:
|
try:
|
||||||
await _aload_ceska_sporitelna_transactions(user.id)
|
_load_ceska_sporitelna_transactions(user.id)
|
||||||
processed += 1
|
processed += 1
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
|
logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
|
||||||
@@ -47,10 +100,9 @@ async def aload_all_ceska_sporitelna_transactions() -> None:
|
|||||||
logger.info("[BankScraper] Finished CSAS scrape for all users | processed=%d", processed)
|
logger.info("[BankScraper] Finished CSAS scrape for all users | processed=%d", processed)
|
||||||
|
|
||||||
|
|
||||||
async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
def _load_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||||
async with (async_session_maker() as session):
|
with sync_session_maker() as session:
|
||||||
result = await session.execute(select(User).where(User.id == user_id))
|
user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
|
||||||
user: User = result.unique().scalar_one_or_none()
|
|
||||||
if user is None:
|
if user is None:
|
||||||
logger.warning("User not found for id=%s", user_id)
|
logger.warning("User not found for id=%s", user_id)
|
||||||
return
|
return
|
||||||
@@ -65,8 +117,8 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
|||||||
|
|
||||||
accounts = []
|
accounts = []
|
||||||
try:
|
try:
|
||||||
async with httpx.AsyncClient(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
|
with httpx.Client(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
|
||||||
response = await client.get(
|
response = client.get(
|
||||||
"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts?size=10&page=0&sort=iban&order=desc",
|
"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts?size=10&page=0&sort=iban&order=desc",
|
||||||
headers={
|
headers={
|
||||||
"Authorization": f"Bearer {cfg['access_token']}",
|
"Authorization": f"Bearer {cfg['access_token']}",
|
||||||
@@ -77,7 +129,7 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
|||||||
if response.status_code != httpx.codes.OK:
|
if response.status_code != httpx.codes.OK:
|
||||||
return
|
return
|
||||||
|
|
||||||
for account in response.json()["accounts"]:
|
for account in response.json().get("accounts", []):
|
||||||
accounts.append(account)
|
accounts.append(account)
|
||||||
|
|
||||||
except (httpx.HTTPError,) as e:
|
except (httpx.HTTPError,) as e:
|
||||||
@@ -85,11 +137,13 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
|||||||
return
|
return
|
||||||
|
|
||||||
for account in accounts:
|
for account in accounts:
|
||||||
id = account["id"]
|
acc_id = account.get("id")
|
||||||
|
if not acc_id:
|
||||||
|
continue
|
||||||
|
|
||||||
url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
|
url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{acc_id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
|
||||||
async with httpx.AsyncClient(cert=CERTS) as client:
|
with httpx.Client(cert=CERTS) as client:
|
||||||
response = await client.get(
|
response = client.get(
|
||||||
url,
|
url,
|
||||||
headers={
|
headers={
|
||||||
"Authorization": f"Bearer {cfg['access_token']}",
|
"Authorization": f"Bearer {cfg['access_token']}",
|
||||||
@@ -100,7 +154,7 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
|||||||
if response.status_code != httpx.codes.OK:
|
if response.status_code != httpx.codes.OK:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
transactions = response.json()["transactions"]
|
transactions = response.json().get("transactions", [])
|
||||||
|
|
||||||
for transaction in transactions:
|
for transaction in transactions:
|
||||||
description = transaction.get("entryDetails", {}).get("transactionDetails", {}).get(
|
description = transaction.get("entryDetails", {}).get("transactionDetails", {}).get(
|
||||||
@@ -108,9 +162,12 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
|||||||
date_str = transaction.get("bookingDate", {}).get("date")
|
date_str = transaction.get("bookingDate", {}).get("date")
|
||||||
date = strptime(date_str, "%Y-%m-%d") if date_str else None
|
date = strptime(date_str, "%Y-%m-%d") if date_str else None
|
||||||
amount = transaction.get("amount", {}).get("value")
|
amount = transaction.get("amount", {}).get("value")
|
||||||
if transaction.get("creditDebitIndicator") == "DBIT":
|
if transaction.get("creditDebitIndicator") == "DBIT" and amount is not None:
|
||||||
amount = -abs(amount)
|
amount = -abs(amount)
|
||||||
|
|
||||||
|
if amount is None:
|
||||||
|
continue
|
||||||
|
|
||||||
obj = Transaction(
|
obj = Transaction(
|
||||||
amount=amount,
|
amount=amount,
|
||||||
description=description,
|
description=description,
|
||||||
@@ -118,7 +175,4 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
|||||||
user_id=user_id,
|
user_id=user_id,
|
||||||
)
|
)
|
||||||
session.add(obj)
|
session.add(obj)
|
||||||
await session.commit()
|
session.commit()
|
||||||
|
|
||||||
pass
|
|
||||||
pass
|
|
||||||
|
|||||||
48
7project/backend/app/services/prometheus.py
Normal file
48
7project/backend/app/services/prometheus.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
from typing import Callable
|
||||||
|
from prometheus_fastapi_instrumentator.metrics import Info
|
||||||
|
from prometheus_client import Gauge
|
||||||
|
from sqlalchemy import select, func
|
||||||
|
|
||||||
|
from app.core.db import async_session_maker
|
||||||
|
from app.models.transaction import Transaction
|
||||||
|
from app.models.user import User
|
||||||
|
|
||||||
|
|
||||||
|
def number_of_users() -> Callable[[Info], None]:
|
||||||
|
METRIC = Gauge(
|
||||||
|
"number_of_users_total",
|
||||||
|
"Number of registered users.",
|
||||||
|
labelnames=("users",)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def instrumentation(info: Info) -> None:
|
||||||
|
try:
|
||||||
|
async with async_session_maker() as session:
|
||||||
|
result = await session.execute(select(func.count(User.id)))
|
||||||
|
user_count = result.scalar_one() or 0
|
||||||
|
except Exception:
|
||||||
|
# In case of DB errors, avoid crashing metrics endpoint
|
||||||
|
user_count = 0
|
||||||
|
METRIC.labels(users="total").set(user_count)
|
||||||
|
|
||||||
|
return instrumentation
|
||||||
|
|
||||||
|
|
||||||
|
def number_of_transactions() -> Callable[[Info], None]:
|
||||||
|
METRIC = Gauge(
|
||||||
|
"number_of_transactions_total",
|
||||||
|
"Number of transactions stored.",
|
||||||
|
labelnames=("transactions",)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def instrumentation(info: Info) -> None:
|
||||||
|
try:
|
||||||
|
async with async_session_maker() as session:
|
||||||
|
result = await session.execute(select(func.count()).select_from(Transaction))
|
||||||
|
transaction_count = result.scalar_one() or 0
|
||||||
|
except Exception:
|
||||||
|
# In case of DB errors, avoid crashing metrics endpoint
|
||||||
|
transaction_count = 0
|
||||||
|
METRIC.labels(transactions="total").set(transaction_count)
|
||||||
|
|
||||||
|
return instrumentation
|
||||||
@@ -1,9 +1,10 @@
|
|||||||
import logging
|
import logging
|
||||||
import asyncio
|
import os
|
||||||
|
import smtplib
|
||||||
from celery import shared_task
|
from email.message import EmailMessage
|
||||||
|
|
||||||
import app.services.bank_scraper
|
import app.services.bank_scraper
|
||||||
|
from app.celery_app import celery_app
|
||||||
|
|
||||||
logger = logging.getLogger("celery_tasks")
|
logger = logging.getLogger("celery_tasks")
|
||||||
if not logger.handlers:
|
if not logger.handlers:
|
||||||
@@ -12,96 +13,74 @@ if not logger.handlers:
|
|||||||
logger.setLevel(logging.INFO)
|
logger.setLevel(logging.INFO)
|
||||||
|
|
||||||
|
|
||||||
def run_coro(coro) -> None:
|
@celery_app.task(name="workers.send_email")
|
||||||
"""Run an async coroutine in a fresh event loop without using run_until_complete.
|
|
||||||
Primary strategy runs in a new loop in the current thread. If that fails due to
|
|
||||||
debugger patches (e.g., Bad file descriptor from pydevd_nest_asyncio), fall back
|
|
||||||
to running in a dedicated thread with its own event loop.
|
|
||||||
"""
|
|
||||||
import threading
|
|
||||||
|
|
||||||
def _cleanup_loop(loop):
|
|
||||||
try:
|
|
||||||
pending = [t for t in asyncio.all_tasks(loop) if not t.done()]
|
|
||||||
for t in pending:
|
|
||||||
t.cancel()
|
|
||||||
if pending:
|
|
||||||
loop.run_until_complete(asyncio.gather(*pending, return_exceptions=True))
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
finally:
|
|
||||||
try:
|
|
||||||
loop.close()
|
|
||||||
finally:
|
|
||||||
asyncio.set_event_loop(None)
|
|
||||||
|
|
||||||
# First attempt: Run in current thread with a fresh event loop
|
|
||||||
try:
|
|
||||||
loop = asyncio.get_event_loop_policy().new_event_loop()
|
|
||||||
try:
|
|
||||||
asyncio.set_event_loop(loop)
|
|
||||||
task = loop.create_task(coro)
|
|
||||||
task.add_done_callback(lambda _t: loop.stop())
|
|
||||||
loop.run_forever()
|
|
||||||
exc = task.exception()
|
|
||||||
if exc:
|
|
||||||
raise exc
|
|
||||||
return
|
|
||||||
finally:
|
|
||||||
_cleanup_loop(loop)
|
|
||||||
except OSError as e:
|
|
||||||
logger.warning("run_coro primary strategy failed (%s). Falling back to thread runner.", e)
|
|
||||||
except Exception:
|
|
||||||
# For any other unexpected errors, try thread fallback as well
|
|
||||||
logger.exception("run_coro primary strategy raised; attempting thread fallback")
|
|
||||||
|
|
||||||
# Fallback: Run in a dedicated thread with its own event loop
|
|
||||||
error = {"exc": None}
|
|
||||||
|
|
||||||
def _thread_target():
|
|
||||||
loop = asyncio.new_event_loop()
|
|
||||||
try:
|
|
||||||
asyncio.set_event_loop(loop)
|
|
||||||
task = loop.create_task(coro)
|
|
||||||
task.add_done_callback(lambda _t: loop.stop())
|
|
||||||
loop.run_forever()
|
|
||||||
exc = task.exception()
|
|
||||||
if exc:
|
|
||||||
error["exc"] = exc
|
|
||||||
finally:
|
|
||||||
_cleanup_loop(loop)
|
|
||||||
|
|
||||||
th = threading.Thread(target=_thread_target, name="celery-async-runner", daemon=True)
|
|
||||||
th.start()
|
|
||||||
th.join()
|
|
||||||
if error["exc"] is not None:
|
|
||||||
raise error["exc"]
|
|
||||||
|
|
||||||
|
|
||||||
@shared_task(name="workers.send_email")
|
|
||||||
def send_email(to: str, subject: str, body: str) -> None:
|
def send_email(to: str, subject: str, body: str) -> None:
|
||||||
if not (to and subject and body):
|
if not (to and subject and body):
|
||||||
logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
|
logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
|
||||||
return
|
return
|
||||||
|
|
||||||
# Placeholder for real email sending logic
|
host = os.getenv("SMTP_HOST")
|
||||||
logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
|
if not host:
|
||||||
|
logger.error("SMTP_HOST is not configured; cannot send email")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
port = int(os.getenv("SMTP_PORT", "25"))
|
||||||
|
username = os.getenv("SMTP_USERNAME")
|
||||||
|
password = os.getenv("SMTP_PASSWORD")
|
||||||
|
use_tls = os.getenv("SMTP_USE_TLS", "0").lower() in {"1", "true", "yes"}
|
||||||
|
use_ssl = os.getenv("SMTP_USE_SSL", "0").lower() in {"1", "true", "yes"}
|
||||||
|
timeout = int(os.getenv("SMTP_TIMEOUT", "10"))
|
||||||
|
mail_from = os.getenv("SMTP_FROM") or username or "noreply@localhost"
|
||||||
|
|
||||||
|
# Build message
|
||||||
|
msg = EmailMessage()
|
||||||
|
msg["To"] = to
|
||||||
|
msg["From"] = mail_from
|
||||||
|
msg["Subject"] = subject
|
||||||
|
msg.set_content(body)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if use_ssl:
|
||||||
|
with smtplib.SMTP_SSL(host=host, port=port, timeout=timeout) as smtp:
|
||||||
|
if username and password:
|
||||||
|
smtp.login(username, password)
|
||||||
|
smtp.send_message(msg)
|
||||||
|
else:
|
||||||
|
with smtplib.SMTP(host=host, port=port, timeout=timeout) as smtp:
|
||||||
|
# STARTTLS if requested
|
||||||
|
if use_tls:
|
||||||
|
smtp.starttls()
|
||||||
|
if username and password:
|
||||||
|
smtp.login(username, password)
|
||||||
|
smtp.send_message(msg)
|
||||||
|
logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
|
||||||
|
except Exception:
|
||||||
|
logger.exception("Failed to send email via SMTP to=%s subject=%s host=%s port=%s tls=%s ssl=%s", to, subject,
|
||||||
|
host, port, use_tls, use_ssl)
|
||||||
|
|
||||||
|
|
||||||
@shared_task(name="workers.load_transactions")
|
@celery_app.task(name="workers.load_transactions")
|
||||||
def load_transactions(user_id: str) -> None:
|
def load_transactions(user_id: str) -> None:
|
||||||
if not user_id:
|
if not user_id:
|
||||||
logger.error("Load transactions task missing user_id.")
|
logger.error("Load transactions task missing user_id.")
|
||||||
return
|
return
|
||||||
|
|
||||||
run_coro(app.services.bank_scraper.aload_ceska_sporitelna_transactions(user_id))
|
logger.info("[Celery] Starting load_transactions | user_id=%s", user_id)
|
||||||
|
try:
|
||||||
# Placeholder for real transaction loading logic
|
# Use synchronous bank scraper functions directly, mirroring load_all_transactions
|
||||||
logger.info("[Celery] Transactions loaded for user_id=%s", user_id)
|
app.services.bank_scraper.load_mock_bank_transactions(user_id)
|
||||||
|
app.services.bank_scraper.load_ceska_sporitelna_transactions(user_id)
|
||||||
|
except Exception:
|
||||||
|
logger.exception("Failed to load transactions for user_id=%s", user_id)
|
||||||
|
else:
|
||||||
|
logger.info("[Celery] Finished load_transactions | user_id=%s", user_id)
|
||||||
|
|
||||||
|
|
||||||
@shared_task(name="workers.load_all_transactions")
|
@celery_app.task(name="workers.load_all_transactions")
|
||||||
def load_all_transactions() -> None:
|
def load_all_transactions() -> None:
|
||||||
logger.info("[Celery] Starting load_all_transactions")
|
logger.info("[Celery] Starting load_all_transactions")
|
||||||
run_coro(app.services.bank_scraper.aload_all_ceska_sporitelna_transactions())
|
# Now use synchronous bank scraper functions directly
|
||||||
|
app.services.bank_scraper.load_all_mock_bank_transactions()
|
||||||
|
app.services.bank_scraper.load_all_ceska_sporitelna_transactions()
|
||||||
logger.info("[Celery] Finished load_all_transactions")
|
logger.info("[Celery] Finished load_all_transactions")
|
||||||
|
|||||||
@@ -101,17 +101,26 @@ async def test_e2e_transaction_workflow(fastapi_app, test_user):
|
|||||||
async def test_register_then_login_and_fetch_me(fastapi_app):
|
async def test_register_then_login_and_fetch_me(fastapi_app):
|
||||||
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
email = "newuser@example.com"
|
# Use unique email to avoid duplicates across runs
|
||||||
|
suffix = uuid.uuid4().hex[:8]
|
||||||
|
email = f"newuser_{suffix}@example.com"
|
||||||
password = "StrongPassw0rd!"
|
password = "StrongPassw0rd!"
|
||||||
|
|
||||||
reg = await ac.post("/auth/register", json={"email": email, "password": password})
|
reg = await ac.post("/auth/register", json={"email": email, "password": password})
|
||||||
assert reg.status_code in (status.HTTP_201_CREATED, status.HTTP_200_OK)
|
assert reg.status_code in (status.HTTP_201_CREATED, status.HTTP_200_OK)
|
||||||
|
|
||||||
login = await ac.post("/auth/jwt/login", data={"username": email, "password": password})
|
login = await ac.post("/auth/jwt/login", data={"username": email, "password": password})
|
||||||
assert login.status_code == status.HTTP_200_OK
|
assert login.status_code == status.HTTP_200_OK
|
||||||
token = login.json()["access_token"]
|
token = login.json()["access_token"]
|
||||||
me = await ac.get("/users/me", headers={"Authorization": f"Bearer {token}"})
|
headers = {"Authorization": f"Bearer {token}"}
|
||||||
assert me.status_code == status.HTTP_200_OK
|
try:
|
||||||
assert me.json()["email"] == email
|
me = await ac.get("/users/me", headers=headers)
|
||||||
|
assert me.status_code == status.HTTP_200_OK
|
||||||
|
assert me.json()["email"] == email
|
||||||
|
finally:
|
||||||
|
# Cleanup: delete the created user so future runs won’t conflict
|
||||||
|
d = await ac.delete("/users/me", headers=headers)
|
||||||
|
assert d.status_code == status.HTTP_204_NO_CONTENT
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
@@ -158,22 +167,44 @@ async def test_update_category_conflict_and_404(fastapi_app, test_user):
|
|||||||
async def test_category_cross_user_isolation(fastapi_app):
|
async def test_category_cross_user_isolation(fastapi_app):
|
||||||
transport = ASGITransport(app=fastapi_app)
|
transport = ASGITransport(app=fastapi_app)
|
||||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||||
|
# Generate unique emails for both users
|
||||||
|
sfx = uuid.uuid4().hex[:8]
|
||||||
|
u1 = {"email": f"u1_{sfx}@example.com", "password": "Aaaaaa1!"}
|
||||||
|
u2 = {"email": f"u2_{sfx}@example.com", "password": "Aaaaaa1!"}
|
||||||
|
|
||||||
# user1
|
# user1
|
||||||
u1 = {"email": "u1@example.com", "password": "Aaaaaa1!"}
|
|
||||||
assert (await ac.post("/auth/register", json=u1)).status_code in (200, 201)
|
assert (await ac.post("/auth/register", json=u1)).status_code in (200, 201)
|
||||||
t1 = (await ac.post("/auth/jwt/login", data={"username": u1["email"], "password": u1["password"]})).json()["access_token"]
|
t1 = (await ac.post("/auth/jwt/login", data={"username": u1["email"], "password": u1["password"]})).json()["access_token"]
|
||||||
|
h1 = {"Authorization": f"Bearer {t1}"}
|
||||||
|
|
||||||
# user1 creates a category
|
# user1 creates a category
|
||||||
c = (await ac.post("/categories/create", json={"name": "Private"}, headers={"Authorization": f"Bearer {t1}"})).json()
|
c = (await ac.post("/categories/create", json={"name": "Private"}, headers=h1)).json()
|
||||||
|
cat_id = c["id"]
|
||||||
|
|
||||||
# user2
|
# user2
|
||||||
u2 = {"email": "u2@example.com", "password": "Aaaaaa1!"}
|
|
||||||
assert (await ac.post("/auth/register", json=u2)).status_code in (200, 201)
|
assert (await ac.post("/auth/register", json=u2)).status_code in (200, 201)
|
||||||
t2 = (await ac.post("/auth/jwt/login", data={"username": u2["email"], "password": u2["password"]})).json()["access_token"]
|
t2 = (await ac.post("/auth/jwt/login", data={"username": u2["email"], "password": u2["password"]})).json()["access_token"]
|
||||||
|
h2 = {"Authorization": f"Bearer {t2}"}
|
||||||
|
|
||||||
# user2 cannot read/delete user1's category
|
try:
|
||||||
g = await ac.get(f"/categories/{c['id']}", headers={"Authorization": f"Bearer {t2}"})
|
# user2 cannot read/delete user1's category
|
||||||
assert g.status_code == status.HTTP_404_NOT_FOUND
|
g = await ac.get(f"/categories/{cat_id}", headers=h2)
|
||||||
d = await ac.delete(f"/categories/{c['id']}", headers={"Authorization": f"Bearer {t2}"})
|
assert g.status_code == status.HTTP_404_NOT_FOUND
|
||||||
assert d.status_code == status.HTTP_404_NOT_FOUND
|
d = await ac.delete(f"/categories/{cat_id}", headers=h2)
|
||||||
|
assert d.status_code == status.HTTP_404_NOT_FOUND
|
||||||
|
finally:
|
||||||
|
# Cleanup: remove the created category as its owner
|
||||||
|
try:
|
||||||
|
_ = await ac.delete(f"/categories/{cat_id}", headers=h1)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
# Cleanup: delete both users to avoid email conflicts later
|
||||||
|
try:
|
||||||
|
_ = await ac.delete("/users/me", headers=h1)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
_ = await ac.delete("/users/me", headers=h2)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -90,6 +90,11 @@ spec:
|
|||||||
secretKeyRef:
|
secretKeyRef:
|
||||||
name: prod
|
name: prod
|
||||||
key: CSAS_CLIENT_SECRET
|
key: CSAS_CLIENT_SECRET
|
||||||
|
- name: UNIRATE_API_KEY
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: UNIRATE_API_KEY
|
||||||
- name: DOMAIN
|
- name: DOMAIN
|
||||||
value: {{ required "Set .Values.domain" .Values.domain | quote }}
|
value: {{ required "Set .Values.domain" .Values.domain | quote }}
|
||||||
- name: DOMAIN_SCHEME
|
- name: DOMAIN_SCHEME
|
||||||
|
|||||||
25
7project/charts/myapp-chart/templates/cron.yaml
Normal file
25
7project/charts/myapp-chart/templates/cron.yaml
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
{{- if .Values.cron.enabled }}
|
||||||
|
apiVersion: batch/v1
|
||||||
|
kind: CronJob
|
||||||
|
metadata:
|
||||||
|
name: cronjob
|
||||||
|
spec:
|
||||||
|
schedule: {{ .Values.cron.schedule | quote }}
|
||||||
|
concurrencyPolicy: {{ .Values.cron.concurrencyPolicy | quote }}
|
||||||
|
jobTemplate:
|
||||||
|
spec:
|
||||||
|
template:
|
||||||
|
spec:
|
||||||
|
containers:
|
||||||
|
- name: cronjob
|
||||||
|
image: curlimages/curl:latest
|
||||||
|
imagePullPolicy: IfNotPresent
|
||||||
|
args:
|
||||||
|
- -sS
|
||||||
|
- -o
|
||||||
|
- /dev/null
|
||||||
|
- -w
|
||||||
|
- "%{http_code}"
|
||||||
|
- {{ printf "%s://%s.%s.svc.cluster.local%s" .Values.cron.scheme .Values.app.name .Release.Namespace .Values.cron.endpoint | quote }}
|
||||||
|
restartPolicy: OnFailure
|
||||||
|
{{- end }}
|
||||||
@@ -19,3 +19,11 @@ stringData:
|
|||||||
RABBITMQ_USERNAME: {{ .Values.rabbitmq.username | quote }}
|
RABBITMQ_USERNAME: {{ .Values.rabbitmq.username | quote }}
|
||||||
SENTRY_DSN: {{ .Values.sentry_dsn | quote }}
|
SENTRY_DSN: {{ .Values.sentry_dsn | quote }}
|
||||||
DB_ENCRYPTION_KEY: {{ required "Set .Values.database.encryptionSecret" .Values.database.encryptionSecret | quote }}
|
DB_ENCRYPTION_KEY: {{ required "Set .Values.database.encryptionSecret" .Values.database.encryptionSecret | quote }}
|
||||||
|
SMTP_HOST: {{ .Values.smtp.host | default "" | quote }}
|
||||||
|
SMTP_PORT: {{ .Values.smtp.port | default 587 | quote }}
|
||||||
|
SMTP_USERNAME: {{ .Values.smtp.username | default "" | quote }}
|
||||||
|
SMTP_PASSWORD: {{ .Values.smtp.password | default "" | quote }}
|
||||||
|
SMTP_USE_TLS: {{ .Values.smtp.tls | default false | quote }}
|
||||||
|
SMTP_USE_SSL: {{ .Values.smtp.ssl | default false | quote }}
|
||||||
|
SMTP_FROM: {{ .Values.smtp.from | default "" | quote }}
|
||||||
|
UNIRATE_API_KEY: {{ .Values.unirate.key | default "" | quote }}
|
||||||
|
|||||||
@@ -85,3 +85,40 @@ spec:
|
|||||||
secretKeyRef:
|
secretKeyRef:
|
||||||
name: prod
|
name: prod
|
||||||
key: DB_ENCRYPTION_KEY
|
key: DB_ENCRYPTION_KEY
|
||||||
|
- name: SMTP_HOST
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: SMTP_HOST
|
||||||
|
- name: SMTP_PORT
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: SMTP_PORT
|
||||||
|
- name: SMTP_USERNAME
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: SMTP_USERNAME
|
||||||
|
- name: SMTP_PASSWORD
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: SMTP_PASSWORD
|
||||||
|
- name: SMTP_USE_TLS
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: SMTP_USE_TLS
|
||||||
|
- name: SMTP_USE_SSL
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: SMTP_USE_SSL
|
||||||
|
- name: SMTP_FROM
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: prod
|
||||||
|
key: SMTP_FROM
|
||||||
|
- name: APP_POD_URL
|
||||||
|
value: {{ printf "http://%s.%s.svc.cluster.local" .Values.app.name .Release.Namespace | quote }}
|
||||||
|
|||||||
@@ -5,3 +5,6 @@ app:
|
|||||||
|
|
||||||
worker:
|
worker:
|
||||||
replicas: 3
|
replicas: 3
|
||||||
|
|
||||||
|
cron:
|
||||||
|
enabled: true
|
||||||
|
|||||||
@@ -13,6 +13,9 @@ deployment: ""
|
|||||||
domain: ""
|
domain: ""
|
||||||
domain_scheme: ""
|
domain_scheme: ""
|
||||||
|
|
||||||
|
unirate:
|
||||||
|
key: ""
|
||||||
|
|
||||||
frontend_domain: ""
|
frontend_domain: ""
|
||||||
frontend_domain_scheme: ""
|
frontend_domain_scheme: ""
|
||||||
|
|
||||||
@@ -35,6 +38,23 @@ worker:
|
|||||||
# Queue name for Celery worker and for CRD Queue
|
# Queue name for Celery worker and for CRD Queue
|
||||||
mailQueueName: "mail_queue"
|
mailQueueName: "mail_queue"
|
||||||
|
|
||||||
|
cron:
|
||||||
|
enabled: false
|
||||||
|
schedule: "*/5 * * * *" # every 5 minutes
|
||||||
|
scheme: "http"
|
||||||
|
endpoint: "/_cron"
|
||||||
|
concurrencyPolicy: "Forbid"
|
||||||
|
|
||||||
|
smtp:
|
||||||
|
host:
|
||||||
|
port: 587
|
||||||
|
username: ""
|
||||||
|
password: ""
|
||||||
|
tls: false
|
||||||
|
ssl: false
|
||||||
|
from: ""
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
service:
|
service:
|
||||||
port: 80
|
port: 80
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ import BalanceChart from './BalanceChart';
|
|||||||
import ManualManagement from './ManualManagement';
|
import ManualManagement from './ManualManagement';
|
||||||
import CategoryPieChart from './CategoryPieChart';
|
import CategoryPieChart from './CategoryPieChart';
|
||||||
import MockBankModal, { type MockGenerationOptions } from './MockBankModal';
|
import MockBankModal, { type MockGenerationOptions } from './MockBankModal';
|
||||||
import { BACKEND_URL, VITE_UNIRATE_API_KEY } from '../config';
|
import { BACKEND_URL } from '../config';
|
||||||
|
|
||||||
function formatAmount(n: number) {
|
function formatAmount(n: number) {
|
||||||
return new Intl.NumberFormat(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 2 }).format(n);
|
return new Intl.NumberFormat(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 2 }).format(n);
|
||||||
@@ -21,17 +21,6 @@ type RateData = {
|
|||||||
rate: number;
|
rate: number;
|
||||||
};
|
};
|
||||||
|
|
||||||
// The part of the API response structure we need
|
|
||||||
type UnirateApiResponse = {
|
|
||||||
base: string;
|
|
||||||
rates: { [key: string]: number };
|
|
||||||
// We'll also check for error formats just in case
|
|
||||||
message?: string;
|
|
||||||
error?: {
|
|
||||||
info: string;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
// The currencies you want to display
|
// The currencies you want to display
|
||||||
const TARGET_CURRENCIES = ['EUR', 'USD', 'NOK'];
|
const TARGET_CURRENCIES = ['EUR', 'USD', 'NOK'];
|
||||||
|
|
||||||
@@ -45,49 +34,20 @@ function CurrencyRates() {
|
|||||||
setLoading(true);
|
setLoading(true);
|
||||||
setError(null);
|
setError(null);
|
||||||
|
|
||||||
const API_KEY = VITE_UNIRATE_API_KEY;
|
|
||||||
|
|
||||||
// We need to get the CZK rate as well, to use it for conversion
|
|
||||||
const allSymbols = [...TARGET_CURRENCIES, 'CZK'].join(',');
|
|
||||||
|
|
||||||
// We remove the `base` param, as the API seems to force base=USD
|
|
||||||
const UNIRATE_API_URL = `https://unirateapi.com/api/rates?api_key=${API_KEY}&symbols=${allSymbols}`;
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const res = await fetch(UNIRATE_API_URL);
|
const base = BACKEND_URL.replace(/\/$/, '');
|
||||||
const data: UnirateApiResponse = await res.json();
|
const url = `${base}/exchange-rates?symbols=${TARGET_CURRENCIES.join(',')}`;
|
||||||
|
const token = localStorage.getItem('token');
|
||||||
// --- THIS IS THE NEW, CORRECTED LOGIC ---
|
const res = await fetch(url, {
|
||||||
|
headers: token ? { Authorization: `Bearer ${token}` } : undefined,
|
||||||
// 1. Check if the 'rates' object exists. If not, it's an error.
|
credentials: 'include',
|
||||||
if (!data.rates) {
|
|
||||||
let errorMessage = data.message || (data.error ? data.error.info : 'Invalid API response');
|
|
||||||
throw new Error(errorMessage || 'Could not load rates');
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. Check that we got the base currency (USD) and our conversion currency (CZK)
|
|
||||||
if (data.base !== 'USD' || !data.rates.CZK) {
|
|
||||||
throw new Error('API response is missing required data for conversion (USD or CZK)');
|
|
||||||
}
|
|
||||||
|
|
||||||
// 3. Get our main conversion factor
|
|
||||||
const czkPerUsd = data.rates.CZK; // e.g., 23.0
|
|
||||||
|
|
||||||
// 4. Calculate the rates for our target currencies
|
|
||||||
const formattedRates = TARGET_CURRENCIES.map(code => {
|
|
||||||
const targetPerUsd = data.rates[code]; // e.g., 0.9 for EUR
|
|
||||||
|
|
||||||
// This calculates: (CZK per USD) / (TARGET per USD) = CZK per TARGET
|
|
||||||
// e.g. (23.0 CZK / 1 USD) / (0.9 EUR / 1 USD) = 25.55 CZK / 1 EUR
|
|
||||||
const rate = czkPerUsd / targetPerUsd;
|
|
||||||
|
|
||||||
return {
|
|
||||||
currencyCode: code,
|
|
||||||
rate: rate,
|
|
||||||
};
|
|
||||||
});
|
});
|
||||||
|
if (!res.ok) {
|
||||||
setRates(formattedRates);
|
const text = await res.text();
|
||||||
|
throw new Error(text || `Failed to load rates (${res.status})`);
|
||||||
|
}
|
||||||
|
const data: RateData[] = await res.json();
|
||||||
|
setRates(data);
|
||||||
} catch (err: any) {
|
} catch (err: any) {
|
||||||
setError(err.message || 'Could not load rates');
|
setError(err.message || 'Could not load rates');
|
||||||
} finally {
|
} finally {
|
||||||
@@ -235,44 +195,50 @@ export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
|||||||
setIsGenerating(true);
|
setIsGenerating(true);
|
||||||
setMockModalOpen(false);
|
setMockModalOpen(false);
|
||||||
|
|
||||||
const { count, minAmount, maxAmount, startDate, endDate, categoryIds } = options;
|
try {
|
||||||
const newTransactions: Transaction[] = [];
|
const base = BACKEND_URL.replace(/\/$/, '');
|
||||||
|
const url = `${base}/mock-bank/generate`;
|
||||||
const startDateTime = new Date(startDate).getTime();
|
const token = localStorage.getItem('token');
|
||||||
const endDateTime = new Date(endDate).getTime();
|
const res = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
for (let i = 0; i < count; i++) {
|
headers: {
|
||||||
// Generate random data based on user input
|
'Content-Type': 'application/json',
|
||||||
const amount = parseFloat((Math.random() * (maxAmount - minAmount) + minAmount).toFixed(2));
|
...(token ? { Authorization: `Bearer ${token}` } : {}),
|
||||||
|
},
|
||||||
const randomTime = Math.random() * (endDateTime - startDateTime) + startDateTime;
|
credentials: 'include',
|
||||||
const date = new Date(randomTime);
|
body: JSON.stringify(options),
|
||||||
const dateString = date.toISOString().split('T')[0];
|
});
|
||||||
|
if (!res.ok) {
|
||||||
const randomCategory = categoryIds.length > 0
|
const text = await res.text();
|
||||||
? [categoryIds[Math.floor(Math.random() * categoryIds.length)]]
|
throw new Error(text || `Failed to generate mock transactions (${res.status})`);
|
||||||
: [];
|
|
||||||
|
|
||||||
const payload = {
|
|
||||||
amount,
|
|
||||||
date: dateString,
|
|
||||||
category_ids: randomCategory,
|
|
||||||
};
|
|
||||||
|
|
||||||
try {
|
|
||||||
const created = await createTransaction(payload);
|
|
||||||
newTransactions.push(created);
|
|
||||||
} catch (err) {
|
|
||||||
console.error("Failed to create mock transaction:", err);
|
|
||||||
alert('An error occurred while generating transactions. Check the console.');
|
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
|
const generated: Array<{ amount: number; date: string; category_ids: number[]; description?: string | null }>
|
||||||
|
= await res.json();
|
||||||
|
|
||||||
|
const newTransactions: Transaction[] = [];
|
||||||
|
for (const g of generated) {
|
||||||
|
try {
|
||||||
|
const created = await createTransaction({
|
||||||
|
amount: g.amount,
|
||||||
|
date: g.date,
|
||||||
|
category_ids: g.category_ids || [],
|
||||||
|
description: g.description || undefined,
|
||||||
|
});
|
||||||
|
newTransactions.push(created);
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Failed to create mock transaction:', err);
|
||||||
|
// continue creating others
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
alert(`${newTransactions.length} mock transactions were successfully generated!`);
|
||||||
|
} catch (err: any) {
|
||||||
|
console.error(err);
|
||||||
|
alert(err?.message || 'Failed to generate mock transactions');
|
||||||
|
} finally {
|
||||||
|
setIsGenerating(false);
|
||||||
|
await loadAll();
|
||||||
}
|
}
|
||||||
|
|
||||||
setIsGenerating(false);
|
|
||||||
alert(`${newTransactions.length} mock transactions were successfully generated!`);
|
|
||||||
|
|
||||||
await loadAll();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
useEffect(() => { loadAll(); }, [startDate, endDate]);
|
useEffect(() => { loadAll(); }, [startDate, endDate]);
|
||||||
|
|||||||
@@ -43,8 +43,8 @@ The tracker should not store the transactions in the database - security vulnera
|
|||||||
|
|
||||||
Last 3 minutes of the meeting, summarize action items.
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
- [ ] Change the name on frontend from 7project
|
- [x] Change the name on frontend from 7project
|
||||||
- [ ] Finalize the funcionality and everyting in the code part
|
- [x] Finalize the funcionality and everyting in the code part
|
||||||
- [ ] Try to finalize report with focus on reproducibility
|
- [ ] Try to finalize report with focus on reproducibility
|
||||||
- [ ] More high level explanation of the workflow in the report
|
- [ ] More high level explanation of the workflow in the report
|
||||||
|
|
||||||
|
|||||||
47
7project/meetings/2025-11-6-meeting.md
Normal file
47
7project/meetings/2025-11-6-meeting.md
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
# Weekly Meeting Notes
|
||||||
|
|
||||||
|
- Group 8 - Personal finance tracker
|
||||||
|
- Mentor: Jaychander
|
||||||
|
|
||||||
|
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||||
|
Just copy the template below for each weekly meeting and fill in the details.
|
||||||
|
|
||||||
|
## Administrative Info
|
||||||
|
|
||||||
|
- Date: 2025-10-30
|
||||||
|
- Attendees: Dejan, Lukas
|
||||||
|
- Notetaker: Dejan
|
||||||
|
|
||||||
|
## Progress Update (Before Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [x] Change the name on frontend from 7project
|
||||||
|
- [x] Finalize the funcionality and everyting in the code part
|
||||||
|
- [x] Try to finalize report with focus on reproducibility
|
||||||
|
- [x] More high level explanation of the workflow in the report
|
||||||
|
|
||||||
|
Summary of what has been accomplished since the last meeting in the following categories.
|
||||||
|
|
||||||
|
### Coding
|
||||||
|
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
|
||||||
|
|
||||||
|
## Questions and Topics for Discussion (Before Meeting)
|
||||||
|
|
||||||
|
|
||||||
|
## Discussion Notes (During Meeting)
|
||||||
|
The tracker should not store the transactions in the database - security vulnerability.
|
||||||
|
|
||||||
|
## Action Items for Next Week (During Meeting)
|
||||||
|
|
||||||
|
Last 3 minutes of the meeting, summarize action items.
|
||||||
|
|
||||||
|
- [ ] video
|
||||||
|
- [ ] highlight the optional stuff in the report
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
---
|
||||||
@@ -14,7 +14,7 @@
|
|||||||
- 289229, Lukáš Trkan, lukastrkan
|
- 289229, Lukáš Trkan, lukastrkan
|
||||||
- 289258, Dejan Ribarovski, derib2613, ribardej
|
- 289258, Dejan Ribarovski, derib2613, ribardej
|
||||||
|
|
||||||
**Brief Description**: (něco spíš jako abstract, introuction, story behind)
|
**Brief Description**:
|
||||||
Our application is a finance tracker, so a person can easily track his cash flow
|
Our application is a finance tracker, so a person can easily track his cash flow
|
||||||
through multiple bank accounts. Person can label transactions with custom categories
|
through multiple bank accounts. Person can label transactions with custom categories
|
||||||
and later filter by them.
|
and later filter by them.
|
||||||
@@ -34,9 +34,16 @@ flowchart LR
|
|||||||
client[Client/Frontend] <--> svc[Backend API]
|
client[Client/Frontend] <--> svc[Backend API]
|
||||||
svc --> proc_queue
|
svc --> proc_queue
|
||||||
svc <--> db[(Database)]
|
svc <--> db[(Database)]
|
||||||
svc <--> cache[(Cache)]
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
The workflow works in the following way:
|
||||||
|
- Client connects to the frontend. After login, frontend automatically fetches the stored transactions from
|
||||||
|
the database via the backend API
|
||||||
|
- When the client opts for fetching new transactions via the Bank API, the backend delegates the task
|
||||||
|
to a background worker service via the Message queue.
|
||||||
|
- After successful load, these transactions are stored to the database and displayed to the client
|
||||||
|
- There is also a Task planner, that executes periodic tasks, like fetching new transactions automatically from the Bank API
|
||||||
|
|
||||||
### Components
|
### Components
|
||||||
|
|
||||||
- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles login/registration, shows latest transactions, filtering, and allows adding transactions.
|
- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles login/registration, shows latest transactions, filtering, and allows adding transactions.
|
||||||
@@ -123,12 +130,13 @@ docker compose up --build
|
|||||||
# Set environment variables (or create .env file)
|
# Set environment variables (or create .env file)
|
||||||
# TODO: fix
|
# TODO: fix
|
||||||
export SECRET=CHANGE_ME_SECRET
|
export SECRET=CHANGE_ME_SECRET
|
||||||
export BACKEND_URL=http://127.0.0.1:8000
|
export FRONTEND_DOMAIN_SCHEME=http://localhost:5173
|
||||||
export FRONTEND_URL=http://localhost:5173
|
export BANKID_CLIENT_ID=CHANGE_ME
|
||||||
export DATABASE_URL=postgresql+asyncpg://user:password@127.0.0.1:5432/app
|
export BANKID_CLIENT_SECRET=CHANGE_ME
|
||||||
export RABBITMQ_URL=amqp://guest:guest@127.0.0.1:5672/
|
export CSAS_CLIENT_ID=CHANGE_ME
|
||||||
export REDIS_URL=redis://127.0.0.1:6379/0
|
export CSAS_CLIENT_SECRET=CHANGE_ME
|
||||||
|
export MOJEID_CLIENT_ID=CHANGE_ME
|
||||||
|
export MOJEID_CLIENT_SECRET=CHANGE_ME
|
||||||
# Apply DB migrations (Alembic)
|
# Apply DB migrations (Alembic)
|
||||||
# From 7project
|
# From 7project
|
||||||
bash upgrade_database.sh
|
bash upgrade_database.sh
|
||||||
@@ -164,7 +172,38 @@ npm run build
|
|||||||
```
|
```
|
||||||
|
|
||||||
## Deployment Instructions
|
## Deployment Instructions
|
||||||
|
### Setup Cluster
|
||||||
|
Deployment should work on any Kubernetes cluster. However, we are using 4 TalosOS virtual machines (1 control plane, 3 workers)
|
||||||
|
running on top of Proxmox VE.
|
||||||
|
|
||||||
|
1) Create 4 VMs with TalosOS
|
||||||
|
2) Install talosctl for your OS: https://docs.siderolabs.com/talos/v1.10/getting-started/talosctl
|
||||||
|
3) Generate Talos config
|
||||||
|
```bash
|
||||||
|
# TODO: add commands
|
||||||
|
```
|
||||||
|
4) Edit the generated worker.yaml
|
||||||
|
- add google container registry mirror
|
||||||
|
- add modules from config generator
|
||||||
|
- add extramounts for persistent storage
|
||||||
|
- add kernel modules
|
||||||
|
|
||||||
|
5) Apply the config to the VMs
|
||||||
|
```bash
|
||||||
|
#TODO: add config apply commands
|
||||||
|
```
|
||||||
|
|
||||||
|
6) Verify the cluster is up
|
||||||
|
```bash
|
||||||
|
```
|
||||||
|
|
||||||
|
7) Export kubeconfig
|
||||||
|
```bash
|
||||||
|
# TODO: add export command
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
### Install
|
||||||
1) Install base services to cluster
|
1) Install base services to cluster
|
||||||
```bash
|
```bash
|
||||||
cd tofu
|
cd tofu
|
||||||
@@ -172,8 +211,8 @@ cd tofu
|
|||||||
cp terraform.tfvars.example terraform.tfvars
|
cp terraform.tfvars.example terraform.tfvars
|
||||||
# authenticate to your cluster/cloud as needed, then:
|
# authenticate to your cluster/cloud as needed, then:
|
||||||
tofu init
|
tofu init
|
||||||
tofu plan
|
tofu apply -exclude modules.cloudflare
|
||||||
tofu apply
|
tofu apply
|
||||||
```
|
```
|
||||||
|
|
||||||
2) Deploy the app using Helm
|
2) Deploy the app using Helm
|
||||||
@@ -217,28 +256,28 @@ open http://localhost:5173
|
|||||||
```
|
```
|
||||||
|
|
||||||
## Testing Instructions
|
## Testing Instructions
|
||||||
|
The tests are located in 7project/backend/tests directory
|
||||||
|
If you want to test locally, you have to have the DB running locally as well (start the docker compose in /backend).
|
||||||
|
```bash
|
||||||
|
cd backend
|
||||||
|
```
|
||||||
|
|
||||||
### Unit Tests
|
### Unit Tests
|
||||||
|
There are only 3 basic unit tests, since our services' logic is very simple
|
||||||
```bash
|
```bash
|
||||||
# Commands to run unit tests
|
pytest tests/test_unit_user_service.py
|
||||||
# For example:
|
|
||||||
# go test ./...
|
|
||||||
# npm test
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Integration Tests
|
### Integration Tests
|
||||||
|
There are 11 basic integration tests, testing the individual backend API logic
|
||||||
```bash
|
```bash
|
||||||
# Commands to run integration tests
|
pytest tests/test_integration_app.py
|
||||||
# Any setup required for integration tests
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### End-to-End Tests
|
### End-to-End Tests
|
||||||
|
There are 7 e2e tests testing more complex app logic
|
||||||
```bash
|
```bash
|
||||||
# Commands to run e2e tests
|
pytest tests/test_e2e.py
|
||||||
# How to set up test environment
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Usage Examples
|
## Usage Examples
|
||||||
@@ -315,24 +354,24 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## Self-Assessment Table
|
## Progress Table
|
||||||
|
|
||||||
> Be honest and detailed in your assessments.
|
> Be honest and detailed in your assessments.
|
||||||
> This information is used for individual grading.
|
> This information is used for individual grading.
|
||||||
> Link to the specific commit on GitHub for each contribution.
|
> Link to the specific commit on GitHub for each contribution.
|
||||||
|
|
||||||
| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|
| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|
||||||
|-----------------------------------------------------------------------|-------------| ------------- |----------------|------------| ----------- |
|
|-----------------------------------------------------------------------|-------------| ------------- |------------|------------| ----------- |
|
||||||
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8#) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8#) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
||||||
| [Design Document](https://github.com/dat515-2025/Group-8/blob/main/6design/design.md) | Both | ✅ Complete | 2 Hours | Easy | [Any notes] |
|
| [Design Document](https://github.com/dat515-2025/Group-8/blob/main/6design/design.md) | Both | ✅ Complete | 4 Hours | Easy | [Any notes] |
|
||||||
| [Backend API Development](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/api) | Dejan | 🔄 In Progress | 10 hours | Medium | [Any notes] |
|
| [Backend API Development](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/api) | Dejan | ✅ Complete | 12 hours | Medium | [Any notes] |
|
||||||
| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | 🔄 In Progress | [X hours] | Medium | [Any notes] |
|
| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | 🔄 In Progress | [X hours] | Medium | [Any notes] |
|
||||||
| [Frontend Development](https://github.com/dat515-2025/Group-8/tree/main/7project/frontend) | Dejan | 🔄 In Progress | 7 hours so far | Medium | [Any notes] |
|
| [Frontend Development](https://github.com/dat515-2025/Group-8/tree/main/7project/frontend) | Dejan | ✅ Complete | 17 hours | Medium | [Any notes] |
|
||||||
| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | [X hours] | Easy | [Any notes] |
|
| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | [X hours] | Easy | [Any notes] |
|
||||||
| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | [Any notes] |
|
| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | [Any notes] |
|
||||||
| [Testing Implementation](https://github.com/dat515-2025/group-name) | Dejan | 🔄 In Progress | [X hours] | Medium | [Any notes] |
|
| [Testing Implementation](https://github.com/dat515-2025/group-name) | Dejan | ✅ Complete | 16 hours | Medium | [Any notes] |
|
||||||
| [Documentation](https://github.com/dat515-2025/group-name) | Both | 🔄 In Progress | [X hours] | Easy | [Any notes] |
|
| [Documentation](https://github.com/dat515-2025/group-name) | Both | 🔄 In Progress | [X hours] | Easy | [Any notes] |
|
||||||
| [Presentation Video](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Medium | [Any notes] |
|
| [Presentation Video](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Medium | [Any notes] |
|
||||||
|
|
||||||
**Legend**: ✅ Complete | 🔄 In Progress | ⏳ Pending | ❌ Not Started
|
**Legend**: ✅ Complete | 🔄 In Progress | ⏳ Pending | ❌ Not Started
|
||||||
|
|
||||||
@@ -353,15 +392,18 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
|||||||
|
|
||||||
### Dejan
|
### Dejan
|
||||||
|
|
||||||
| Date | Activity | Hours | Description |
|
| Date | Activity | Hours | Description |
|
||||||
|-----------------|----------------------|--------|----------------------------------------------------------------------------------|
|
|-----------------|----------------------|--------|---------------------------------------------------------------|
|
||||||
| 25.9. | Design | 2 | 6design |
|
| 25.9. | Design | 2 | 6design |
|
||||||
| 9.10 to 11.10. | Backend APIs | 10 | Implemented Backend APIs |
|
| 9.10 to 11.10. | Backend APIs | 12 | Implemented Backend APIs |
|
||||||
| 13.10 to 15.10. | Frontend Development | 7 | Created user interface mockups |
|
| 13.10 to 15.10. | Frontend Development | 8 | Created user interface mockups |
|
||||||
| Continually | Documantation | 5 | Documenting the dev process |
|
| Continually | Documentation | 6 | Documenting the dev process |
|
||||||
| 21.10 to 23.10 | Tests, forntend | 10 | Test basics, balance charts, and frontend improvement |
|
| 21.10 to 23.10 | Tests, frontend | 10 | Test basics, balance charts, and frontend improvement |
|
||||||
| 28.10 to 30.10 | Tests, forntend | 7 | Tests improvement with test database setup, UI fix and exchange rate integration |
|
| 28.10 to 30.10 | CI | 6 | Integrated tests with test database setup on github workflows |
|
||||||
| **Total** | | **41** | |
|
| 28.10 to 30.10 | Frontend | 7 | UI improvements and exchange rate API integration |
|
||||||
|
| 4.11 to 6.11 | Tests | 6 | Test fixes improvement, more integration and e2e |
|
||||||
|
| 4.11 to 6.11 | Frontend | 6 | Fixes, Improved UI, added support for mobile devices |
|
||||||
|
| **Total** | | **63** | |
|
||||||
|
|
||||||
|
|
||||||
### Group Total: [XXX.X] hours
|
### Group Total: [XXX.X] hours
|
||||||
|
|||||||
Reference in New Issue
Block a user