mirror of
https://github.com/dat515-2025/Group-8.git
synced 2026-03-22 06:57:47 +01:00
Compare commits
68 Commits
merge/prom
...
59d53967b0
| Author | SHA1 | Date | |
|---|---|---|---|
| 59d53967b0 | |||
| f3086f8c73 | |||
|
|
fd437b1caf | ||
| 96ebc27001 | |||
|
|
922651fdbf | ||
|
|
e164b185e0 | ||
|
|
186b4fd09a | ||
|
|
280d495335 | ||
|
|
e73233c90a | ||
|
|
aade78bf3f | ||
|
|
50e489a8e0 | ||
|
|
1679abb71f | ||
| 573404dead | |||
| d57dd82a64 | |||
| 50f37c1161 | |||
| ae22d2ee5f | |||
| 509608f8c9 | |||
| ed723d1d13 | |||
| b0dee5e289 | |||
| 640da2ee04 | |||
| ab9aefd140 | |||
|
|
4eaf46e77e | ||
|
|
a30ae4d010 | ||
|
|
ef26e88713 | ||
|
|
2e1dddb4f8 | ||
|
|
25e587cea8 | ||
|
|
3cdefc33fc | ||
|
|
5954e56956 | ||
|
|
8575ef8ff5 | ||
| c53e314b2a | |||
| c0bc44622f | |||
| 3d31ff4631 | |||
|
|
8b92b9bd18 | ||
|
|
3d26ed6a62 | ||
|
|
67b44539f2 | ||
|
|
ff9cc712db | ||
| dc7ce9e6a1 | |||
| 188cdf5727 | |||
| 4cf0d2a981 | |||
| 9986cce8f9 | |||
| b3b5717e9e | |||
|
|
1da927dc07 | ||
| 537d050080 | |||
| 1e4f342176 | |||
| c62e0adcf3 | |||
| 24d86abfc4 | |||
| 21305f18e2 | |||
| e708f7b18b | |||
| f58083870f | |||
| ca8287cd8b | |||
|
|
ed3e6329dd | ||
|
|
a214e2cd8b | ||
| 6c8d2202b5 | |||
|
|
b480734fee | ||
|
|
8b301c386e | ||
|
|
733e7a8918 | ||
|
|
524e7a6f98 | ||
|
|
0c9882e9b3 | ||
|
|
72494c4aae | ||
|
|
60560dea99 | ||
|
|
a9b2aba55a | ||
|
|
36b1fe887b | ||
|
|
8543c72730 | ||
| 24087c2810 | |||
|
|
6818b1f649 | ||
| c864e753c9 | |||
| b4a453be04 | |||
| d290664352 |
8
.github/workflows/deploy-pr.yaml
vendored
8
.github/workflows/deploy-pr.yaml
vendored
@@ -33,7 +33,7 @@ jobs:
|
||||
runner: vhs
|
||||
mode: pr
|
||||
pr_number: ${{ github.event.pull_request.number }}
|
||||
base_domain: ${{ vars.DEV_BASE_DOMAIN }}
|
||||
base_domain: ${{ vars.PROD_DOMAIN }}
|
||||
secrets: inherit
|
||||
|
||||
frontend:
|
||||
@@ -77,7 +77,7 @@ jobs:
|
||||
|
||||
- name: Helm upgrade/install PR preview
|
||||
env:
|
||||
DEV_BASE_DOMAIN: ${{ secrets.BASE_DOMAIN }}
|
||||
DEV_BASE_DOMAIN: ${{ vars.BASE_DOMAIN }}
|
||||
RABBITMQ_PASSWORD: ${{ secrets.PROD_RABBITMQ_PASSWORD }}
|
||||
DB_PASSWORD: ${{ secrets.PROD_DB_PASSWORD }}
|
||||
DIGEST: ${{ needs.build.outputs.digest }}
|
||||
@@ -85,6 +85,7 @@ jobs:
|
||||
DOMAIN_SCHEME: "${{ needs.get_urls.outputs.backend_url_scheme }}"
|
||||
FRONTEND_DOMAIN: "${{ needs.get_urls.outputs.frontend_url }}"
|
||||
FRONTEND_DOMAIN_SCHEME: "${{ needs.get_urls.outputs.frontend_url_scheme }}"
|
||||
UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
|
||||
run: |
|
||||
PR=${{ github.event.pull_request.number }}
|
||||
RELEASE=myapp-pr-$PR
|
||||
@@ -102,7 +103,8 @@ jobs:
|
||||
--set-string rabbitmq.password="$RABBITMQ_PASSWORD" \
|
||||
--set-string database.password="$DB_PASSWORD" \
|
||||
--set-string database.encryptionSecret="$PR" \
|
||||
--set-string app.name="finance-tracker-pr-$PR"
|
||||
--set-string app.name="finance-tracker-pr-$PR" \
|
||||
--set-string unirate.key="$UNIRATE_API_KEY"
|
||||
|
||||
- name: Post preview URLs as PR comment
|
||||
uses: actions/github-script@v7
|
||||
|
||||
20
.github/workflows/deploy-prod.yaml
vendored
20
.github/workflows/deploy-prod.yaml
vendored
@@ -27,6 +27,7 @@ jobs:
|
||||
|
||||
build:
|
||||
name: Build and push image (reusable)
|
||||
needs: [test]
|
||||
uses: ./.github/workflows/build-image.yaml
|
||||
with:
|
||||
mode: prod
|
||||
@@ -36,6 +37,7 @@ jobs:
|
||||
|
||||
get_urls:
|
||||
name: Generate Production URLs
|
||||
needs: [test]
|
||||
uses: ./.github/workflows/url_generator.yml
|
||||
with:
|
||||
mode: prod
|
||||
@@ -92,6 +94,14 @@ jobs:
|
||||
CSAS_CLIENT_ID: ${{ secrets.CSAS_CLIENT_ID }}
|
||||
CSAS_CLIENT_SECRET: ${{ secrets.CSAS_CLIENT_SECRET }}
|
||||
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
|
||||
SMTP_HOST: ${{ secrets.SMTP_HOST }}
|
||||
SMTP_PORT: ${{ secrets.SMTP_PORT }}
|
||||
SMTP_USERNAME: ${{ secrets.SMTP_USERNAME }}
|
||||
SMTP_PASSWORD: ${{ secrets.SMTP_PASSWORD }}
|
||||
SMTP_USE_TLS: ${{ secrets.SMTP_USE_TLS }}
|
||||
SMTP_USE_SSL: ${{ secrets.SMTP_USE_SSL }}
|
||||
SMTP_FROM: ${{ secrets.SMTP_FROM }}
|
||||
UNIRATE_API_KEY: ${{ secrets.UNIRATE_API_KEY }}
|
||||
run: |
|
||||
helm upgrade --install myapp ./7project/charts/myapp-chart \
|
||||
-n prod --create-namespace \
|
||||
@@ -111,4 +121,12 @@ jobs:
|
||||
--set-string oauth.csas.clientId="$CSAS_CLIENT_ID" \
|
||||
--set-string oauth.csas.clientSecret="$CSAS_CLIENT_SECRET" \
|
||||
--set-string sentry_dsn="$SENTRY_DSN" \
|
||||
--set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}"
|
||||
--set-string database.encryptionSecret="${{ secrets.PROD_DB_ENCRYPTION_KEY }}" \
|
||||
--set-string smtp.host="$SMTP_HOST" \
|
||||
--set smtp.port="$SMTP_PORT" \
|
||||
--set-string smtp.username="$SMTP_USERNAME" \
|
||||
--set-string smtp.password="$SMTP_PASSWORD" \
|
||||
--set-string smtp.tls="$SMTP_USE_TLS" \
|
||||
--set-string smtp.ssl="$SMTP_USE_SSL" \
|
||||
--set-string smtp.from="$SMTP_FROM" \
|
||||
--set-string unirate.key="$UNIRATE_API_KEY"
|
||||
5
.github/workflows/run-tests.yml
vendored
5
.github/workflows/run-tests.yml
vendored
@@ -31,6 +31,9 @@ jobs:
|
||||
MARIADB_DB: group_project
|
||||
MARIADB_USER: appuser
|
||||
MARIADB_PASSWORD: apppass
|
||||
# Ensure the application uses MariaDB (async) during tests
|
||||
DATABASE_URL: mysql+asyncmy://appuser:apppass@127.0.0.1:3306/group_project
|
||||
DISABLE_METRICS: "1"
|
||||
|
||||
steps:
|
||||
- name: Check out repository code
|
||||
@@ -57,5 +60,7 @@ jobs:
|
||||
working-directory: ./7project/backend
|
||||
|
||||
- name: Run tests with pytest
|
||||
env:
|
||||
PYTEST_RUN_CONFIG: "True"
|
||||
run: pytest
|
||||
working-directory: ./7project/backend
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM python:3.11-slim
|
||||
FROM python:3.11-trixie
|
||||
|
||||
WORKDIR /app
|
||||
COPY requirements.txt .
|
||||
|
||||
66
7project/backend/app/api/exchange_rates.py
Normal file
66
7project/backend/app/api/exchange_rates.py
Normal file
@@ -0,0 +1,66 @@
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter, HTTPException, Query, status
|
||||
|
||||
router = APIRouter(prefix="/exchange-rates", tags=["exchange-rates"])
|
||||
|
||||
|
||||
@router.get("", status_code=status.HTTP_200_OK)
|
||||
async def get_exchange_rates(symbols: str = Query("EUR,USD,NOK", description="Comma-separated currency codes to fetch vs CZK")):
|
||||
"""
|
||||
Fetch exchange rates from UniRate API on the backend and return CZK-per-target rates.
|
||||
- Always requests CZK in addition to requested symbols to compute conversion from USD-base.
|
||||
- Returns a list of {currencyCode, rate} where rate is CZK per 1 unit of the target currency.
|
||||
"""
|
||||
api_key = os.getenv("UNIRATE_API_KEY")
|
||||
if not api_key:
|
||||
raise HTTPException(status_code=500, detail="Server is not configured with UNIRATE_API_KEY")
|
||||
|
||||
# Ensure CZK is included for conversion
|
||||
requested = [s.strip().upper() for s in symbols.split(",") if s.strip()]
|
||||
if "CZK" not in requested:
|
||||
requested.append("CZK")
|
||||
query_symbols = ",".join(sorted(set(requested)))
|
||||
|
||||
url = f"https://unirateapi.com/api/rates?api_key={api_key}&symbols={query_symbols}"
|
||||
|
||||
try:
|
||||
async with httpx.AsyncClient(timeout=httpx.Timeout(15.0)) as client:
|
||||
resp = await client.get(url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
raise HTTPException(status_code=502, detail=f"Upstream UniRate error: HTTP {resp.status_code}")
|
||||
data = resp.json()
|
||||
except httpx.HTTPError as e:
|
||||
raise HTTPException(status_code=502, detail=f"Failed to contact UniRate: {str(e)}")
|
||||
|
||||
# Validate response structure
|
||||
rates = data.get("rates") if isinstance(data, dict) else None
|
||||
base = data.get("base") if isinstance(data, dict) else None
|
||||
if not rates or base != "USD" or "CZK" not in rates:
|
||||
# Prefer upstream message when available
|
||||
detail = data.get("message") if isinstance(data, dict) else None
|
||||
if not detail and isinstance(data, dict):
|
||||
err = data.get("error")
|
||||
if isinstance(err, dict):
|
||||
detail = err.get("info")
|
||||
raise HTTPException(status_code=502, detail=detail or "Invalid response from UniRate API")
|
||||
|
||||
czk_per_usd = rates["CZK"]
|
||||
|
||||
# Build result excluding CZK itself
|
||||
result = []
|
||||
for code in requested:
|
||||
if code == "CZK":
|
||||
continue
|
||||
target_per_usd = rates.get(code)
|
||||
if target_per_usd in (None, 0):
|
||||
# Skip unavailable or invalid
|
||||
continue
|
||||
czk_per_target = czk_per_usd / target_per_usd
|
||||
result.append({"currencyCode": code, "rate": czk_per_target})
|
||||
|
||||
return result
|
||||
|
||||
|
||||
116
7project/backend/app/api/mock_bank.py
Normal file
116
7project/backend/app/api/mock_bank.py
Normal file
@@ -0,0 +1,116 @@
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Optional
|
||||
import random
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from pydantic import BaseModel, Field, conint, confloat, validator
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.services.db import get_async_session
|
||||
from app.services.user_service import current_active_user
|
||||
from app.models.user import User
|
||||
from app.models.transaction import Transaction
|
||||
from app.models.categories import Category
|
||||
from app.schemas.transaction import TransactionRead
|
||||
|
||||
router = APIRouter(prefix="/mock-bank", tags=["mock-bank"])
|
||||
|
||||
|
||||
class GenerateOptions(BaseModel):
|
||||
count: conint(strict=True, gt=0) = Field(default=10, description="Number of transactions to generate")
|
||||
minAmount: confloat(strict=True) = Field(default=-200.0, description="Minimum transaction amount")
|
||||
maxAmount: confloat(strict=True) = Field(default=200.0, description="Maximum transaction amount")
|
||||
startDate: Optional[str] = Field(None, description="Earliest date (YYYY-MM-DD)")
|
||||
endDate: Optional[str] = Field(None, description="Latest date (YYYY-MM-DD)")
|
||||
categoryIds: List[int] = Field(default_factory=list, description="Optional category IDs to assign randomly")
|
||||
|
||||
@validator("maxAmount")
|
||||
def _validate_amounts(cls, v, values):
|
||||
min_amt = values.get("minAmount")
|
||||
if min_amt is not None and v < min_amt:
|
||||
raise ValueError("maxAmount must be greater than or equal to minAmount")
|
||||
return v
|
||||
|
||||
@validator("endDate")
|
||||
def _validate_dates(cls, v, values):
|
||||
sd = values.get("startDate")
|
||||
if v and sd:
|
||||
try:
|
||||
ed = datetime.strptime(v, "%Y-%m-%d").date()
|
||||
st = datetime.strptime(sd, "%Y-%m-%d").date()
|
||||
except ValueError:
|
||||
raise ValueError("Invalid date format, expected YYYY-MM-DD")
|
||||
if ed < st:
|
||||
raise ValueError("endDate must be greater than or equal to startDate")
|
||||
return v
|
||||
|
||||
|
||||
class GeneratedTransaction(BaseModel):
|
||||
amount: float
|
||||
date: str # YYYY-MM-DD
|
||||
category_ids: List[int] = []
|
||||
description: Optional[str] = None
|
||||
|
||||
|
||||
@router.post("/generate", response_model=List[GeneratedTransaction])
|
||||
async def generate_mock_transactions(
|
||||
options: GenerateOptions,
|
||||
user: User = Depends(current_active_user),
|
||||
):
|
||||
# Seed randomness per user to make results less erratic across multiple calls in quick succession
|
||||
seed = int(datetime.utcnow().timestamp()) ^ int(user.id)
|
||||
rnd = random.Random(seed)
|
||||
|
||||
# Determine date range
|
||||
if options.startDate:
|
||||
start_date = datetime.strptime(options.startDate, "%Y-%m-%d").date()
|
||||
else:
|
||||
start_date = (datetime.utcnow() - timedelta(days=365)).date()
|
||||
if options.endDate:
|
||||
end_date = datetime.strptime(options.endDate, "%Y-%m-%d").date()
|
||||
else:
|
||||
end_date = datetime.utcnow().date()
|
||||
|
||||
span_days = max(0, (end_date - start_date).days)
|
||||
|
||||
results: List[GeneratedTransaction] = []
|
||||
for _ in range(options.count):
|
||||
amount = round(rnd.uniform(options.minAmount, options.maxAmount), 2)
|
||||
# Pick a random date in the inclusive range
|
||||
rand_day = rnd.randint(0, span_days) if span_days > 0 else 0
|
||||
tx_date = start_date + timedelta(days=rand_day)
|
||||
# Pick category randomly from provided list, or empty
|
||||
if options.categoryIds:
|
||||
cat = [rnd.choice(options.categoryIds)]
|
||||
else:
|
||||
cat = []
|
||||
# Optional simple description for flavor
|
||||
desc = None
|
||||
# Assemble
|
||||
results.append(GeneratedTransaction(
|
||||
amount=amount,
|
||||
date=tx_date.isoformat(),
|
||||
category_ids=cat,
|
||||
description=desc,
|
||||
))
|
||||
|
||||
return results
|
||||
|
||||
|
||||
@router.get("/scrape")
|
||||
async def scrape_mock_bank():
|
||||
# 80% of the time: nothing to scrape
|
||||
if random.random() < 0.8:
|
||||
return []
|
||||
|
||||
transactions = []
|
||||
count = random.randint(1, 10)
|
||||
for _ in range(count):
|
||||
transactions.append({
|
||||
"amount": round(random.uniform(-200.0, 200.0), 2),
|
||||
"date": (datetime.utcnow().date() - timedelta(days=random.randint(0, 30))).isoformat(),
|
||||
"description": "Mock transaction",
|
||||
})
|
||||
|
||||
return transactions
|
||||
@@ -1,12 +1,17 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pythonjsonlogger import jsonlogger
|
||||
|
||||
from fastapi import Depends, FastAPI
|
||||
from fastapi import Depends, FastAPI, HTTPException
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from prometheus_fastapi_instrumentator import Instrumentator, metrics
|
||||
from starlette.requests import Request
|
||||
|
||||
from app.services.prometheus import number_of_users, number_of_transactions
|
||||
|
||||
from app.services import bank_scraper
|
||||
from app.workers.celery_tasks import load_transactions, load_all_transactions
|
||||
from app.models.user import User, OAuthAccount
|
||||
@@ -16,6 +21,7 @@ from app.api.auth import router as auth_router
|
||||
from app.api.csas import router as csas_router
|
||||
from app.api.categories import router as categories_router
|
||||
from app.api.transactions import router as transactions_router
|
||||
from app.api.exchange_rates import router as exchange_rates_router
|
||||
from app.services.user_service import auth_backend, current_active_verified_user, fastapi_users, get_oauth_provider, \
|
||||
UserManager, get_jwt_strategy
|
||||
from app.core.security import extract_bearer_token, is_token_revoked, decode_and_verify_jwt
|
||||
@@ -24,7 +30,8 @@ from app.services.user_service import SECRET
|
||||
from fastapi import FastAPI
|
||||
import sentry_sdk
|
||||
from fastapi_users.db import SQLAlchemyUserDatabase
|
||||
from app.core.db import async_session_maker
|
||||
from app.core.db import async_session_maker, engine
|
||||
from app.core.base import Base
|
||||
|
||||
sentry_sdk.init(
|
||||
dsn=os.getenv("SENTRY_DSN"),
|
||||
@@ -46,19 +53,40 @@ fastApi.add_middleware(
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
prometheus = Instrumentator().instrument(fastApi)
|
||||
|
||||
prometheus.expose(
|
||||
fastApi,
|
||||
endpoint="/metrics",
|
||||
include_in_schema=True,
|
||||
)
|
||||
if not os.getenv("PYTEST_RUN_CONFIG"):
|
||||
prometheus = Instrumentator().instrument(fastApi)
|
||||
# Register custom metrics
|
||||
prometheus.add(number_of_users()).add(number_of_transactions())
|
||||
prometheus.expose(
|
||||
fastApi,
|
||||
endpoint="/metrics",
|
||||
include_in_schema=True,
|
||||
)
|
||||
|
||||
fastApi.include_router(auth_router)
|
||||
fastApi.include_router(categories_router)
|
||||
fastApi.include_router(transactions_router)
|
||||
fastApi.include_router(exchange_rates_router)
|
||||
from app.api.mock_bank import router as mock_bank_router
|
||||
fastApi.include_router(mock_bank_router)
|
||||
|
||||
logging.basicConfig(filename='app.log', level=logging.INFO, format='%(asctime)s %(message)s')
|
||||
for h in list(logging.root.handlers):
|
||||
logging.root.removeHandler(h)
|
||||
|
||||
_log_handler = logging.StreamHandler(sys.stdout)
|
||||
_formatter = jsonlogger.JsonFormatter(
|
||||
fmt='%(asctime)s %(levelname)s %(name)s %(message)s %(pathname)s %(lineno)d %(process)d %(thread)d'
|
||||
)
|
||||
_log_handler.setFormatter(_formatter)
|
||||
|
||||
logging.root.setLevel(logging.INFO)
|
||||
logging.root.addHandler(_log_handler)
|
||||
|
||||
for _name in ("uvicorn", "uvicorn.error", "uvicorn.access"):
|
||||
_logger = logging.getLogger(_name)
|
||||
_logger.handlers = [_log_handler]
|
||||
_logger.propagate = True
|
||||
|
||||
|
||||
@fastApi.middleware("http")
|
||||
@@ -95,7 +123,7 @@ async def log_traffic(request: Request, call_next):
|
||||
"process_time": process_time,
|
||||
"client_host": client_host
|
||||
}
|
||||
logging.info(str(log_params))
|
||||
logging.getLogger(__name__).info("http_request", extra=log_params)
|
||||
return response
|
||||
|
||||
|
||||
@@ -137,16 +165,12 @@ async def authenticated_route(user: User = Depends(current_active_verified_user)
|
||||
return {"message": f"Hello {user.email}!"}
|
||||
|
||||
|
||||
@fastApi.get("/debug/scrape/csas/all", tags=["debug"])
|
||||
async def debug_scrape_csas_all():
|
||||
logging.info("[Debug] Queueing CSAS scrape for all users via HTTP endpoint (Celery)")
|
||||
@fastApi.get("/_cron", include_in_schema=False)
|
||||
async def handle_cron(request: Request):
|
||||
# endpoint accessed by Clodflare => return 404
|
||||
if request.headers.get("cf-connecting-ip"):
|
||||
raise HTTPException(status_code=404)
|
||||
|
||||
logging.info("[Cron] Triggering scheduled tasks via HTTP endpoint")
|
||||
task = load_all_transactions.delay()
|
||||
return {"status": "queued", "action": "csas_scrape_all", "task_id": getattr(task, 'id', None)}
|
||||
|
||||
|
||||
@fastApi.post("/debug/scrape/csas/{user_id}", tags=["debug"])
|
||||
async def debug_scrape_csas_user(user_id: str, user: User = Depends(current_active_verified_user)):
|
||||
logging.info("[Debug] Queueing CSAS scrape for single user via HTTP endpoint (Celery) | user_id=%s", user_id)
|
||||
task = load_transactions.delay(user_id)
|
||||
return {"status": "queued", "action": "csas_scrape_single", "user_id": user_id,
|
||||
"task_id": getattr(task, 'id', None)}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import os
|
||||
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from app.core.base import Base
|
||||
|
||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||
@@ -23,6 +25,7 @@ host_env = os.getenv("MARIADB_HOST", "localhost")
|
||||
ssl_enabled = host_env not in {"localhost", "127.0.0.1"}
|
||||
connect_args = {"ssl": {"ssl": True}} if ssl_enabled else {}
|
||||
|
||||
# Async engine/session for the async parts of the app
|
||||
engine = create_async_engine(
|
||||
DATABASE_URL,
|
||||
pool_pre_ping=True,
|
||||
@@ -30,3 +33,13 @@ engine = create_async_engine(
|
||||
connect_args=connect_args,
|
||||
)
|
||||
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)
|
||||
|
||||
# Synchronous engine/session for sync utilities (e.g., bank_scraper)
|
||||
SYNC_DATABASE_URL = DATABASE_URL.replace("+asyncmy", "+pymysql")
|
||||
engine_sync = create_engine(
|
||||
SYNC_DATABASE_URL,
|
||||
pool_pre_ping=True,
|
||||
echo=os.getenv("SQL_ECHO", "0") == "1",
|
||||
connect_args=connect_args,
|
||||
)
|
||||
sync_session_maker = sessionmaker(bind=engine_sync, expire_on_commit=False)
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import uuid
|
||||
from typing import Optional
|
||||
from typing import Optional, Dict, Any
|
||||
from fastapi_users import schemas
|
||||
|
||||
class UserRead(schemas.BaseUser[uuid.UUID]):
|
||||
first_name: Optional[str] = None
|
||||
last_name: Optional[str] = None
|
||||
config: Optional[Dict[str, Any]] = None
|
||||
|
||||
class UserCreate(schemas.BaseUserCreate):
|
||||
first_name: Optional[str] = None
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from os.path import dirname, join
|
||||
from time import strptime
|
||||
from uuid import UUID
|
||||
@@ -7,7 +8,7 @@ from uuid import UUID
|
||||
import httpx
|
||||
from sqlalchemy import select
|
||||
|
||||
from app.core.db import async_session_maker
|
||||
from app.core.db import sync_session_maker
|
||||
from app.models.transaction import Transaction
|
||||
from app.models.user import User
|
||||
|
||||
@@ -20,26 +21,78 @@ CERTS = (
|
||||
)
|
||||
|
||||
|
||||
async def aload_ceska_sporitelna_transactions(user_id: str) -> None:
|
||||
def load_mock_bank_transactions(user_id: str) -> None:
|
||||
try:
|
||||
uid = UUID(str(user_id))
|
||||
except Exception:
|
||||
logger.error("Invalid user_id provided to bank_scraper (async): %r", user_id)
|
||||
logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
|
||||
return
|
||||
|
||||
await _aload_ceska_sporitelna_transactions(uid)
|
||||
_load_mock_bank_transactions(uid)
|
||||
|
||||
|
||||
async def aload_all_ceska_sporitelna_transactions() -> None:
|
||||
async with async_session_maker() as session:
|
||||
result = await session.execute(select(User))
|
||||
users = result.unique().scalars().all()
|
||||
def load_all_mock_bank_transactions() -> None:
|
||||
with sync_session_maker() as session:
|
||||
users = session.execute(select(User)).unique().scalars().all()
|
||||
logger.info("[BankScraper] Starting Mock Bank scrape for all users | count=%d", len(users))
|
||||
|
||||
processed = 0
|
||||
for user in users:
|
||||
try:
|
||||
_load_mock_bank_transactions(user.id)
|
||||
processed += 1
|
||||
except Exception:
|
||||
logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
|
||||
getattr(user, 'email', None))
|
||||
logger.info("[BankScraper] Finished Mock Bank scrape for all users | processed=%d", processed)
|
||||
|
||||
|
||||
def _load_mock_bank_transactions(user_id: UUID) -> None:
|
||||
with sync_session_maker() as session:
|
||||
user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
|
||||
if user is None:
|
||||
logger.warning("User not found for id=%s", user_id)
|
||||
return
|
||||
|
||||
transactions = []
|
||||
with httpx.Client() as client:
|
||||
response = client.get(f"{os.getenv('APP_POD_URL')}/mock-bank/scrape")
|
||||
if response.status_code != httpx.codes.OK:
|
||||
return
|
||||
for transaction in response.json():
|
||||
transactions.append(
|
||||
Transaction(
|
||||
amount=transaction["amount"],
|
||||
description=transaction.get("description"),
|
||||
date=strptime(transaction["date"], "%Y-%m-%d"),
|
||||
user_id=user_id,
|
||||
)
|
||||
)
|
||||
|
||||
for transaction in transactions:
|
||||
session.add(transaction)
|
||||
session.commit()
|
||||
|
||||
|
||||
def load_ceska_sporitelna_transactions(user_id: str) -> None:
|
||||
try:
|
||||
uid = UUID(str(user_id))
|
||||
except Exception:
|
||||
logger.error("Invalid user_id provided to bank_scraper (sync): %r", user_id)
|
||||
return
|
||||
|
||||
_load_ceska_sporitelna_transactions(uid)
|
||||
|
||||
|
||||
def load_all_ceska_sporitelna_transactions() -> None:
|
||||
with sync_session_maker() as session:
|
||||
users = session.execute(select(User)).unique().scalars().all()
|
||||
logger.info("[BankScraper] Starting CSAS scrape for all users | count=%d", len(users))
|
||||
|
||||
processed = 0
|
||||
for user in users:
|
||||
try:
|
||||
await _aload_ceska_sporitelna_transactions(user.id)
|
||||
_load_ceska_sporitelna_transactions(user.id)
|
||||
processed += 1
|
||||
except Exception:
|
||||
logger.exception("[BankScraper] Error scraping for user id=%s email=%s", user.id,
|
||||
@@ -47,10 +100,9 @@ async def aload_all_ceska_sporitelna_transactions() -> None:
|
||||
logger.info("[BankScraper] Finished CSAS scrape for all users | processed=%d", processed)
|
||||
|
||||
|
||||
async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||
async with (async_session_maker() as session):
|
||||
result = await session.execute(select(User).where(User.id == user_id))
|
||||
user: User = result.unique().scalar_one_or_none()
|
||||
def _load_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||
with sync_session_maker() as session:
|
||||
user: User | None = session.execute(select(User).where(User.id == user_id)).unique().scalar_one_or_none()
|
||||
if user is None:
|
||||
logger.warning("User not found for id=%s", user_id)
|
||||
return
|
||||
@@ -65,8 +117,8 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||
|
||||
accounts = []
|
||||
try:
|
||||
async with httpx.AsyncClient(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
|
||||
response = await client.get(
|
||||
with httpx.Client(cert=CERTS, timeout=httpx.Timeout(20.0)) as client:
|
||||
response = client.get(
|
||||
"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts?size=10&page=0&sort=iban&order=desc",
|
||||
headers={
|
||||
"Authorization": f"Bearer {cfg['access_token']}",
|
||||
@@ -77,7 +129,7 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||
if response.status_code != httpx.codes.OK:
|
||||
return
|
||||
|
||||
for account in response.json()["accounts"]:
|
||||
for account in response.json().get("accounts", []):
|
||||
accounts.append(account)
|
||||
|
||||
except (httpx.HTTPError,) as e:
|
||||
@@ -85,11 +137,13 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||
return
|
||||
|
||||
for account in accounts:
|
||||
id = account["id"]
|
||||
acc_id = account.get("id")
|
||||
if not acc_id:
|
||||
continue
|
||||
|
||||
url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
|
||||
async with httpx.AsyncClient(cert=CERTS) as client:
|
||||
response = await client.get(
|
||||
url = f"https://webapi.developers.erstegroup.com/api/csas/sandbox/v4/account-information/my/accounts/{acc_id}/transactions?size=100&page=0&sort=bookingdate&order=desc"
|
||||
with httpx.Client(cert=CERTS) as client:
|
||||
response = client.get(
|
||||
url,
|
||||
headers={
|
||||
"Authorization": f"Bearer {cfg['access_token']}",
|
||||
@@ -100,7 +154,7 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||
if response.status_code != httpx.codes.OK:
|
||||
continue
|
||||
|
||||
transactions = response.json()["transactions"]
|
||||
transactions = response.json().get("transactions", [])
|
||||
|
||||
for transaction in transactions:
|
||||
description = transaction.get("entryDetails", {}).get("transactionDetails", {}).get(
|
||||
@@ -108,9 +162,12 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||
date_str = transaction.get("bookingDate", {}).get("date")
|
||||
date = strptime(date_str, "%Y-%m-%d") if date_str else None
|
||||
amount = transaction.get("amount", {}).get("value")
|
||||
if transaction.get("creditDebitIndicator") == "DBIT":
|
||||
if transaction.get("creditDebitIndicator") == "DBIT" and amount is not None:
|
||||
amount = -abs(amount)
|
||||
|
||||
if amount is None:
|
||||
continue
|
||||
|
||||
obj = Transaction(
|
||||
amount=amount,
|
||||
description=description,
|
||||
@@ -118,7 +175,4 @@ async def _aload_ceska_sporitelna_transactions(user_id: UUID) -> None:
|
||||
user_id=user_id,
|
||||
)
|
||||
session.add(obj)
|
||||
await session.commit()
|
||||
|
||||
pass
|
||||
pass
|
||||
session.commit()
|
||||
|
||||
48
7project/backend/app/services/prometheus.py
Normal file
48
7project/backend/app/services/prometheus.py
Normal file
@@ -0,0 +1,48 @@
|
||||
from typing import Callable
|
||||
from prometheus_fastapi_instrumentator.metrics import Info
|
||||
from prometheus_client import Gauge
|
||||
from sqlalchemy import select, func
|
||||
|
||||
from app.core.db import async_session_maker
|
||||
from app.models.transaction import Transaction
|
||||
from app.models.user import User
|
||||
|
||||
|
||||
def number_of_users() -> Callable[[Info], None]:
|
||||
METRIC = Gauge(
|
||||
"number_of_users_total",
|
||||
"Number of registered users.",
|
||||
labelnames=("users",)
|
||||
)
|
||||
|
||||
async def instrumentation(info: Info) -> None:
|
||||
try:
|
||||
async with async_session_maker() as session:
|
||||
result = await session.execute(select(func.count(User.id)))
|
||||
user_count = result.scalar_one() or 0
|
||||
except Exception:
|
||||
# In case of DB errors, avoid crashing metrics endpoint
|
||||
user_count = 0
|
||||
METRIC.labels(users="total").set(user_count)
|
||||
|
||||
return instrumentation
|
||||
|
||||
|
||||
def number_of_transactions() -> Callable[[Info], None]:
|
||||
METRIC = Gauge(
|
||||
"number_of_transactions_total",
|
||||
"Number of transactions stored.",
|
||||
labelnames=("transactions",)
|
||||
)
|
||||
|
||||
async def instrumentation(info: Info) -> None:
|
||||
try:
|
||||
async with async_session_maker() as session:
|
||||
result = await session.execute(select(func.count()).select_from(Transaction))
|
||||
transaction_count = result.scalar_one() or 0
|
||||
except Exception:
|
||||
# In case of DB errors, avoid crashing metrics endpoint
|
||||
transaction_count = 0
|
||||
METRIC.labels(transactions="total").set(transaction_count)
|
||||
|
||||
return instrumentation
|
||||
@@ -1,9 +1,10 @@
|
||||
import logging
|
||||
import asyncio
|
||||
|
||||
from celery import shared_task
|
||||
import os
|
||||
import smtplib
|
||||
from email.message import EmailMessage
|
||||
|
||||
import app.services.bank_scraper
|
||||
from app.celery_app import celery_app
|
||||
|
||||
logger = logging.getLogger("celery_tasks")
|
||||
if not logger.handlers:
|
||||
@@ -12,96 +13,74 @@ if not logger.handlers:
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
|
||||
def run_coro(coro) -> None:
|
||||
"""Run an async coroutine in a fresh event loop without using run_until_complete.
|
||||
Primary strategy runs in a new loop in the current thread. If that fails due to
|
||||
debugger patches (e.g., Bad file descriptor from pydevd_nest_asyncio), fall back
|
||||
to running in a dedicated thread with its own event loop.
|
||||
"""
|
||||
import threading
|
||||
|
||||
def _cleanup_loop(loop):
|
||||
try:
|
||||
pending = [t for t in asyncio.all_tasks(loop) if not t.done()]
|
||||
for t in pending:
|
||||
t.cancel()
|
||||
if pending:
|
||||
loop.run_until_complete(asyncio.gather(*pending, return_exceptions=True))
|
||||
except Exception:
|
||||
pass
|
||||
finally:
|
||||
try:
|
||||
loop.close()
|
||||
finally:
|
||||
asyncio.set_event_loop(None)
|
||||
|
||||
# First attempt: Run in current thread with a fresh event loop
|
||||
try:
|
||||
loop = asyncio.get_event_loop_policy().new_event_loop()
|
||||
try:
|
||||
asyncio.set_event_loop(loop)
|
||||
task = loop.create_task(coro)
|
||||
task.add_done_callback(lambda _t: loop.stop())
|
||||
loop.run_forever()
|
||||
exc = task.exception()
|
||||
if exc:
|
||||
raise exc
|
||||
return
|
||||
finally:
|
||||
_cleanup_loop(loop)
|
||||
except OSError as e:
|
||||
logger.warning("run_coro primary strategy failed (%s). Falling back to thread runner.", e)
|
||||
except Exception:
|
||||
# For any other unexpected errors, try thread fallback as well
|
||||
logger.exception("run_coro primary strategy raised; attempting thread fallback")
|
||||
|
||||
# Fallback: Run in a dedicated thread with its own event loop
|
||||
error = {"exc": None}
|
||||
|
||||
def _thread_target():
|
||||
loop = asyncio.new_event_loop()
|
||||
try:
|
||||
asyncio.set_event_loop(loop)
|
||||
task = loop.create_task(coro)
|
||||
task.add_done_callback(lambda _t: loop.stop())
|
||||
loop.run_forever()
|
||||
exc = task.exception()
|
||||
if exc:
|
||||
error["exc"] = exc
|
||||
finally:
|
||||
_cleanup_loop(loop)
|
||||
|
||||
th = threading.Thread(target=_thread_target, name="celery-async-runner", daemon=True)
|
||||
th.start()
|
||||
th.join()
|
||||
if error["exc"] is not None:
|
||||
raise error["exc"]
|
||||
|
||||
|
||||
@shared_task(name="workers.send_email")
|
||||
@celery_app.task(name="workers.send_email")
|
||||
def send_email(to: str, subject: str, body: str) -> None:
|
||||
if not (to and subject and body):
|
||||
logger.error("Email task missing fields. to=%r subject=%r body_len=%r", to, subject, len(body) if body else 0)
|
||||
return
|
||||
|
||||
# Placeholder for real email sending logic
|
||||
logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
|
||||
host = os.getenv("SMTP_HOST")
|
||||
if not host:
|
||||
logger.error("SMTP_HOST is not configured; cannot send email")
|
||||
return
|
||||
|
||||
# Configuration
|
||||
port = int(os.getenv("SMTP_PORT", "25"))
|
||||
username = os.getenv("SMTP_USERNAME")
|
||||
password = os.getenv("SMTP_PASSWORD")
|
||||
use_tls = os.getenv("SMTP_USE_TLS", "0").lower() in {"1", "true", "yes"}
|
||||
use_ssl = os.getenv("SMTP_USE_SSL", "0").lower() in {"1", "true", "yes"}
|
||||
timeout = int(os.getenv("SMTP_TIMEOUT", "10"))
|
||||
mail_from = os.getenv("SMTP_FROM") or username or "noreply@localhost"
|
||||
|
||||
# Build message
|
||||
msg = EmailMessage()
|
||||
msg["To"] = to
|
||||
msg["From"] = mail_from
|
||||
msg["Subject"] = subject
|
||||
msg.set_content(body)
|
||||
|
||||
try:
|
||||
if use_ssl:
|
||||
with smtplib.SMTP_SSL(host=host, port=port, timeout=timeout) as smtp:
|
||||
if username and password:
|
||||
smtp.login(username, password)
|
||||
smtp.send_message(msg)
|
||||
else:
|
||||
with smtplib.SMTP(host=host, port=port, timeout=timeout) as smtp:
|
||||
# STARTTLS if requested
|
||||
if use_tls:
|
||||
smtp.starttls()
|
||||
if username and password:
|
||||
smtp.login(username, password)
|
||||
smtp.send_message(msg)
|
||||
logger.info("[Celery] Email sent | to=%s | subject=%s | body_len=%d", to, subject, len(body))
|
||||
except Exception:
|
||||
logger.exception("Failed to send email via SMTP to=%s subject=%s host=%s port=%s tls=%s ssl=%s", to, subject,
|
||||
host, port, use_tls, use_ssl)
|
||||
|
||||
|
||||
@shared_task(name="workers.load_transactions")
|
||||
@celery_app.task(name="workers.load_transactions")
|
||||
def load_transactions(user_id: str) -> None:
|
||||
if not user_id:
|
||||
logger.error("Load transactions task missing user_id.")
|
||||
return
|
||||
|
||||
run_coro(app.services.bank_scraper.aload_ceska_sporitelna_transactions(user_id))
|
||||
|
||||
# Placeholder for real transaction loading logic
|
||||
logger.info("[Celery] Transactions loaded for user_id=%s", user_id)
|
||||
logger.info("[Celery] Starting load_transactions | user_id=%s", user_id)
|
||||
try:
|
||||
# Use synchronous bank scraper functions directly, mirroring load_all_transactions
|
||||
app.services.bank_scraper.load_mock_bank_transactions(user_id)
|
||||
app.services.bank_scraper.load_ceska_sporitelna_transactions(user_id)
|
||||
except Exception:
|
||||
logger.exception("Failed to load transactions for user_id=%s", user_id)
|
||||
else:
|
||||
logger.info("[Celery] Finished load_transactions | user_id=%s", user_id)
|
||||
|
||||
|
||||
@shared_task(name="workers.load_all_transactions")
|
||||
@celery_app.task(name="workers.load_all_transactions")
|
||||
def load_all_transactions() -> None:
|
||||
logger.info("[Celery] Starting load_all_transactions")
|
||||
run_coro(app.services.bank_scraper.aload_all_ceska_sporitelna_transactions())
|
||||
# Now use synchronous bank scraper functions directly
|
||||
app.services.bank_scraper.load_all_mock_bank_transactions()
|
||||
app.services.bank_scraper.load_all_ceska_sporitelna_transactions()
|
||||
logger.info("[Celery] Finished load_all_transactions")
|
||||
|
||||
20
7project/backend/docker-compose.test.yml
Normal file
20
7project/backend/docker-compose.test.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
version: "3.9"
|
||||
services:
|
||||
mariadb:
|
||||
image: mariadb:11.4
|
||||
container_name: test-mariadb
|
||||
environment:
|
||||
MARIADB_ROOT_PASSWORD: rootpw
|
||||
MARIADB_DATABASE: group_project
|
||||
MARIADB_USER: appuser
|
||||
MARIADB_PASSWORD: apppass
|
||||
ports:
|
||||
- "3307:3306" # host:container (use 3307 on host to avoid conflicts)
|
||||
healthcheck:
|
||||
test: ["CMD", "mariadb-admin", "ping", "-h", "127.0.0.1", "-u", "root", "-prootpw", "--silent"]
|
||||
interval: 5s
|
||||
timeout: 2s
|
||||
retries: 20
|
||||
# Truly ephemeral, fast storage (removed when container stops)
|
||||
tmpfs:
|
||||
- /var/lib/mysql
|
||||
@@ -70,3 +70,4 @@ watchfiles==1.1.0
|
||||
wcwidth==0.2.14
|
||||
websockets==15.0.1
|
||||
yarl==1.20.1
|
||||
python-json-logger==2.0.7
|
||||
|
||||
113
7project/backend/test-with-ephemeral-mariadb.sh
Executable file
113
7project/backend/test-with-ephemeral-mariadb.sh
Executable file
@@ -0,0 +1,113 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Run tests against a disposable local MariaDB on host port 3307 using Docker Compose.
|
||||
# Requirements: Docker, docker compose plugin, Python, Alembic, pytest.
|
||||
# Usage:
|
||||
# chmod +x ./test-with-ephemeral-mariadb.sh
|
||||
# # From 7project/backend directory
|
||||
# ./test-with-ephemeral-mariadb.sh [--only-unit|--only-integration|--only-e2e] [pytest-args...]
|
||||
# # Examples:
|
||||
# ./test-with-ephemeral-mariadb.sh --only-unit -q
|
||||
# ./test-with-ephemeral-mariadb.sh --only-integration -k "login"
|
||||
# ./test-with-ephemeral-mariadb.sh --only-e2e -vv
|
||||
#
|
||||
# This script will:
|
||||
# 1) Start a MariaDB 11.4 container (ephemeral storage, port 3307)
|
||||
# 2) Wait until it's healthy
|
||||
# 3) Export env vars expected by the app (DATABASE_URL etc.)
|
||||
# 4) Run Alembic migrations
|
||||
# 5) Run pytest
|
||||
# 6) Tear everything down (containers and tmpfs data)
|
||||
|
||||
COMPOSE_FILE="docker-compose.test.yml"
|
||||
SERVICE_NAME="mariadb"
|
||||
CONTAINER_NAME="test-mariadb"
|
||||
|
||||
if ! command -v docker >/dev/null 2>&1; then
|
||||
echo "Docker is required but not found in PATH" >&2
|
||||
exit 1
|
||||
fi
|
||||
if ! docker compose version >/dev/null 2>&1; then
|
||||
echo "Docker Compose V2 plugin is required (docker compose)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Bring up the DB
|
||||
echo "Starting MariaDB (port 3307) with docker compose..."
|
||||
docker compose -f "$COMPOSE_FILE" up -d
|
||||
|
||||
# Ensure we clean up on exit
|
||||
cleanup() {
|
||||
echo "\nTearing down docker compose stack..."
|
||||
docker compose -f "$COMPOSE_FILE" down -v || true
|
||||
}
|
||||
trap cleanup EXIT
|
||||
|
||||
# Wait for healthy container
|
||||
echo -n "Waiting for MariaDB to become healthy"
|
||||
for i in {1..60}; do
|
||||
status=$(docker inspect -f '{{.State.Health.Status}}' "$CONTAINER_NAME" 2>/dev/null || echo "")
|
||||
if [ "$status" = "healthy" ]; then
|
||||
echo " -> healthy"
|
||||
break
|
||||
fi
|
||||
echo -n "."
|
||||
sleep 1
|
||||
if [ $i -eq 60 ]; then
|
||||
echo "\nMariaDB did not become healthy in time" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
# Export env vars for the app/tests (match app/core/db.py expectations)
|
||||
export MARIADB_HOST=127.0.0.1
|
||||
export MARIADB_PORT=3307
|
||||
export MARIADB_DB=group_project
|
||||
export MARIADB_USER=appuser
|
||||
export MARIADB_PASSWORD=apppass
|
||||
export DATABASE_URL="mysql+asyncmy://$MARIADB_USER:$MARIADB_PASSWORD@$MARIADB_HOST:$MARIADB_PORT/$MARIADB_DB"
|
||||
export PYTEST_RUN_CONFIG="True"
|
||||
|
||||
# Determine which tests to run based on flags
|
||||
UNIT_TESTS="tests/test_unit_user_service.py"
|
||||
INTEGRATION_TESTS="tests/test_integration_app.py"
|
||||
E2E_TESTS="tests/test_e2e.py"
|
||||
|
||||
FLAG_COUNT=0
|
||||
TEST_TARGET=""
|
||||
declare -a PYTEST_ARGS=()
|
||||
for arg in "$@"; do
|
||||
case "$arg" in
|
||||
--only-unit)
|
||||
TEST_TARGET="$UNIT_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
|
||||
--only-integration)
|
||||
TEST_TARGET="$INTEGRATION_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
|
||||
--only-e2e)
|
||||
TEST_TARGET="$E2E_TESTS"; FLAG_COUNT=$((FLAG_COUNT+1));;
|
||||
*)
|
||||
PYTEST_ARGS+=("$arg");;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ "$FLAG_COUNT" -gt 1 ]; then
|
||||
echo "Error: Use only one of --only-unit, --only-integration, or --only-e2e" >&2
|
||||
exit 2
|
||||
fi
|
||||
|
||||
# Run Alembic migrations then tests
|
||||
pushd . >/dev/null
|
||||
echo "Running Alembic migrations..."
|
||||
alembic upgrade head
|
||||
|
||||
echo "Running pytest..."
|
||||
if [ -n "$TEST_TARGET" ]; then
|
||||
# Use "${PYTEST_ARGS[@]:-}" to safely expand empty array with 'set -u'
|
||||
pytest "$TEST_TARGET" "${PYTEST_ARGS[@]:-}"
|
||||
else
|
||||
# Use "${PYTEST_ARGS[@]:-}" to safely expand empty array with 'set -u'
|
||||
pytest "${PYTEST_ARGS[@]:-}"
|
||||
fi
|
||||
popd >/dev/null
|
||||
|
||||
# Cleanup handled by trap
|
||||
210
7project/backend/tests/test_e2e.py
Normal file
210
7project/backend/tests/test_e2e.py
Normal file
@@ -0,0 +1,210 @@
|
||||
import pytest
|
||||
import uuid
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
from fastapi import status
|
||||
|
||||
|
||||
def test_e2e(client):
|
||||
# 1) Service is alive
|
||||
alive = client.get("/")
|
||||
assert alive.status_code == status.HTTP_200_OK
|
||||
|
||||
# 2) Attempt to login without payload should fail fast (validation error)
|
||||
login = client.post("/auth/jwt/login")
|
||||
assert login.status_code in (status.HTTP_400_BAD_REQUEST, status.HTTP_422_UNPROCESSABLE_CONTENT)
|
||||
|
||||
# 3) Protected endpoint should not be accessible without token
|
||||
me = client.get("/users/me")
|
||||
assert me.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_e2e_full_user_lifecycle(fastapi_app, test_user):
|
||||
# Use an AsyncClient with ASGITransport for async tests
|
||||
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
login_payload = test_user
|
||||
|
||||
# 1. Log in with the new credentials
|
||||
login_resp = await ac.post("/auth/jwt/login", data=login_payload)
|
||||
assert login_resp.status_code == status.HTTP_200_OK
|
||||
token = login_resp.json()["access_token"]
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
# 2. Access a protected endpoint
|
||||
me_resp = await ac.get("/users/me", headers=headers)
|
||||
assert me_resp.status_code == status.HTTP_200_OK
|
||||
assert me_resp.json()["email"] == test_user["username"]
|
||||
|
||||
# 3. Update the user's profile
|
||||
update_payload = {"first_name": "Test"}
|
||||
patch_resp = await ac.patch("/users/me", json=update_payload, headers=headers)
|
||||
assert patch_resp.status_code == status.HTTP_200_OK
|
||||
assert patch_resp.json()["first_name"] == "Test"
|
||||
|
||||
# 4. Log out
|
||||
logout_resp = await ac.post("/auth/jwt/logout", headers=headers)
|
||||
assert logout_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||
|
||||
# 5. Verify token is invalid
|
||||
me_again_resp = await ac.get("/users/me", headers=headers)
|
||||
assert me_again_resp.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_e2e_transaction_workflow(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
# 1. Log in to get the token
|
||||
login_resp = await ac.post("/auth/jwt/login", data=test_user)
|
||||
token = login_resp.json()["access_token"]
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
# NEW STEP: Create a category first to get a valid ID
|
||||
category_payload = {"name": "Test Category for E2E"}
|
||||
create_category_resp = await ac.post("/categories/create", json=category_payload, headers=headers)
|
||||
assert create_category_resp.status_code == status.HTTP_201_CREATED
|
||||
category_id = create_category_resp.json()["id"]
|
||||
|
||||
# 2. Create a new transaction
|
||||
tx_payload = {"amount": -55.40, "description": "Milk and eggs"}
|
||||
tx_resp = await ac.post("/transactions/create", json=tx_payload, headers=headers)
|
||||
assert tx_resp.status_code == status.HTTP_201_CREATED
|
||||
tx_id = tx_resp.json()["id"]
|
||||
|
||||
# 3. Assign the category
|
||||
assign_resp = await ac.post(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
|
||||
assert assign_resp.status_code == status.HTTP_200_OK
|
||||
|
||||
# 4. Verify assignment
|
||||
get_tx_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
|
||||
assert category_id in get_tx_resp.json()["category_ids"]
|
||||
|
||||
# 5. Unassign the category
|
||||
unassign_resp = await ac.delete(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
|
||||
assert unassign_resp.status_code == status.HTTP_200_OK
|
||||
|
||||
# 6. Get the transaction again and verify the category is gone
|
||||
get_tx_again_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
|
||||
final_tx_data = get_tx_again_resp.json()
|
||||
assert category_id not in final_tx_data["category_ids"]
|
||||
|
||||
# 7. Delete the transaction for cleanup
|
||||
delete_resp = await ac.delete(f"/transactions/{tx_id}/delete", headers=headers)
|
||||
assert delete_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||
|
||||
# NEW STEP: Clean up the created category
|
||||
delete_category_resp = await ac.delete(f"/categories/{category_id}", headers=headers)
|
||||
assert delete_category_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_register_then_login_and_fetch_me(fastapi_app):
|
||||
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
# Use unique email to avoid duplicates across runs
|
||||
suffix = uuid.uuid4().hex[:8]
|
||||
email = f"newuser_{suffix}@example.com"
|
||||
password = "StrongPassw0rd!"
|
||||
|
||||
reg = await ac.post("/auth/register", json={"email": email, "password": password})
|
||||
assert reg.status_code in (status.HTTP_201_CREATED, status.HTTP_200_OK)
|
||||
|
||||
login = await ac.post("/auth/jwt/login", data={"username": email, "password": password})
|
||||
assert login.status_code == status.HTTP_200_OK
|
||||
token = login.json()["access_token"]
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
try:
|
||||
me = await ac.get("/users/me", headers=headers)
|
||||
assert me.status_code == status.HTTP_200_OK
|
||||
assert me.json()["email"] == email
|
||||
finally:
|
||||
# Cleanup: delete the created user so future runs won’t conflict
|
||||
d = await ac.delete("/users/me", headers=headers)
|
||||
assert d.status_code == status.HTTP_204_NO_CONTENT
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete_current_user_revokes_access(fastapi_app):
|
||||
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
email = "todelete@example.com"
|
||||
password = "Passw0rd!"
|
||||
reg = await ac.post("/auth/register", json={"email": email, "password": password})
|
||||
assert reg.status_code in (status.HTTP_200_OK, status.HTTP_201_CREATED)
|
||||
|
||||
login = await ac.post("/auth/jwt/login", data={"username": email, "password": password})
|
||||
token = login.json()["access_token"]
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
# Delete self
|
||||
d = await ac.delete("/users/me", headers=headers)
|
||||
assert d.status_code == status.HTTP_204_NO_CONTENT
|
||||
|
||||
# Access should now fail
|
||||
me = await ac.get("/users/me", headers=headers)
|
||||
assert me.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_category_conflict_and_404(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||
h = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
a = (await ac.post("/categories/create", json={"name": "A"}, headers=h)).json()
|
||||
b = (await ac.post("/categories/create", json={"name": "B"}, headers=h)).json()
|
||||
|
||||
# Attempt to rename A -> B should conflict
|
||||
conflict = await ac.patch(f"/categories/{a['id']}", json={"name": "B"}, headers=h)
|
||||
assert conflict.status_code == status.HTTP_409_CONFLICT
|
||||
|
||||
# Update non-existent
|
||||
missing = await ac.patch("/categories/999999", json={"name": "Z"}, headers=h)
|
||||
assert missing.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_category_cross_user_isolation(fastapi_app):
|
||||
transport = ASGITransport(app=fastapi_app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
# Generate unique emails for both users
|
||||
sfx = uuid.uuid4().hex[:8]
|
||||
u1 = {"email": f"u1_{sfx}@example.com", "password": "Aaaaaa1!"}
|
||||
u2 = {"email": f"u2_{sfx}@example.com", "password": "Aaaaaa1!"}
|
||||
|
||||
# user1
|
||||
assert (await ac.post("/auth/register", json=u1)).status_code in (200, 201)
|
||||
t1 = (await ac.post("/auth/jwt/login", data={"username": u1["email"], "password": u1["password"]})).json()["access_token"]
|
||||
h1 = {"Authorization": f"Bearer {t1}"}
|
||||
|
||||
# user1 creates a category
|
||||
c = (await ac.post("/categories/create", json={"name": "Private"}, headers=h1)).json()
|
||||
cat_id = c["id"]
|
||||
|
||||
# user2
|
||||
assert (await ac.post("/auth/register", json=u2)).status_code in (200, 201)
|
||||
t2 = (await ac.post("/auth/jwt/login", data={"username": u2["email"], "password": u2["password"]})).json()["access_token"]
|
||||
h2 = {"Authorization": f"Bearer {t2}"}
|
||||
|
||||
try:
|
||||
# user2 cannot read/delete user1's category
|
||||
g = await ac.get(f"/categories/{cat_id}", headers=h2)
|
||||
assert g.status_code == status.HTTP_404_NOT_FOUND
|
||||
d = await ac.delete(f"/categories/{cat_id}", headers=h2)
|
||||
assert d.status_code == status.HTTP_404_NOT_FOUND
|
||||
finally:
|
||||
# Cleanup: remove the created category as its owner
|
||||
try:
|
||||
_ = await ac.delete(f"/categories/{cat_id}", headers=h1)
|
||||
except Exception:
|
||||
pass
|
||||
# Cleanup: delete both users to avoid email conflicts later
|
||||
try:
|
||||
_ = await ac.delete("/users/me", headers=h1)
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
_ = await ac.delete("/users/me", headers=h2)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -1,98 +0,0 @@
|
||||
import pytest
|
||||
import uuid
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
from fastapi import status
|
||||
|
||||
|
||||
def test_e2e_minimal_auth_flow(client):
|
||||
# 1) Service is alive
|
||||
alive = client.get("/")
|
||||
assert alive.status_code == status.HTTP_200_OK
|
||||
|
||||
# 2) Attempt to login without payload should fail fast (validation error)
|
||||
login = client.post("/auth/jwt/login")
|
||||
assert login.status_code in (status.HTTP_400_BAD_REQUEST, status.HTTP_422_UNPROCESSABLE_CONTENT)
|
||||
|
||||
# 3) Protected endpoint should not be accessible without token
|
||||
me = client.get("/users/me")
|
||||
assert me.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_e2e_full_user_lifecycle(fastapi_app, test_user):
|
||||
# Use an AsyncClient with ASGITransport for async tests
|
||||
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
login_payload = test_user
|
||||
|
||||
# 1. Log in with the new credentials
|
||||
login_resp = await ac.post("/auth/jwt/login", data=login_payload)
|
||||
assert login_resp.status_code == status.HTTP_200_OK
|
||||
token = login_resp.json()["access_token"]
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
# 2. Access a protected endpoint
|
||||
me_resp = await ac.get("/users/me", headers=headers)
|
||||
assert me_resp.status_code == status.HTTP_200_OK
|
||||
assert me_resp.json()["email"] == test_user["username"]
|
||||
|
||||
# 3. Update the user's profile
|
||||
update_payload = {"first_name": "Test"}
|
||||
patch_resp = await ac.patch("/users/me", json=update_payload, headers=headers)
|
||||
assert patch_resp.status_code == status.HTTP_200_OK
|
||||
assert patch_resp.json()["first_name"] == "Test"
|
||||
|
||||
# 4. Log out
|
||||
logout_resp = await ac.post("/auth/jwt/logout", headers=headers)
|
||||
assert logout_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||
|
||||
# 5. Verify token is invalid
|
||||
me_again_resp = await ac.get("/users/me", headers=headers)
|
||||
assert me_again_resp.status_code == status.HTTP_401_UNAUTHORIZED
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_e2e_transaction_workflow(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app, raise_app_exceptions=True)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
# 1. Log in to get the token
|
||||
login_resp = await ac.post("/auth/jwt/login", data=test_user)
|
||||
token = login_resp.json()["access_token"]
|
||||
headers = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
# NEW STEP: Create a category first to get a valid ID
|
||||
category_payload = {"name": "Test Category for E2E"}
|
||||
create_category_resp = await ac.post("/categories/create", json=category_payload, headers=headers)
|
||||
assert create_category_resp.status_code == status.HTTP_201_CREATED
|
||||
category_id = create_category_resp.json()["id"]
|
||||
|
||||
# 2. Create a new transaction
|
||||
tx_payload = {"amount": -55.40, "description": "Milk and eggs"}
|
||||
tx_resp = await ac.post("/transactions/create", json=tx_payload, headers=headers)
|
||||
assert tx_resp.status_code == status.HTTP_201_CREATED
|
||||
tx_id = tx_resp.json()["id"]
|
||||
|
||||
# 3. Assign the category
|
||||
assign_resp = await ac.post(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
|
||||
assert assign_resp.status_code == status.HTTP_200_OK
|
||||
|
||||
# 4. Verify assignment
|
||||
get_tx_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
|
||||
assert category_id in get_tx_resp.json()["category_ids"]
|
||||
|
||||
# 5. Unassign the category
|
||||
unassign_resp = await ac.delete(f"/transactions/{tx_id}/categories/{category_id}", headers=headers)
|
||||
assert unassign_resp.status_code == status.HTTP_200_OK
|
||||
|
||||
# 6. Get the transaction again and verify the category is gone
|
||||
get_tx_again_resp = await ac.get(f"/transactions/{tx_id}", headers=headers)
|
||||
final_tx_data = get_tx_again_resp.json()
|
||||
assert category_id not in final_tx_data["category_ids"]
|
||||
|
||||
# 7. Delete the transaction for cleanup
|
||||
delete_resp = await ac.delete(f"/transactions/{tx_id}/delete", headers=headers)
|
||||
assert delete_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||
|
||||
# NEW STEP: Clean up the created category
|
||||
delete_category_resp = await ac.delete(f"/categories/{category_id}", headers=headers)
|
||||
assert delete_category_resp.status_code in (status.HTTP_200_OK, status.HTTP_204_NO_CONTENT)
|
||||
@@ -3,17 +3,6 @@ import pytest
|
||||
from httpx import AsyncClient, ASGITransport
|
||||
|
||||
|
||||
def test_root_ok(client):
|
||||
resp = client.get("/")
|
||||
assert resp.status_code == status.HTTP_200_OK
|
||||
assert resp.json() == {"status": "ok"}
|
||||
|
||||
|
||||
def test_authenticated_route_requires_auth(client):
|
||||
resp = client.get("/authenticated-route")
|
||||
assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_and_get_category(fastapi_app, test_user):
|
||||
# Use AsyncClient for async tests
|
||||
@@ -63,4 +52,108 @@ async def test_create_transaction_missing_amount_fails(fastapi_app, test_user):
|
||||
resp = await ac.post("/transactions/create", json=invalid_payload, headers=headers)
|
||||
|
||||
# 4. Assert the expected validation error
|
||||
assert resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
|
||||
assert resp.status_code == status.HTTP_422_UNPROCESSABLE_CONTENT
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_login_invalid_credentials(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
bad = await ac.post("/auth/jwt/login", data={"username": test_user["username"], "password": "nope"})
|
||||
assert bad.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_400_BAD_REQUEST)
|
||||
unknown = await ac.post("/auth/jwt/login", data={"username": "nouser@example.com", "password": "x"})
|
||||
assert unknown.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_category_duplicate_name_conflict(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||
h = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
p = {"name": "Food"}
|
||||
r1 = await ac.post("/categories/create", json=p, headers=h)
|
||||
assert r1.status_code == status.HTTP_201_CREATED
|
||||
r2 = await ac.post("/categories/create", json=p, headers=h)
|
||||
assert r2.status_code == status.HTTP_409_CONFLICT
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_transaction_invalid_date_format(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||
h = {"Authorization": f"Bearer {token}"}
|
||||
bad = await ac.post("/transactions/create", json={"amount": 10, "description": "x", "date": "31-12-2024"}, headers=h)
|
||||
assert bad.status_code == status.HTTP_400_BAD_REQUEST
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_transaction_rejects_duplicate_category_ids(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||
h = {"Authorization": f"Bearer {token}"}
|
||||
tx = (await ac.post("/transactions/create", json={"amount": 5, "description": "x"}, headers=h)).json()
|
||||
dup = await ac.patch(f"/transactions/{tx['id']}/edit", json={"category_ids": [1, 1]}, headers=h)
|
||||
assert dup.status_code == status.HTTP_400_BAD_REQUEST
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_assign_unassign_category_not_found_cases(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||
h = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
# Create tx and category
|
||||
tx = (await ac.post("/transactions/create", json={"amount": 1, "description": "a"}, headers=h)).json()
|
||||
cat = (await ac.post("/categories/create", json={"name": "X"}, headers=h)).json()
|
||||
|
||||
# Missing transaction
|
||||
r1 = await ac.post(f"/transactions/999999/categories/{cat['id']}", headers=h)
|
||||
assert r1.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
# Missing category
|
||||
r2 = await ac.post(f"/transactions/{tx['id']}/categories/999999", headers=h)
|
||||
assert r2.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_transactions_date_filter_and_balance_series(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||
h = {"Authorization": f"Bearer {token}"}
|
||||
|
||||
# Seed transactions spanning days
|
||||
data = [
|
||||
{"amount": 100, "description": "day1", "date": "2024-01-01"},
|
||||
{"amount": -25, "description": "day2", "date": "2024-01-02"},
|
||||
{"amount": 50, "description": "day3", "date": "2024-01-03"},
|
||||
]
|
||||
for p in data:
|
||||
r = await ac.post("/transactions/create", json=p, headers=h)
|
||||
assert r.status_code == status.HTTP_201_CREATED
|
||||
|
||||
# Filtered list (2nd and 3rd only)
|
||||
lst = await ac.get("/transactions/", params={"start_date": "2024-01-02", "end_date": "2024-01-03"}, headers=h)
|
||||
assert lst.status_code == status.HTTP_200_OK
|
||||
assert len(lst.json()) == 2
|
||||
|
||||
# Balance series should be cumulative per date
|
||||
series = await ac.get("/transactions/balance_series", headers=h)
|
||||
assert series.status_code == status.HTTP_200_OK
|
||||
s = series.json()
|
||||
assert s == [
|
||||
{"date": "2024-01-01", "balance": 100.0},
|
||||
{"date": "2024-01-02", "balance": 75.0},
|
||||
{"date": "2024-01-03", "balance": 125.0},
|
||||
]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete_transaction_not_found(fastapi_app, test_user):
|
||||
transport = ASGITransport(app=fastapi_app)
|
||||
async with AsyncClient(transport=transport, base_url="http://testserver") as ac:
|
||||
token = (await ac.post("/auth/jwt/login", data=test_user)).json()["access_token"]
|
||||
h = {"Authorization": f"Bearer {token}"}
|
||||
r = await ac.delete("/transactions/9999999/delete", headers=h)
|
||||
assert r.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import types
|
||||
import asyncio
|
||||
import pytest
|
||||
|
||||
from fastapi import status
|
||||
from app.services import user_service
|
||||
|
||||
|
||||
@@ -22,6 +20,15 @@ def test_get_jwt_strategy_lifetime():
|
||||
# Basic smoke check: strategy has a lifetime set to 604800
|
||||
assert getattr(strategy, "lifetime_seconds", None) in (604800,)
|
||||
|
||||
def test_root_ok(client):
|
||||
resp = client.get("/")
|
||||
assert resp.status_code == status.HTTP_200_OK
|
||||
assert resp.json() == {"status": "ok"}
|
||||
|
||||
|
||||
def test_authenticated_route_requires_auth(client):
|
||||
resp = client.get("/authenticated-route")
|
||||
assert resp.status_code in (status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_on_after_request_verify_enqueues_email(monkeypatch):
|
||||
|
||||
@@ -90,6 +90,11 @@ spec:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: CSAS_CLIENT_SECRET
|
||||
- name: UNIRATE_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: UNIRATE_API_KEY
|
||||
- name: DOMAIN
|
||||
value: {{ required "Set .Values.domain" .Values.domain | quote }}
|
||||
- name: DOMAIN_SCHEME
|
||||
|
||||
25
7project/charts/myapp-chart/templates/cron.yaml
Normal file
25
7project/charts/myapp-chart/templates/cron.yaml
Normal file
@@ -0,0 +1,25 @@
|
||||
{{- if .Values.cron.enabled }}
|
||||
apiVersion: batch/v1
|
||||
kind: CronJob
|
||||
metadata:
|
||||
name: cronjob
|
||||
spec:
|
||||
schedule: {{ .Values.cron.schedule | quote }}
|
||||
concurrencyPolicy: {{ .Values.cron.concurrencyPolicy | quote }}
|
||||
jobTemplate:
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: cronjob
|
||||
image: curlimages/curl:latest
|
||||
imagePullPolicy: IfNotPresent
|
||||
args:
|
||||
- -sS
|
||||
- -o
|
||||
- /dev/null
|
||||
- -w
|
||||
- "%{http_code}"
|
||||
- {{ printf "%s://%s.%s.svc.cluster.local%s" .Values.cron.scheme .Values.app.name .Release.Namespace .Values.cron.endpoint | quote }}
|
||||
restartPolicy: OnFailure
|
||||
{{- end }}
|
||||
@@ -19,3 +19,11 @@ stringData:
|
||||
RABBITMQ_USERNAME: {{ .Values.rabbitmq.username | quote }}
|
||||
SENTRY_DSN: {{ .Values.sentry_dsn | quote }}
|
||||
DB_ENCRYPTION_KEY: {{ required "Set .Values.database.encryptionSecret" .Values.database.encryptionSecret | quote }}
|
||||
SMTP_HOST: {{ .Values.smtp.host | default "" | quote }}
|
||||
SMTP_PORT: {{ .Values.smtp.port | default 587 | quote }}
|
||||
SMTP_USERNAME: {{ .Values.smtp.username | default "" | quote }}
|
||||
SMTP_PASSWORD: {{ .Values.smtp.password | default "" | quote }}
|
||||
SMTP_USE_TLS: {{ .Values.smtp.tls | default false | quote }}
|
||||
SMTP_USE_SSL: {{ .Values.smtp.ssl | default false | quote }}
|
||||
SMTP_FROM: {{ .Values.smtp.from | default "" | quote }}
|
||||
UNIRATE_API_KEY: {{ .Values.unirate.key | default "" | quote }}
|
||||
|
||||
@@ -85,3 +85,40 @@ spec:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: DB_ENCRYPTION_KEY
|
||||
- name: SMTP_HOST
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: SMTP_HOST
|
||||
- name: SMTP_PORT
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: SMTP_PORT
|
||||
- name: SMTP_USERNAME
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: SMTP_USERNAME
|
||||
- name: SMTP_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: SMTP_PASSWORD
|
||||
- name: SMTP_USE_TLS
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: SMTP_USE_TLS
|
||||
- name: SMTP_USE_SSL
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: SMTP_USE_SSL
|
||||
- name: SMTP_FROM
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: prod
|
||||
key: SMTP_FROM
|
||||
- name: APP_POD_URL
|
||||
value: {{ printf "http://%s.%s.svc.cluster.local" .Values.app.name .Release.Namespace | quote }}
|
||||
|
||||
@@ -5,3 +5,6 @@ app:
|
||||
|
||||
worker:
|
||||
replicas: 3
|
||||
|
||||
cron:
|
||||
enabled: true
|
||||
|
||||
@@ -13,6 +13,9 @@ deployment: ""
|
||||
domain: ""
|
||||
domain_scheme: ""
|
||||
|
||||
unirate:
|
||||
key: ""
|
||||
|
||||
frontend_domain: ""
|
||||
frontend_domain_scheme: ""
|
||||
|
||||
@@ -35,6 +38,23 @@ worker:
|
||||
# Queue name for Celery worker and for CRD Queue
|
||||
mailQueueName: "mail_queue"
|
||||
|
||||
cron:
|
||||
enabled: false
|
||||
schedule: "*/5 * * * *" # every 5 minutes
|
||||
scheme: "http"
|
||||
endpoint: "/_cron"
|
||||
concurrencyPolicy: "Forbid"
|
||||
|
||||
smtp:
|
||||
host:
|
||||
port: 587
|
||||
username: ""
|
||||
password: ""
|
||||
tls: false
|
||||
ssl: false
|
||||
from: ""
|
||||
|
||||
|
||||
|
||||
service:
|
||||
port: 80
|
||||
|
||||
@@ -19,6 +19,17 @@ export type Transaction = {
|
||||
date?: string | null; // ISO date (YYYY-MM-DD)
|
||||
};
|
||||
|
||||
export async function deleteTransaction(id: number): Promise<void> {
|
||||
const res = await fetch(`${getBaseUrl()}/transactions/${id}/delete`, {
|
||||
method: 'DELETE',
|
||||
headers: getHeaders('none'),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const text = await res.text();
|
||||
throw new Error(text || 'Failed to delete transaction');
|
||||
}
|
||||
}
|
||||
|
||||
function getBaseUrl() {
|
||||
const base = BACKEND_URL?.replace(/\/$/, '') || '';
|
||||
return base || '';
|
||||
@@ -122,6 +133,9 @@ export type User = {
|
||||
is_active: boolean;
|
||||
is_superuser: boolean;
|
||||
is_verified: boolean;
|
||||
// Optional JSON config object for user-level integrations and settings
|
||||
// Example: { csas: "{\"expires_at\": 1761824615, ...}" } or { csas: { expires_at: 1761824615, ... } }
|
||||
config?: Record<string, any> | null;
|
||||
};
|
||||
|
||||
export async function getMe(): Promise<User> {
|
||||
|
||||
@@ -13,9 +13,9 @@ export function applyTheme(theme: Theme) {
|
||||
export function applyFontSize(size: FontSize) {
|
||||
const root = document.documentElement;
|
||||
const map: Record<FontSize, string> = {
|
||||
small: '14px',
|
||||
medium: '18px',
|
||||
large: '22px',
|
||||
small: '12px',
|
||||
medium: '15px',
|
||||
large: '21px',
|
||||
};
|
||||
root.style.fontSize = map[size];
|
||||
}
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
export const BACKEND_URL: string =
|
||||
import.meta.env.VITE_BACKEND_URL ?? '';
|
||||
import.meta.env.VITE_BACKEND_URL ?? 'http://127.0.0.1:8000';
|
||||
@@ -24,8 +24,6 @@ a:hover {
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
display: flex;
|
||||
place-items: center;
|
||||
min-width: 320px;
|
||||
min-height: 100vh;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
// src/BalanceChart.tsx
|
||||
import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, Legend, ResponsiveContainer } from 'recharts';
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, Legend } from 'recharts';
|
||||
import { type BalancePoint } from '../api';
|
||||
|
||||
function formatAmount(n: number) {
|
||||
@@ -10,37 +11,56 @@ function formatDate(dateStr: string) {
|
||||
return new Date(dateStr).toLocaleDateString(undefined, { month: 'short', day: 'numeric' });
|
||||
}
|
||||
|
||||
export default function BalanceChart({ data }: { data: BalancePoint[] }) {
|
||||
type Props = { data: BalancePoint[]; pxPerPoint?: number };
|
||||
|
||||
export default function BalanceChart({ data, pxPerPoint = 40 }: Props) {
|
||||
const wrapRef = useRef<HTMLDivElement | null>(null);
|
||||
const [containerWidth, setContainerWidth] = useState(0);
|
||||
|
||||
useEffect(() => {
|
||||
function measure() {
|
||||
if (!wrapRef.current) return;
|
||||
setContainerWidth(wrapRef.current.clientWidth);
|
||||
}
|
||||
measure();
|
||||
const obs = new ResizeObserver(measure);
|
||||
if (wrapRef.current) obs.observe(wrapRef.current);
|
||||
return () => obs.disconnect();
|
||||
}, []);
|
||||
|
||||
if (data.length === 0) {
|
||||
return <div>No data to display</div>;
|
||||
}
|
||||
|
||||
const desiredWidth = Math.max(containerWidth, Math.max(600, data.length * pxPerPoint));
|
||||
|
||||
return (
|
||||
<ResponsiveContainer width="100%" height={300}>
|
||||
<LineChart
|
||||
data={data}
|
||||
// Increased 'left' margin to create more space for the Y-axis label and tick values
|
||||
margin={{ top: 5, right: 30, left: 50, bottom: 5 }} // <-- Change this line
|
||||
>
|
||||
<CartesianGrid strokeDasharray="3 3" />
|
||||
<XAxis
|
||||
dataKey="date"
|
||||
tickFormatter={formatDate}
|
||||
label={{ value: 'Date', position: 'insideBottom', offset: -5 }}
|
||||
/>
|
||||
<YAxis
|
||||
tickFormatter={(value) => formatAmount(value as number)}
|
||||
// Adjusted 'offset' for the Y-axis label.
|
||||
// A negative offset moves it further away from the axis.
|
||||
label={{ value: 'Balance', angle: -90, position: 'insideLeft', offset: -30 }} // <-- Change this line
|
||||
/>
|
||||
<Tooltip
|
||||
labelFormatter={formatDate}
|
||||
formatter={(value) => [formatAmount(value as number), 'Balance']}
|
||||
/>
|
||||
<Legend />
|
||||
<Line type="monotone" dataKey="balance" stroke="#3b82f6" strokeWidth={2} activeDot={{ r: 8 }} />
|
||||
</LineChart>
|
||||
</ResponsiveContainer>
|
||||
<div ref={wrapRef} className="chart-scroll">
|
||||
<div className="chart-inner" style={{ minWidth: desiredWidth, paddingBottom: 8 }}>
|
||||
<LineChart
|
||||
width={desiredWidth}
|
||||
height={300}
|
||||
data={data}
|
||||
margin={{ top: 5, right: 30, left: 50, bottom: 5 }}
|
||||
>
|
||||
<CartesianGrid strokeDasharray="3 3" />
|
||||
<XAxis
|
||||
dataKey="date"
|
||||
tickFormatter={formatDate}
|
||||
label={{ value: 'Date', position: 'insideBottom', offset: -5 }}
|
||||
/>
|
||||
<YAxis
|
||||
tickFormatter={(value) => formatAmount(value as number)}
|
||||
label={{ value: 'Balance', angle: -90, position: 'insideLeft', offset: -30 }}
|
||||
/>
|
||||
<Tooltip
|
||||
labelFormatter={formatDate}
|
||||
formatter={(value) => [formatAmount(value as number), 'Balance']}
|
||||
/>
|
||||
<Legend />
|
||||
<Line type="monotone" dataKey="balance" stroke="#3b82f6" strokeWidth={2} activeDot={{ r: 8 }} />
|
||||
</LineChart>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -92,9 +92,13 @@ export default function CategoryPieCharts({ transactions, categories }: { transa
|
||||
|
||||
|
||||
return (
|
||||
<div style={{ display: 'flex', flexWrap: 'wrap', gap: '20px', justifyContent: 'center' }}>
|
||||
<SinglePieChart data={expensesData} title="Expenses by Category" />
|
||||
<SinglePieChart data={earningsData} title="Earnings by Category" />
|
||||
<div className="pie-grid" >
|
||||
<div className="pie-card">
|
||||
<SinglePieChart data={expensesData} title="Expenses by Category" />
|
||||
</div>
|
||||
<div className="pie-card">
|
||||
<SinglePieChart data={earningsData} title="Earnings by Category" />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
import { useEffect, useMemo, useState } from 'react';
|
||||
import { type Category, type Transaction, type BalancePoint, getCategories, getTransactions, createTransaction, updateTransaction, getBalanceSeries } from '../api';
|
||||
import { useEffect, useMemo, useState, useCallback } from 'react';
|
||||
import { type Category, type Transaction, type BalancePoint, getMe, deleteTransaction, getCategories, getTransactions, createTransaction, updateTransaction, getBalanceSeries } from '../api';
|
||||
import AccountPage from './AccountPage';
|
||||
import AppearancePage from './AppearancePage';
|
||||
import BalanceChart from './BalanceChart';
|
||||
@@ -12,28 +12,20 @@ function formatAmount(n: number) {
|
||||
return new Intl.NumberFormat(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 2 }).format(n);
|
||||
}
|
||||
|
||||
// Add this new component to your Dashboard.tsx file, above the Dashboard component
|
||||
//https://unirateapi.com/
|
||||
|
||||
|
||||
// Define the structure for the rate data we care about
|
||||
type CnbRate = {
|
||||
type RateData = {
|
||||
currencyCode: string;
|
||||
rate: number;
|
||||
};
|
||||
|
||||
// The part of the API response structure we need
|
||||
type CnbApiResponse = {
|
||||
rates: Array<{
|
||||
amount: number;
|
||||
currencyCode: string;
|
||||
rate: number;
|
||||
}>;
|
||||
};
|
||||
|
||||
// The currencies you want to display
|
||||
const TARGET_CURRENCIES = ['EUR', 'USD', 'NOK'];
|
||||
|
||||
function CurrencyRates() {
|
||||
const [rates, setRates] = useState<CnbRate[]>([]);
|
||||
const [rates, setRates] = useState<RateData[]>([]);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
@@ -42,31 +34,20 @@ function CurrencyRates() {
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
|
||||
// Get today's date in YYYY-MM-DD format for the API
|
||||
const today = new Date().toISOString().split('T')[0];
|
||||
const CNB_API_URL = `/api-cnb/cnbapi/exrates/daily?date=${today}&lang=EN`;
|
||||
|
||||
try {
|
||||
const res = await fetch(CNB_API_URL);
|
||||
const base = BACKEND_URL.replace(/\/$/, '');
|
||||
const url = `${base}/exchange-rates?symbols=${TARGET_CURRENCIES.join(',')}`;
|
||||
const token = localStorage.getItem('token');
|
||||
const res = await fetch(url, {
|
||||
headers: token ? { Authorization: `Bearer ${token}` } : undefined,
|
||||
credentials: 'include',
|
||||
});
|
||||
if (!res.ok) {
|
||||
// This can happen on weekends/holidays or if rates aren't posted yet
|
||||
throw new Error(`Rates unavailable (Status: ${res.status})`);
|
||||
const text = await res.text();
|
||||
throw new Error(text || `Failed to load rates (${res.status})`);
|
||||
}
|
||||
const data: CnbApiResponse = await res.json();
|
||||
|
||||
if (!data.rates) {
|
||||
throw new Error("Invalid API response");
|
||||
}
|
||||
|
||||
const filteredRates = data.rates
|
||||
.filter(rate => TARGET_CURRENCIES.includes(rate.currencyCode))
|
||||
.map(rate => ({
|
||||
currencyCode: rate.currencyCode,
|
||||
// Handle 'amount' field (e.g., JPY is per 100)
|
||||
rate: rate.rate / rate.amount
|
||||
}));
|
||||
|
||||
setRates(filteredRates);
|
||||
const data: RateData[] = await res.json();
|
||||
setRates(data);
|
||||
} catch (err: any) {
|
||||
setError(err.message || 'Could not load rates');
|
||||
} finally {
|
||||
@@ -108,10 +89,26 @@ function CurrencyRates() {
|
||||
)) : <li style={{color: '#8a91b4'}}>No rates found.</li>}
|
||||
</ul>
|
||||
)}
|
||||
|
||||
<a
|
||||
href="https://unirateapi.com"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
style={{
|
||||
display: 'block',
|
||||
marginTop: '1rem',
|
||||
fontSize: '0.8em',
|
||||
color: '#8a91b4', // Muted color
|
||||
textDecoration: 'none'
|
||||
}}
|
||||
>
|
||||
Exchange Rates By UniRateAPI
|
||||
</a>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
||||
const [current, setCurrent] = useState<'home' | 'manual' | 'account' | 'appearance'>('home');
|
||||
const [transactions, setTransactions] = useState<Transaction[]>([]);
|
||||
@@ -121,6 +118,47 @@ export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
||||
const [isMockModalOpen, setMockModalOpen] = useState(false);
|
||||
const [isGenerating, setIsGenerating] = useState(false);
|
||||
|
||||
// Current user and CSAS connection status
|
||||
const [csasConnected, setCsasConnected] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
(async () => {
|
||||
try {
|
||||
const u = await getMe();
|
||||
// Determine CSAS connection validity
|
||||
const csas = (u as any)?.config?.csas;
|
||||
let obj: any = null;
|
||||
if (csas) {
|
||||
if (typeof csas === 'string') {
|
||||
try { obj = JSON.parse(csas); } catch {}
|
||||
} else if (typeof csas === 'object') {
|
||||
obj = csas;
|
||||
}
|
||||
}
|
||||
let exp: number | null = null;
|
||||
const raw = obj?.expires_at;
|
||||
if (typeof raw === 'number') {
|
||||
exp = raw;
|
||||
} else if (typeof raw === 'string') {
|
||||
const asNum = Number(raw);
|
||||
if (!Number.isNaN(asNum)) {
|
||||
exp = asNum;
|
||||
} else {
|
||||
const ms = Date.parse(raw);
|
||||
if (!Number.isNaN(ms)) exp = Math.floor(ms / 1000);
|
||||
}
|
||||
}
|
||||
if (exp && exp > Math.floor(Date.now() / 1000)) {
|
||||
setCsasConnected(true);
|
||||
} else {
|
||||
setCsasConnected(false);
|
||||
}
|
||||
} catch (e) {
|
||||
// ignore, user may not be loaded; keep button enabled
|
||||
}
|
||||
})();
|
||||
}, []);
|
||||
|
||||
// Start CSAS (George) OAuth after login
|
||||
async function startOauthCsas() {
|
||||
const base = BACKEND_URL.replace(/\/$/, '');
|
||||
@@ -161,9 +199,24 @@ export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
||||
|
||||
// Manual forms moved to ManualManagement page
|
||||
|
||||
// Inline edit state for transaction categories
|
||||
// Inline edit state for transaction editing
|
||||
const [editingTxId, setEditingTxId] = useState<number | null>(null);
|
||||
const [editingCategoryIds, setEditingCategoryIds] = useState<number[]>([]);
|
||||
const [editingAmount, setEditingAmount] = useState<string>('');
|
||||
const [editingDescription, setEditingDescription] = useState<string>('');
|
||||
const [editingDate, setEditingDate] = useState<string>(''); // YYYY-MM-DD
|
||||
|
||||
// Sidebar toggle for mobile
|
||||
const [sidebarOpen, setSidebarOpen] = useState(false);
|
||||
|
||||
// Multi-select state for transactions and bulk category assignment
|
||||
const [selectedTxIds, setSelectedTxIds] = useState<number[]>([]);
|
||||
const [bulkCategoryIds, setBulkCategoryIds] = useState<number[]>([]);
|
||||
const toggleSelectTx = useCallback((id: number) => {
|
||||
setSelectedTxIds(prev => prev.includes(id) ? prev.filter(x => x !== id) : [...prev, id]);
|
||||
}, []);
|
||||
const clearSelection = useCallback(() => setSelectedTxIds([]), []);
|
||||
const selectAllVisible = useCallback((ids: number[]) => setSelectedTxIds(ids), []);
|
||||
|
||||
async function loadAll() {
|
||||
setLoading(true);
|
||||
@@ -190,47 +243,53 @@ export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
||||
setIsGenerating(true);
|
||||
setMockModalOpen(false);
|
||||
|
||||
const { count, minAmount, maxAmount, startDate, endDate, categoryIds } = options;
|
||||
const newTransactions: Transaction[] = [];
|
||||
|
||||
const startDateTime = new Date(startDate).getTime();
|
||||
const endDateTime = new Date(endDate).getTime();
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
// Generate random data based on user input
|
||||
const amount = parseFloat((Math.random() * (maxAmount - minAmount) + minAmount).toFixed(2));
|
||||
|
||||
const randomTime = Math.random() * (endDateTime - startDateTime) + startDateTime;
|
||||
const date = new Date(randomTime);
|
||||
const dateString = date.toISOString().split('T')[0];
|
||||
|
||||
const randomCategory = categoryIds.length > 0
|
||||
? [categoryIds[Math.floor(Math.random() * categoryIds.length)]]
|
||||
: [];
|
||||
|
||||
const payload = {
|
||||
amount,
|
||||
date: dateString,
|
||||
category_ids: randomCategory,
|
||||
};
|
||||
|
||||
try {
|
||||
const created = await createTransaction(payload);
|
||||
newTransactions.push(created);
|
||||
} catch (err) {
|
||||
console.error("Failed to create mock transaction:", err);
|
||||
alert('An error occurred while generating transactions. Check the console.');
|
||||
break;
|
||||
try {
|
||||
const base = BACKEND_URL.replace(/\/$/, '');
|
||||
const url = `${base}/mock-bank/generate`;
|
||||
const token = localStorage.getItem('token');
|
||||
const res = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(token ? { Authorization: `Bearer ${token}` } : {}),
|
||||
},
|
||||
credentials: 'include',
|
||||
body: JSON.stringify(options),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const text = await res.text();
|
||||
throw new Error(text || `Failed to generate mock transactions (${res.status})`);
|
||||
}
|
||||
const generated: Array<{ amount: number; date: string; category_ids: number[]; description?: string | null }>
|
||||
= await res.json();
|
||||
|
||||
const newTransactions: Transaction[] = [];
|
||||
for (const g of generated) {
|
||||
try {
|
||||
const created = await createTransaction({
|
||||
amount: g.amount,
|
||||
date: g.date,
|
||||
category_ids: g.category_ids || [],
|
||||
description: g.description || undefined,
|
||||
});
|
||||
newTransactions.push(created);
|
||||
} catch (err) {
|
||||
console.error('Failed to create mock transaction:', err);
|
||||
// continue creating others
|
||||
}
|
||||
}
|
||||
|
||||
alert(`${newTransactions.length} mock transactions were successfully generated!`);
|
||||
} catch (err: any) {
|
||||
console.error(err);
|
||||
alert(err?.message || 'Failed to generate mock transactions');
|
||||
} finally {
|
||||
setIsGenerating(false);
|
||||
await loadAll();
|
||||
}
|
||||
|
||||
setIsGenerating(false);
|
||||
alert(`${newTransactions.length} mock transactions were successfully generated!`);
|
||||
|
||||
await loadAll();
|
||||
}
|
||||
|
||||
useEffect(() => { loadAll(); }, [startDate, endDate]);
|
||||
useEffect(() => { loadAll(); clearSelection(); }, [startDate, endDate]);
|
||||
|
||||
const filtered = useMemo(() => {
|
||||
let arr = [...transactions];
|
||||
@@ -256,34 +315,65 @@ export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
||||
const pageEnd = pageStart + pageSize;
|
||||
const visible = sortedDesc.slice(pageStart, pageEnd);
|
||||
|
||||
// Reset selection when page or filters impacting visible set change
|
||||
useEffect(() => { clearSelection(); }, [page, minAmount, maxAmount, filterCategoryId, searchText]);
|
||||
|
||||
function categoryNameById(id: number) { return categories.find(c => c.id === id)?.name || `#${id}`; }
|
||||
|
||||
|
||||
function beginEditCategories(t: Transaction) {
|
||||
function beginEditTransaction(t: Transaction) {
|
||||
setEditingTxId(t.id);
|
||||
setEditingCategoryIds([...(t.category_ids || [])]);
|
||||
setEditingAmount(String(t.amount));
|
||||
setEditingDescription(t.description || '');
|
||||
setEditingDate(t.date || '');
|
||||
}
|
||||
function cancelEditCategories() {
|
||||
function cancelEditTransaction() {
|
||||
setEditingTxId(null);
|
||||
setEditingCategoryIds([]);
|
||||
setEditingAmount('');
|
||||
setEditingDescription('');
|
||||
setEditingDate('');
|
||||
}
|
||||
async function saveEditCategories() {
|
||||
async function saveEditTransaction() {
|
||||
if (editingTxId == null) return;
|
||||
const amountNum = Number(editingAmount);
|
||||
if (Number.isNaN(amountNum)) {
|
||||
alert('Amount must be a number.');
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const updated = await updateTransaction(editingTxId, { category_ids: editingCategoryIds });
|
||||
const updated = await updateTransaction(editingTxId, {
|
||||
amount: amountNum,
|
||||
description: editingDescription,
|
||||
date: editingDate || undefined,
|
||||
category_ids: editingCategoryIds,
|
||||
});
|
||||
setTransactions(prev => prev.map(p => (p.id === updated.id ? updated : p)));
|
||||
cancelEditCategories();
|
||||
// Optionally refresh balance series to reflect changes immediately
|
||||
try { setBalanceSeries(await getBalanceSeries(startDate || undefined, endDate || undefined)); } catch {}
|
||||
cancelEditTransaction();
|
||||
} catch (err: any) {
|
||||
alert(err?.message || 'Failed to update transaction categories');
|
||||
alert(err?.message || 'Failed to update transaction');
|
||||
}
|
||||
}
|
||||
async function handleDeleteTransaction(id: number) {
|
||||
if (!confirm('Delete this transaction? This cannot be undone.')) return;
|
||||
try {
|
||||
await deleteTransaction(id);
|
||||
setTransactions(prev => prev.filter(t => t.id !== id));
|
||||
try { setBalanceSeries(await getBalanceSeries(startDate || undefined, endDate || undefined)); } catch {}
|
||||
} catch (err: any) {
|
||||
alert(err?.message || 'Failed to delete transaction');
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="app-layout">
|
||||
<div className={`app-layout ${sidebarOpen ? 'sidebar-open' : ''}`}>
|
||||
<aside className="sidebar" style={{ display: 'flex', flexDirection: 'column' }}>
|
||||
<div>
|
||||
<div className="logo">7Project</div>
|
||||
<nav className="nav">
|
||||
<div className="logo">Finance Tracker</div>
|
||||
<nav className="nav" onClick={() => setSidebarOpen(false)}>
|
||||
<button className={current === 'home' ? 'active' : ''} onClick={() => setCurrent('home')}>Home</button>
|
||||
<button className={current === 'manual' ? 'active' : ''} onClick={() => setCurrent('manual')}>Manual management</button>
|
||||
<button className={current === 'account' ? 'active' : ''} onClick={() => setCurrent('account')}>Account</button>
|
||||
@@ -296,6 +386,12 @@ export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
||||
</aside>
|
||||
<div className="content">
|
||||
<div className="topbar">
|
||||
<button
|
||||
className="icon-btn hamburger"
|
||||
aria-label="Open menu"
|
||||
aria-expanded={sidebarOpen}
|
||||
onClick={() => setSidebarOpen(true)}
|
||||
>☰</button>
|
||||
<h2 style={{ margin: 0 }}>{current === 'home' ? 'Dashboard' : current === 'manual' ? 'Manual management' : current === 'account' ? 'Account' : 'Appearance'}</h2>
|
||||
<div className="actions">
|
||||
<span className="user muted">Signed in</span>
|
||||
@@ -309,7 +405,7 @@ export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
||||
<h3>Bank connections</h3>
|
||||
<div className="connection-row">
|
||||
<p className="muted" style={{ margin: 0 }}>Connect your CSAS (George) account.</p>
|
||||
<button className="btn primary" onClick={startOauthCsas}>Connect CSAS (George)</button>
|
||||
<button className="btn primary" onClick={startOauthCsas} disabled={csasConnected}>{csasConnected ? 'Successfully connected to CSAS' : 'Connect CSAS (George)'}</button>
|
||||
</div>
|
||||
<div className="connection-row">
|
||||
<p className="muted" style={{ margin: 0 }}>Generate data from a mock bank.</p>
|
||||
@@ -371,44 +467,174 @@ export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
||||
<div className="muted">
|
||||
Showing {visible.length} of {filtered.length} (page {Math.min(page + 1, Math.max(1, totalPages))}/{Math.max(1, totalPages)})
|
||||
</div>
|
||||
<div className="actions">
|
||||
<div className="actions" style={{ gap: 8, alignItems: 'center' }}>
|
||||
{selectedTxIds.length > 0 && (
|
||||
<>
|
||||
<span className="muted">Selected: {selectedTxIds.length}</span>
|
||||
<select
|
||||
className="input"
|
||||
multiple
|
||||
value={bulkCategoryIds.map(String)}
|
||||
onChange={(e) => {
|
||||
const ids = Array.from(e.currentTarget.selectedOptions).map(o => Number(o.value));
|
||||
setBulkCategoryIds(ids);
|
||||
}}
|
||||
>
|
||||
{categories.map(c => (
|
||||
<option key={c.id} value={c.id}>{c.name}</option>
|
||||
))}
|
||||
</select>
|
||||
<button
|
||||
className="btn primary"
|
||||
onClick={async () => {
|
||||
if (bulkCategoryIds.length === 0) {
|
||||
alert('Pick at least one category to assign.');
|
||||
return;
|
||||
}
|
||||
try {
|
||||
// Apply selected categories to each selected transaction, replacing their categories
|
||||
const updates = await Promise.allSettled(
|
||||
selectedTxIds.map(id => updateTransaction(id, { category_ids: bulkCategoryIds }))
|
||||
);
|
||||
const fulfilled = updates.filter(u => u.status === 'fulfilled') as PromiseFulfilledResult<Transaction>[];
|
||||
const updatedById = new Map<number, Transaction>(fulfilled.map(f => [f.value.id, f.value]));
|
||||
setTransactions(prev => prev.map(t => updatedById.get(t.id) || t));
|
||||
try { setBalanceSeries(await getBalanceSeries(startDate || undefined, endDate || undefined)); } catch {}
|
||||
if (fulfilled.length !== selectedTxIds.length) {
|
||||
alert(`Assigned categories to ${fulfilled.length} of ${selectedTxIds.length} selected transactions. Some updates failed.`);
|
||||
}
|
||||
} catch (e: any) {
|
||||
alert(e?.message || 'Failed to assign categories');
|
||||
} finally {
|
||||
clearSelection();
|
||||
setBulkCategoryIds([]);
|
||||
}
|
||||
}}
|
||||
>
|
||||
Apply categories to selected
|
||||
</button>
|
||||
<button className="btn" onClick={clearSelection}>Clear selection</button>
|
||||
</>
|
||||
)}
|
||||
<button className="btn primary" disabled={page <= 0} onClick={() => setPage(p => Math.max(0, p - 1))}>Previous</button>
|
||||
<button className="btn primary" disabled={page >= totalPages - 1} onClick={() => setPage(p => Math.min(totalPages - 1, p + 1))}>Next</button>
|
||||
</div>
|
||||
</div>
|
||||
<table className="table">
|
||||
<table className="table responsive">
|
||||
<thead>
|
||||
<tr>
|
||||
<th style={{ width: 36 }}>
|
||||
<input
|
||||
type="checkbox"
|
||||
aria-label="Select all on page"
|
||||
checked={visible.length > 0 && visible.every(v => selectedTxIds.includes(v.id))}
|
||||
onChange={(e) => {
|
||||
if (e.currentTarget.checked) {
|
||||
selectAllVisible(visible.map(v => v.id));
|
||||
} else {
|
||||
// remove only currently visible from selection
|
||||
setSelectedTxIds(prev => prev.filter(id => !visible.some(v => v.id === id)));
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</th>
|
||||
<th>Date</th>
|
||||
<th style={{ textAlign: 'right' }}>Amount</th>
|
||||
<th>Description</th>
|
||||
<th>Categories</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{visible.map(t => (
|
||||
<tr key={t.id}>
|
||||
<td>{t.date || ''}</td>
|
||||
<td className="amount">{formatAmount(t.amount)}</td>
|
||||
<td>{t.description || ''}</td>
|
||||
<tr key={t.id} style={{ backgroundColor: selectedTxIds.includes(t.id) ? 'rgba(88, 136, 255, 0.1)' : undefined }}>
|
||||
<td>
|
||||
<input
|
||||
type="checkbox"
|
||||
aria-label={`Select transaction ${t.id}`}
|
||||
checked={selectedTxIds.includes(t.id)}
|
||||
onChange={() => toggleSelectTx(t.id)}
|
||||
/>
|
||||
</td>
|
||||
{/* Date cell */}
|
||||
<td data-label="Date">
|
||||
{editingTxId === t.id ? (
|
||||
<div className="space-y" style={{ display: 'flex', alignItems: 'center', gap: 8 }}>
|
||||
<select multiple className="input" value={editingCategoryIds.map(String)} onChange={(e) => {
|
||||
const opts = Array.from(e.currentTarget.selectedOptions).map(o => Number(o.value));
|
||||
setEditingCategoryIds(opts);
|
||||
}}>
|
||||
<input
|
||||
className="input"
|
||||
type="date"
|
||||
value={editingDate}
|
||||
onChange={(e) => setEditingDate(e.target.value)}
|
||||
/>
|
||||
) : (
|
||||
t.date || ''
|
||||
)}
|
||||
</td>
|
||||
|
||||
{/* Amount cell */}
|
||||
<td data-label="Amount" className="amount" style={{ textAlign: 'right' }}>
|
||||
{editingTxId === t.id ? (
|
||||
<input
|
||||
className="input"
|
||||
type="number"
|
||||
step="0.01"
|
||||
value={editingAmount}
|
||||
onChange={(e) => setEditingAmount(e.target.value)}
|
||||
style={{ textAlign: 'right' }}
|
||||
/>
|
||||
) : (
|
||||
formatAmount(t.amount)
|
||||
)}
|
||||
</td>
|
||||
|
||||
{/* Description cell */}
|
||||
<td data-label="Description">
|
||||
{editingTxId === t.id ? (
|
||||
<input
|
||||
className="input"
|
||||
type="text"
|
||||
value={editingDescription}
|
||||
onChange={(e) => setEditingDescription(e.target.value)}
|
||||
/>
|
||||
) : (
|
||||
t.description || ''
|
||||
)}
|
||||
</td>
|
||||
|
||||
{/* Categories cell */}
|
||||
<td data-label="Categories">
|
||||
{editingTxId === t.id ? (
|
||||
<div style={{ display: 'flex', alignItems: 'center', gap: 8 }}>
|
||||
<select
|
||||
multiple
|
||||
className="input"
|
||||
value={editingCategoryIds.map(String)}
|
||||
onChange={(e) => {
|
||||
const opts = Array.from(e.currentTarget.selectedOptions).map(o => Number(o.value));
|
||||
setEditingCategoryIds(opts);
|
||||
}}
|
||||
>
|
||||
{categories.map(c => (
|
||||
<option key={c.id} value={c.id}>{c.name}</option>
|
||||
))}
|
||||
</select>
|
||||
<button className="btn small" onClick={saveEditCategories}>Save</button>
|
||||
<button className="btn small" onClick={cancelEditCategories}>Cancel</button>
|
||||
</div>
|
||||
) : (
|
||||
<div className="space-x" style={{ display: 'flex', alignItems: 'center', gap: 8, justifyContent: 'space-between' }}>
|
||||
<span>{t.category_ids.map(id => categoryNameById(id)).join(', ') || '—'}</span>
|
||||
<button className="btn small" onClick={() => beginEditCategories(t)}>Change</button>
|
||||
<span>{t.category_ids.map(id => categoryNameById(id)).join(', ') || '—'}</span>
|
||||
)}
|
||||
</td>
|
||||
|
||||
{/* Actions cell */}
|
||||
<td data-label="Actions">
|
||||
{editingTxId === t.id ? (
|
||||
<div className="actions" style={{ display: 'flex', gap: 8, justifyContent: 'flex-end' }}>
|
||||
<button className="btn small" onClick={saveEditTransaction}>Save</button>
|
||||
<button className="btn small" onClick={cancelEditTransaction}>Cancel</button>
|
||||
<button className="btn small" onClick={() => handleDeleteTransaction(t.id)}>Delete</button>
|
||||
</div>
|
||||
) : (
|
||||
<div className="actions" style={{ display: 'flex', gap: 8, justifyContent: 'flex-end' }}>
|
||||
<button className="btn small" onClick={() => beginEditTransaction(t)}>Edit</button>
|
||||
<button className="btn small" onClick={() => handleDeleteTransaction(t.id)}>Delete</button>
|
||||
</div>
|
||||
)}
|
||||
</td>
|
||||
@@ -447,6 +673,7 @@ export default function Dashboard({ onLogout }: { onLogout: () => void }) {
|
||||
onClose={() => setMockModalOpen(false)}
|
||||
onGenerate={handleGenerateMockTransactions}
|
||||
/>
|
||||
{sidebarOpen && <div className="backdrop" onClick={() => setSidebarOpen(false)} />}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -80,7 +80,7 @@ export default function LoginRegisterPage({ onLoggedIn }: { onLoggedIn: () => vo
|
||||
<input className="input" type="password" required value={password} onChange={(e) => setPassword(e.target.value)} />
|
||||
</div>
|
||||
{mode === 'register' && (
|
||||
<div className="form-row">
|
||||
<div className="space-y">
|
||||
<div>
|
||||
<label className="muted">First name (optional)</label>
|
||||
<input className="input" type="text" value={firstName} onChange={(e) => setFirstName(e.target.value)} />
|
||||
|
||||
@@ -48,26 +48,49 @@ body[data-theme="dark"] {
|
||||
.card h3 { margin: 0 0 12px; }
|
||||
|
||||
/* Forms */
|
||||
.input, select, textarea {
|
||||
/* Common field styles (no custom arrow here) */
|
||||
.input, textarea {
|
||||
width: 100%;
|
||||
padding: 10px 12px;
|
||||
border-radius: 10px;
|
||||
border: 1px solid var(--border);
|
||||
background-color: var(--panel);
|
||||
color: var(--muted);
|
||||
}
|
||||
|
||||
/* Add these properties specifically for the select element */
|
||||
/* Select-only: show custom dropdown arrow */
|
||||
select.input {
|
||||
-webkit-appearance: none;
|
||||
-moz-appearance: none;
|
||||
appearance: none;
|
||||
|
||||
padding-right: 32px; /* Add space for the custom arrow */
|
||||
padding-right: 32px; /* room for the arrow */
|
||||
background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3e%3cpath stroke='%236b7280' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='M6 8l4 4 4-4'/%3e%3c/svg%3e");
|
||||
background-position: right 0.5rem center;
|
||||
background-repeat: no-repeat;
|
||||
background-size: 1.5em 1.5em;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.pie-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(2, minmax(0, 1fr));
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
@media (max-width: 900px) {
|
||||
.pie-grid {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
|
||||
/* Make charts scale nicely within the cards */
|
||||
.pie-card canvas, .pie-card svg {
|
||||
max-width: 100%;
|
||||
height: auto;
|
||||
display: block;
|
||||
}
|
||||
|
||||
.input:focus, select:focus, textarea:focus {
|
||||
outline: 2px solid var(--primary);
|
||||
outline-offset: 2px;
|
||||
@@ -151,3 +174,117 @@ body.auth-page #root {
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
|
||||
/* Responsive enhancements */
|
||||
|
||||
/* Off-canvas sidebar + hamburger for mobile */
|
||||
@media (max-width: 900px) {
|
||||
.app-layout {
|
||||
grid-template-columns: 1fr;
|
||||
min-height: 100dvh;
|
||||
position: relative;
|
||||
}
|
||||
.sidebar {
|
||||
position: fixed;
|
||||
inset: 0 auto 0 0;
|
||||
width: 80vw;
|
||||
max-width: 320px;
|
||||
transform: translateX(-100%);
|
||||
transition: transform 200ms ease;
|
||||
z-index: 1000;
|
||||
overflow-y: auto;
|
||||
}
|
||||
.app-layout.sidebar-open .sidebar {
|
||||
transform: translateX(0);
|
||||
}
|
||||
.hamburger {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
margin-right: 8px;
|
||||
}
|
||||
.topbar { position: sticky; top: 0; z-index: 500; }
|
||||
}
|
||||
|
||||
@media (min-width: 901px) {
|
||||
.hamburger { display: none; }
|
||||
}
|
||||
|
||||
/* Backdrop when sidebar is open */
|
||||
.backdrop {
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
background: rgba(0,0,0,0.45);
|
||||
z-index: 900;
|
||||
}
|
||||
|
||||
/* Responsive table: convert to card list on small screens */
|
||||
.table.responsive { width: 100%; }
|
||||
@media (max-width: 700px) {
|
||||
.table.responsive thead { display: none; }
|
||||
.table.responsive tbody tr {
|
||||
display: block;
|
||||
border: 1px solid var(--border, #2a2f45);
|
||||
border-radius: 8px;
|
||||
margin-bottom: 12px;
|
||||
overflow: hidden;
|
||||
background: var(--panel);
|
||||
}
|
||||
.table.responsive tbody td {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 12px;
|
||||
padding: 10px 12px;
|
||||
border-bottom: 1px solid var(--border);
|
||||
text-align: left !important; /* override any right align */
|
||||
}
|
||||
.table.responsive tbody td:last-child { border-bottom: 0; }
|
||||
.table.responsive tbody td::before {
|
||||
content: attr(data-label);
|
||||
font-weight: 600;
|
||||
color: var(--muted);
|
||||
}
|
||||
.table.responsive .actions { width: 100%; justify-content: flex-end; }
|
||||
.table.responsive .amount { font-weight: 600; }
|
||||
}
|
||||
|
||||
/* Filters and controls wrapping */
|
||||
@media (max-width: 900px) {
|
||||
.form-row { grid-template-columns: repeat(2, minmax(0, 1fr)); }
|
||||
}
|
||||
@media (max-width: 700px) {
|
||||
.form-row { grid-template-columns: 1fr; }
|
||||
}
|
||||
|
||||
.table-controls { gap: 12px; }
|
||||
@media (max-width: 700px) {
|
||||
.table-controls { flex-direction: column; align-items: stretch; }
|
||||
.table-controls .actions { width: 100%; }
|
||||
.table-controls .actions .btn { flex: 1 0 auto; }
|
||||
}
|
||||
|
||||
/* Touch-friendly sizes */
|
||||
.btn, .input, select.input { min-height: 40px; }
|
||||
.btn.small { min-height: 36px; }
|
||||
|
||||
/* Connection rows on mobile */
|
||||
@media (max-width: 700px) {
|
||||
.connection-row { flex-direction: column; align-items: stretch; gap: 8px; }
|
||||
.connection-row .btn { width: 100%; }
|
||||
}
|
||||
|
||||
/* Charts should scale to container */
|
||||
.card canvas, .card svg { max-width: 100%; height: auto; display: block; }
|
||||
|
||||
|
||||
/* Horizontal scroll container for wide charts */
|
||||
.chart-scroll {
|
||||
overflow-x: auto;
|
||||
overflow-y: hidden;
|
||||
-webkit-overflow-scrolling: touch; /* momentum scroll on iOS */
|
||||
}
|
||||
.chart-inner { min-width: 900px; }
|
||||
|
||||
@@ -43,8 +43,8 @@ The tracker should not store the transactions in the database - security vulnera
|
||||
|
||||
Last 3 minutes of the meeting, summarize action items.
|
||||
|
||||
- [ ] Change the name on frontend from 7project
|
||||
- [ ] Finalize the functionality and everything in the code part
|
||||
- [x] Change the name on frontend from 7project
|
||||
- [x] Finalize the functionality and everything in the code part
|
||||
- [ ] Try to finalize report with focus on reproducibility
|
||||
- [ ] More high level explanation of the workflow in the report
|
||||
|
||||
|
||||
47
7project/meetings/2025-11-6-meeting.md
Normal file
47
7project/meetings/2025-11-6-meeting.md
Normal file
@@ -0,0 +1,47 @@
|
||||
# Weekly Meeting Notes
|
||||
|
||||
- Group 8 - Personal finance tracker
|
||||
- Mentor: Jaychander
|
||||
|
||||
Keep all meeting notes in the `meetings.md` file in your project folder.
|
||||
Just copy the template below for each weekly meeting and fill in the details.
|
||||
|
||||
## Administrative Info
|
||||
|
||||
- Date: 2025-10-30
|
||||
- Attendees: Dejan, Lukas
|
||||
- Notetaker: Dejan
|
||||
|
||||
## Progress Update (Before Meeting)
|
||||
|
||||
Last 3 minutes of the meeting, summarize action items.
|
||||
|
||||
- [x] Change the name on frontend from 7project
|
||||
- [x] Finalize the functionality and everything in the code part
|
||||
- [x] Try to finalize report with focus on reproducibility
|
||||
- [x] More high level explanation of the workflow in the report
|
||||
|
||||
Summary of what has been accomplished since the last meeting in the following categories.
|
||||
|
||||
### Coding
|
||||
|
||||
|
||||
### Documentation
|
||||
|
||||
|
||||
## Questions and Topics for Discussion (Before Meeting)
|
||||
|
||||
|
||||
## Discussion Notes (During Meeting)
|
||||
The tracker should not store the transactions in the database - security vulnerability.
|
||||
|
||||
## Action Items for Next Week (During Meeting)
|
||||
|
||||
Last 3 minutes of the meeting, summarize action items.
|
||||
|
||||
- [ ] video
|
||||
- [ ] highlight the optional stuff in the report
|
||||
|
||||
|
||||
|
||||
---
|
||||
@@ -1,9 +1,9 @@
|
||||
# Personal finance tracker
|
||||
|
||||
> **Instructions**:
|
||||
<!--- **Instructions**:
|
||||
> This template provides the structure for your project report.
|
||||
> Replace the placeholder text with your actual content.
|
||||
> Remove instructions that are not relevant for your project, but leave the headings along with a (NA) label.
|
||||
> Remove instructions that are not relevant for your project, but leave the headings along with a (NA) label. -->
|
||||
|
||||
## Project Overview
|
||||
|
||||
@@ -12,169 +12,416 @@
|
||||
**Group Members**:
|
||||
|
||||
- 289229, Lukáš Trkan, lukastrkan
|
||||
- 289258, Dejan Ribarovski, derib2613, ribardej
|
||||
- 289258, Dejan Ribarovski, ribardej (derib2613)
|
||||
|
||||
**Brief Description**: (něco spíš jako abstract, introuction, story behind)
|
||||
Our application is a finance tracker, so a person can easily track his cash flow
|
||||
through multiple bank accounts. Person can label transactions with custom categories
|
||||
and later filter by them.
|
||||
**Brief Description**:
|
||||
Our application allows users to easily track their cash flow
|
||||
through multiple bank accounts. Users can label their transactions with custom categories that can be later used for
|
||||
filtering and visualization. New transactions are automatically fetched in the background.
|
||||
|
||||
## Architecture Overview
|
||||
Our system is a full‑stack web application composed of a React frontend, a FastAPI backend, a PostgreSQL database, and asynchronous background workers powered by Celery with RabbitMQ. Redis is available for caching/kv and may be used by Celery as a result backend. The backend exposes REST endpoints for authentication (email/password and OAuth), users, categories, and transactions. A thin controller layer (FastAPI routers) lives under app/api. Infrastructure for Kubernetes is provided via OpenTofu (Terraform‑compatible) modules and the application is packaged via a Helm chart.
|
||||
|
||||
Our system is a full‑stack web application composed of a React frontend, a FastAPI backend,
|
||||
a MariaDB database with Maxscale, and asynchronous background workers powered by Celery with RabbitMQ.
|
||||
Redis is available for caching/kv and may be used by Celery as a result backend. The backend
|
||||
exposes REST endpoints for authentication (email/password and OAuth), users, categories,
|
||||
transactions, exchange rates and bank APIs. A thin controller layer (FastAPI routers) lives under app/api.
|
||||
Infrastructure for Kubernetes is provided via OpenTofu (Terraform‑compatible) modules and
|
||||
the application is packaged via a Helm chart.
|
||||
|
||||
### High-Level Architecture
|
||||
|
||||
```mermaid
|
||||
flowchart LR
|
||||
proc_queue[Message Queue] --> proc_queue_worker[Worker Service]
|
||||
proc_queue_worker --> ext_mail[(Email Service)]
|
||||
proc_cron[Task planner] --> proc_queue
|
||||
proc_queue_worker --> ext_bank[(Bank API)]
|
||||
proc_queue_worker --> db
|
||||
client[Client/Frontend] <--> svc[Backend API]
|
||||
n3(("User")) <--> client["Frontend"]
|
||||
proc_queue["Message Queue"] --> proc_queue_worker["Worker Service"]
|
||||
proc_queue_worker -- SMTP --> ext_mail[("Email Service")]
|
||||
proc_queue_worker <-- HTTP request/response --> ext_bank[("Bank API")]
|
||||
proc_queue_worker <--> db[("Database")]
|
||||
proc_cron["Cron"] <-- HTTP request/response --> svc["Backend API"]
|
||||
svc --> proc_queue
|
||||
svc <--> db[(Database)]
|
||||
svc <--> cache[(Cache)]
|
||||
n2["Cloudflare tunnel"] <-- HTTP request/response --> svc
|
||||
svc <--> db
|
||||
svc <-- HTTP request/response --> api[("UniRate API")]
|
||||
client <-- HTTP request/response --> n2
|
||||
```
|
||||
|
||||
The workflow works in the following way:
|
||||
|
||||
- Client connects to the frontend. After login, frontend automatically fetches the stored transactions from
|
||||
the database via the backend API and currency rates from UniRate API.
|
||||
- When the client opts for fetching new transactions via the Bank API, the backend delegates the task
|
||||
to a background worker service via the Message queue.
|
||||
- After successful load, these transactions are stored to the database and displayed to the client
|
||||
- There is also a Task planner, that executes periodic tasks, like fetching new transactions automatically from the Bank
|
||||
APIs
|
||||
|
||||
### Features
|
||||
|
||||
- The stored transactions are encrypted in the DB for security reasons.
|
||||
- For every pull request the full APP is deployed on a separate URL and the tests are run by github CI/CD
|
||||
- On every push to main, the production app is automatically updated
|
||||
- UI is responsive for mobile devices
|
||||
|
||||
### Components
|
||||
|
||||
- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles login/registration, shows latest transactions, filtering, and allows adding transactions.
|
||||
- Backend API (backend/app): FastAPI app with routers under app/api for auth, categories, and transactions. Uses FastAPI Users for auth (JWT + OAuth), SQLAlchemy ORM, and Pydantic v2 schemas.
|
||||
- Worker service (backend/app/workers): Celery worker handling asynchronous tasks (e.g., sending verification emails, future background processing).
|
||||
- Frontend (frontend/): React + TypeScript app built with Vite. Talks to the backend via REST, handles
|
||||
login/registration, shows latest transactions, filtering, and allows adding transactions.
|
||||
- Backend API (backend/app): FastAPI app with routers under app/api for auth, users, categories, transactions, exchange
|
||||
rates and bankAPI. Uses FastAPI Users for auth (JWT + OAuth), SQLAlchemy ORM, and Pydantic v2 schemas.
|
||||
- Worker service (backend/app/workers): Celery worker handling asynchronous tasks (e.g., sending verification emails,
|
||||
future background processing).
|
||||
- Database (PostgreSQL): Persists users, categories, transactions; schema managed by Alembic migrations.
|
||||
- Message Queue (RabbitMQ): Transports background jobs from the API to the worker.
|
||||
- Cache/Result Store (Redis): Available for caching or Celery result backend.
|
||||
- Infrastructure as Code (tofu/): OpenTofu modules provisioning cluster services (RabbitMQ, Redis, Argo CD, cert-manager, Cloudflare tunnel, etc.).
|
||||
- Infrastructure as Code (tofu/): OpenTofu modules provisioning cluster services (RabbitMQ, Redis, Argo CD,
|
||||
cert-manager, Cloudflare tunnel, etc.).
|
||||
- Deployment Chart (charts/myapp-chart/): Helm chart to deploy the application to Kubernetes.
|
||||
|
||||
### Technologies Used
|
||||
|
||||
- Backend: Python, FastAPI, FastAPI Users, SQLAlchemy, Pydantic, Alembic, Celery
|
||||
- Frontend: React, TypeScript, Vite
|
||||
- Database: PostgreSQL
|
||||
- Messaging: RabbitMQ
|
||||
- Cache: Redis
|
||||
- Database: MariaDB with Maxscale
|
||||
- Background jobs: RabbitMQ, Celery
|
||||
- Containerization/Orchestration: Docker, Docker Compose (dev), Kubernetes, Helm
|
||||
- IaC/Platform: OpenTofu (Terraform), Argo CD, cert-manager, MetalLB, Cloudflare Tunnel, Prometheus
|
||||
- IaC/Platform: Proxmox, Talos, Cloudflare pages, OpenTofu (Terraform), cert-manager, MetalLB, Cloudflare Tunnel,
|
||||
Prometheus, Loki
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### System Requirements
|
||||
|
||||
- Operating System: Linux, macOS, or Windows
|
||||
- Minimum RAM: 4 GB (8 GB recommended for running backend, frontend, and database together)
|
||||
- Storage: 2 GB free (Docker images may require additional space)
|
||||
#### Development
|
||||
|
||||
- Minimum RAM: 8 GB
|
||||
- Storage: 10 GB+ free
|
||||
|
||||
#### Production
|
||||
|
||||
- 1 + 4 nodes
|
||||
- CPU: 4 cores
|
||||
- RAM: 8 GB
|
||||
- Storage: 200 GB
|
||||
|
||||
### Required Software
|
||||
|
||||
- Docker Desktop or Docker Engine 24+
|
||||
- Docker Compose v2+
|
||||
- Node.js 20+ and npm 10+ (for local frontend dev/build)
|
||||
- Python 3.12+ (for local backend dev outside Docker)
|
||||
- PostgreSQL 15+ (optional if running DB outside Docker)
|
||||
- Helm 3.12+ and kubectl 1.29+ (for Kubernetes deployment)
|
||||
- OpenTofu 1.7+ (for infrastructure provisioning)
|
||||
#### Development
|
||||
|
||||
### Environment Variables (common)
|
||||
- Docker
|
||||
- Docker Compose
|
||||
- Node.js and npm
|
||||
- Python 3.12
|
||||
- MariaDB 11
|
||||
|
||||
- Backend: SECRET, FRONTEND_URL, BACKEND_URL, DATABASE_URL, RABBITMQ_URL, REDIS_URL
|
||||
- OAuth vars (Backend): MOJEID_CLIENT_ID/SECRET, BANKID_CLIENT_ID/SECRET (optional)
|
||||
- Frontend: VITE_BACKEND_URL
|
||||
#### Production
|
||||
|
||||
##### Minimal:
|
||||
|
||||
- domain name with Cloudflare's nameservers - tunnel, pages
|
||||
- Kubernetes cluster
|
||||
- kubectl
|
||||
- Helm
|
||||
- OpenTofu
|
||||
|
||||
##### Our setup specifics:
|
||||
|
||||
- Proxmox VE
|
||||
- TalosOS cluster
|
||||
- talosctl
|
||||
- GitHub self-hosted runner with access to the cluster
|
||||
- TailScale for remote access to cluster
|
||||
|
||||
### Environment Variables
|
||||
|
||||
#### Backend
|
||||
|
||||
- `MOJEID_CLIENT_ID`, `MOJEID_CLIENT_SECRET` \- OAuth client ID and secret for
|
||||
MojeID - https://www.mojeid.cz/en/provider/
|
||||
- `BANKID_CLIENT_ID`, `BANKID_CLIENT_SECRET` \- OAuth client ID and secret for BankID - https://developer.bankid.cz/
|
||||
- `CSAS_CLIENT_ID`, `CSAS_CLIENT_SECRET` \- OAuth client ID and secret for Česká
|
||||
spořitelna - https://developers.erstegroup.com/docs/apis/bank.csas
|
||||
- `DATABASE_URL`(or `MARIADB_HOST`, `MARIADB_PORT`, `MARIADB_DB`, `MARIADB_USER`, `MARIADB_PASSWORD`) \- MariaDB
|
||||
connection details
|
||||
- `RABBITMQ_USERNAME`, `RABBITMQ_PASSWORD` \- credentials for RabbitMQ
|
||||
- `SENTRY_DSN` \- Sentry DSN for error reporting
|
||||
- `DB_ENCRYPTION_KEY` \- symmetric key for encrypting sensitive data in the database
|
||||
- `SMTP_HOST`, `SMTP_PORT`, `SMTP_USERNAME`, `SMTP_PASSWORD`, `SMTP_USE_TLS`, `SMTP_USE_SSL`, `SMTP_FROM` \- SMTP
|
||||
configuration (host, port, auth credentials, TLS/SSL options, sender).
|
||||
- `UNIRATE_API_KEY` \- API key for UniRate.
|
||||
|
||||
#### Frontend
|
||||
|
||||
- `VITE_BACKEND_URL` \- URL of the backend API
|
||||
|
||||
### Dependencies (key libraries)
|
||||
I am not sure what is meant by "key libraries"
|
||||
|
||||
Backend: FastAPI, fastapi-users, SQLAlchemy, pydantic v2, Alembic, Celery
|
||||
Frontend: React, TypeScript, Vite
|
||||
Services: PostgreSQL, RabbitMQ, Redis
|
||||
Backend: FastAPI, fastapi-users, SQLAlchemy, pydantic v2, Alembic, Celery, uvicorn, pytest
|
||||
Frontend: React, TypeScript, Vite
|
||||
|
||||
## Build Instructions
|
||||
## Local development
|
||||
|
||||
You can run the project with Docker Compose (recommended for local development) or run services manually.
|
||||
You can run the project with Docker Compose and Python virtual environment for testing and development purposes
|
||||
|
||||
### 1) Clone the Repository
|
||||
|
||||
```bash
|
||||
git clone https://github.com/dat515-2025/Group-8.git
|
||||
cd 7project
|
||||
cd Group-8/7project
|
||||
```
|
||||
|
||||
### 2) Install dependencies
|
||||
|
||||
Backend
|
||||
|
||||
```bash
|
||||
# In 7project/backend
|
||||
python3.12 -m venv .venv
|
||||
source .venv/bin/activate # Windows: .venv\Scripts\activate
|
||||
cd backend
|
||||
python3 -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
Frontend
|
||||
|
||||
### 3) Run Docker containers
|
||||
|
||||
```bash
|
||||
# In 7project/frontend
|
||||
npm install
|
||||
cd ..
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
### 3) Manual Local Run
|
||||
### 4) Prepare the database
|
||||
|
||||
Backend
|
||||
```bash
|
||||
# From the 7project/ directory
|
||||
docker compose up --build
|
||||
# This starts: PostgreSQL, RabbitMQ/Redis (if defined)
|
||||
bash upgrade_database.sh
|
||||
```
|
||||
|
||||
# Set environment variables (or create .env file)
|
||||
export SECRET=CHANGE_ME_SECRET
|
||||
export BACKEND_URL=http://127.0.0.1:8000
|
||||
export FRONTEND_URL=http://localhost:5173
|
||||
export DATABASE_URL=postgresql+asyncpg://user:password@127.0.0.1:5432/app
|
||||
export RABBITMQ_URL=amqp://guest:guest@127.0.0.1:5672/
|
||||
export REDIS_URL=redis://127.0.0.1:6379/0
|
||||
### 5) Run backend
|
||||
|
||||
# Apply DB migrations (Alembic)
|
||||
# From 7project/backend
|
||||
alembic upgrade head
|
||||
```bash
|
||||
cd backend
|
||||
|
||||
# Run API
|
||||
#TODO: set env variables
|
||||
uvicorn app.app:fastApi --reload --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
# Run Celery worker (optional, for emails/background tasks)
|
||||
### 6) Run Celery worker (optional, in another terminal)
|
||||
|
||||
```bash
|
||||
cd Group-8/7project/backend
|
||||
source .venv/bin/activate
|
||||
celery -A app.celery_app.celery_app worker -l info
|
||||
```
|
||||
|
||||
Frontend
|
||||
### 7) Install frontend dependencies and run
|
||||
|
||||
```bash
|
||||
# Configure backend URL for dev
|
||||
echo 'VITE_BACKEND_URL=http://127.0.0.1:8000' > .env
|
||||
cd ../frontend
|
||||
npm i
|
||||
npm run dev
|
||||
# Open http://localhost:5173
|
||||
```
|
||||
|
||||
- Backend default: http://127.0.0.1:8000 (OpenAPI at /docs)
|
||||
- Frontend default: http://localhost:5173
|
||||
- Backend available at: http://127.0.0.1:8000 (OpenAPI at /docs)
|
||||
- Frontend available at: http://localhost:5173
|
||||
|
||||
If needed, adjust compose services/ports in compose.yml.
|
||||
## Build Instructions
|
||||
|
||||
### Backend
|
||||
|
||||
```bash
|
||||
cd 7project/backend
|
||||
# Dont forget to set correct image tag with your registry and name
|
||||
# For example lukastrkan/cc-app-demo or gitea.ltrk.dev/lukas/cc-app-demo
|
||||
docker buildx build --platform linux/amd64,linux/arm64 -t CHANGE_ME --push .
|
||||
```
|
||||
|
||||
### Frontend
|
||||
|
||||
```bash
|
||||
cd project7/frontend
|
||||
npm ci
|
||||
npm run build
|
||||
```
|
||||
|
||||
## Deployment Instructions
|
||||
|
||||
### Local (Docker Compose)
|
||||
### Setup Cluster
|
||||
|
||||
Described in the previous section (Manual Local Run)
|
||||
Deployment should work on any Kubernetes cluster. However, we are using 4 TalosOS virtual machines (1 control plane, 3
|
||||
workers)
|
||||
running on top of Proxmox VE.
|
||||
|
||||
### Kubernetes (via OpenTofu + Helm)
|
||||
1) Create at least 4 VMs with TalosOS (4 cores, 8 GB RAM, 200 GB disk)
|
||||
2) Install talosctl for your OS: https://docs.siderolabs.com/talos/v1.10/getting-started/talosctl
|
||||
3) Generate Talos config
|
||||
4) Navigate to tofu directory
|
||||
|
||||
```bash
|
||||
cd 7project/tofu
|
||||
````
|
||||
|
||||
5) Set IP addresses in environment variables
|
||||
|
||||
```bash
|
||||
CONTROL_PLANE_IP=<control-plane-ip>
|
||||
WORKER1_IP=<worker1-ip>
|
||||
WORKER2_IP=<worker2-ip>
|
||||
WORKER3_IP=<worker3-ip>
|
||||
WORKER4_IP=<worker4-ip>
|
||||
....
|
||||
```
|
||||
|
||||
6) Create config files
|
||||
|
||||
```bash
|
||||
# change my-cluster to your desired cluster name
|
||||
talosctl gen config my-cluster https://$CONTROL_PLANE_IP:6443
|
||||
```
|
||||
|
||||
7) Edit the generated configs
|
||||
|
||||
Apply the following changes to `worker.yaml`:
|
||||
|
||||
1) Add mounts for persistent storage to `machine.kubelet.extraMounts` section:
|
||||
|
||||
```yaml
|
||||
extraMounts:
|
||||
- destination: /var/lib/longhorn
|
||||
type: bind
|
||||
source: /var/lib/longhorn
|
||||
options:
|
||||
- bind
|
||||
- rshared
|
||||
- rw
|
||||
```
|
||||
|
||||
2) Change `machine.install.image` to image with extra modules:
|
||||
|
||||
```yaml
|
||||
image: factory.talos.dev/metal-installer/88d1f7a5c4f1d3aba7df787c448c1d3d008ed29cfb34af53fa0df4336a56040b:v1.11.1
|
||||
```
|
||||
|
||||
or you can use latest image generated at https://factory.talos.dev with following options:
|
||||
|
||||
- Bare-metal machine
|
||||
- your Talos os version
|
||||
- amd64 architecture
|
||||
- siderolabs/iscsi-tools
|
||||
- siderolabs/util-linux-tools
|
||||
- (Optionally) siderolabs/qemu-guest-agent
|
||||
|
||||
Then copy "Initial Installation" value and paste it to the image field.
|
||||
|
||||
3) Add docker registry mirror to `machine.registries.mirrors` section:
|
||||
|
||||
```yaml
|
||||
registries:
|
||||
mirrors:
|
||||
docker.io:
|
||||
endpoints:
|
||||
- https://mirror.gcr.io
|
||||
- https://registry-1.docker.io
|
||||
```
|
||||
|
||||
8) Apply configs to the VMs
|
||||
|
||||
```bash
|
||||
talosctl apply-config --insecure --nodes $CONTROL_PLANE_IP --file controlplane.yaml
|
||||
talosctl apply-config --insecure --nodes $WORKER1_IP --file worker.yaml
|
||||
talosctl apply-config --insecure --nodes $WORKER2_IP --file worker.yaml
|
||||
talosctl apply-config --insecure --nodes $WORKER3_IP --file worker.yaml
|
||||
talosctl apply-config --insecure --nodes $WORKER4_IP --file worker.yaml
|
||||
```
|
||||
|
||||
9) Bootstrap the cluster and retrieve kubeconfig
|
||||
|
||||
```bash
|
||||
export TALOSCONFIG=$(pwd)/talosconfig
|
||||
talosctl config endpoint https://$CONTROL_PLANE_IP:6443
|
||||
talosctl config node $CONTROL_PLANE_IP
|
||||
|
||||
talosctl bootstrap
|
||||
|
||||
talosctl kubeconfig .
|
||||
```
|
||||
|
||||
You can now use k8s client like https://headlamp.dev/ with the generated kubeconfig file.
|
||||
|
||||
### Install base services to the cluster
|
||||
|
||||
1) Copy and edit variables
|
||||
|
||||
1) Provision platform services (RabbitMQ/Redis/ingress/tunnel/etc.) with OpenTofu
|
||||
```bash
|
||||
cd tofu
|
||||
# copy and edit variables
|
||||
cp terraform.tfvars.example terraform.tfvars
|
||||
# authenticate to your cluster/cloud as needed, then:
|
||||
```
|
||||
|
||||
- `metallb_ip_range` - set to range available in your network for load balancer services
|
||||
- `mariadb_password` - password for internal mariadb user
|
||||
- `mariadb_root_password` - password for root user
|
||||
- `mariadb_user_name` - username for admin user
|
||||
- `mariadb_user_host` - allowed hosts for admin user
|
||||
- `mariadb_user_password` - password for admin user
|
||||
- `metallb_maxscale_ip`, `metallb_service_ip`, `metallb_primary_ip`, `metallb_secondary_ip` - IPs for database
|
||||
cluster,
|
||||
set them to static IPs from the `metallb_ip_range`
|
||||
- `s3_enabled`, `s3_bucket`, `s3_region`, `s3_endpoint`, `s3_key_id`, `s3_key_secret` - S3 compatible storage for
|
||||
backups (optional)
|
||||
- `phpmyadmin_enabled` - set to false if you want to disable phpmyadmin
|
||||
- `rabbitmq-password` - password for RabbitMQ
|
||||
|
||||
- `cloudflare_account_id` - your Cloudflare account ID
|
||||
- `cloudflare_api_token` - your Cloudflare API token with permissions to manage tunnels and DNS
|
||||
- `cloudflare_email` - your Cloudflare account email
|
||||
- `cloudflare_tunnel_name` - name for the tunnel
|
||||
- `cloudflare_domain` - your domain name managed in Cloudflare
|
||||
|
||||
2) Deploy without Cloudflare module first
|
||||
|
||||
```bash
|
||||
tofu init
|
||||
tofu plan
|
||||
tofu apply -exclude=module.cloudflare
|
||||
```
|
||||
|
||||
3) Deploy rest of the modules
|
||||
|
||||
```bash
|
||||
tofu apply
|
||||
```
|
||||
|
||||
### Configure deployment
|
||||
|
||||
1) Create self-hosted runner with access to the cluster or make cluster publicly accessible
|
||||
2) Change `jobs.deploy.runs-on` in `.github/workflows/deploy-prod.yml` and in `.github/workflows/deploy-pr.yaml` to your
|
||||
runner label
|
||||
3) Add variables to GitHub in repository settings:
|
||||
- `PROD_DOMAIN` - base domain for deployments (e.g. ltrk.cz)
|
||||
- `DEV_FRONTEND_BASE_DOMAIN` - base domain for your cloudflare pages
|
||||
4) Add secrets to GitHub in repository settings:
|
||||
- CLOUDFLARE_ACCOUNT_ID - same as in tofu/terraform.tfvars
|
||||
- CLOUDFLARE_API_TOKEN - same as in tofu/terraform.tfvars
|
||||
- DOCKER_USER - your docker registry username
|
||||
- DOCKER_PASSWORD - your docker registry password
|
||||
- KUBE_CONFIG - content of your kubeconfig file for the cluster
|
||||
- PROD_DB_PASSWORD - same as MARIADB_PASSWORD
|
||||
- PROD_RABBITMQ_PASSWORD - same as rabbitmq-password
|
||||
- PROD_DB_ENCRYPTION_KEY - same as DB_ENCRYPTION_KEY
|
||||
- MOJEID_CLIENT_ID
|
||||
- MOJEID_CLIENT_SECRET
|
||||
- BANKID_CLIENT_ID
|
||||
- BANKID_CLIENT_SECRET
|
||||
- CSAS_CLIENT_ID
|
||||
- CSAS_CLIENT_SECRET
|
||||
- SENTRY_DSN
|
||||
- SMTP_HOST
|
||||
- SMTP_PORT
|
||||
- SMTP_USERNAME
|
||||
- SMTP_PASSWORD
|
||||
- SMTP_FROM
|
||||
- UNIRATE_API_KEY
|
||||
5) On Github open Actions tab, select "Deploy Prod" and run workflow manually
|
||||
|
||||
# TODO: REMOVE I guess
|
||||
|
||||
2) Deploy the app using Helm
|
||||
|
||||
```bash
|
||||
# Set the namespace
|
||||
kubectl create namespace myapp || true
|
||||
@@ -189,54 +436,43 @@ helm upgrade --install myapp charts/myapp-chart \
|
||||
--set env.FRONTEND_URL="https://myapp.example.com" \
|
||||
--set env.SECRET="CHANGE_ME_SECRET"
|
||||
```
|
||||
Adjust values to your registry and domain. The chart’s NOTES.txt includes additional examples.
|
||||
|
||||
3) Expose and access
|
||||
- If using Cloudflare Tunnel or an ingress, configure DNS accordingly (see tofu/modules/cloudflare and deployment/tunnel.yaml).
|
||||
- For quick testing without ingress:
|
||||
```bash
|
||||
kubectl -n myapp port-forward deploy/myapp-backend 8000:8000
|
||||
kubectl -n myapp port-forward deploy/myapp-frontend 5173:80
|
||||
```
|
||||
|
||||
### Verification
|
||||
|
||||
```bash
|
||||
# Check pods
|
||||
kubectl -n myapp get pods
|
||||
|
||||
# Backend health
|
||||
curl -i http://127.0.0.1:8000/
|
||||
# OpenAPI
|
||||
open http://127.0.0.1:8000/docs
|
||||
|
||||
# Frontend (if port-forwarded)
|
||||
open http://localhost:5173
|
||||
```
|
||||
|
||||
## Testing Instructions
|
||||
|
||||
### Unit Tests
|
||||
The tests are located in 7project/backend/tests directory. All tests are run by GitHub actions on every pull request and
|
||||
push to main.
|
||||
See the workflow [here](../.github/workflows/run-tests.yml).
|
||||
|
||||
If you want to run the tests locally, the preferred way is to use a [bash script](backend/test-with-ephemeral-mariadb.sh)
|
||||
that will start a [test DB container](backend/docker-compose.test.yml) and remove it afterward.
|
||||
|
||||
```bash
|
||||
# Commands to run unit tests
|
||||
# For example:
|
||||
# go test ./...
|
||||
# npm test
|
||||
cd 7project/backend
|
||||
bash test-with-ephemeral-mariadb.sh
|
||||
```
|
||||
|
||||
### Unit Tests
|
||||
|
||||
There are only 5 basic unit tests, since our services logic is very simple
|
||||
|
||||
```bash
|
||||
bash test-with-ephemeral-mariadb.sh --only-unit
|
||||
```
|
||||
|
||||
### Integration Tests
|
||||
|
||||
There are 9 basic integration tests, testing the individual backend API logic
|
||||
|
||||
```bash
|
||||
# Commands to run integration tests
|
||||
# Any setup required for integration tests
|
||||
bash test-with-ephemeral-mariadb.sh --only-integration
|
||||
```
|
||||
|
||||
### End-to-End Tests
|
||||
|
||||
There are 7 e2e tests, testing more complex app logic
|
||||
|
||||
```bash
|
||||
# Commands to run e2e tests
|
||||
# How to set up test environment
|
||||
bash test-with-ephemeral-mariadb.sh --only-e2e
|
||||
```
|
||||
|
||||
## Usage Examples
|
||||
@@ -313,24 +549,24 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
||||
|
||||
---
|
||||
|
||||
## Self-Assessment Table
|
||||
## Progress Table
|
||||
|
||||
> Be honest and detailed in your assessments.
|
||||
> This information is used for individual grading.
|
||||
> Link to the specific commit on GitHub for each contribution.
|
||||
|
||||
| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|
||||
|-----------------------------------------------------------------------|-------------| ------------- |----------------|------------| ----------- |
|
||||
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8#) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
||||
| [Design Document](https://github.com/dat515-2025/Group-8/blob/main/6design/design.md) | Both | ✅ Complete | 2 Hours | Easy | [Any notes] |
|
||||
| [Backend API Development](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/api) | Dejan | 🔄 In Progress | 10 hours | Medium | [Any notes] |
|
||||
| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | 🔄 In Progress | [X hours] | Medium | [Any notes] |
|
||||
| [Frontend Development](https://github.com/dat515-2025/Group-8/tree/main/7project/frontend) | Dejan | 🔄 In Progress | 7 hours so far | Medium | [Any notes] |
|
||||
| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | [X hours] | Easy | [Any notes] |
|
||||
| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | [Any notes] |
|
||||
| [Testing Implementation](https://github.com/dat515-2025/group-name) | Dejan | 🔄 In Progress | [X hours] | Medium | [Any notes] |
|
||||
| [Documentation](https://github.com/dat515-2025/group-name) | Both | 🔄 In Progress | [X hours] | Easy | [Any notes] |
|
||||
| [Presentation Video](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Medium | [Any notes] |
|
||||
| Task/Component | Assigned To | Status | Time Spent | Difficulty | Notes |
|
||||
|-------------------------------------------------------------------------------------------------------------------|-------------|----------------|------------|------------|-----------------------------------------------------------------------------------------------------|
|
||||
| [Project Setup & Repository](https://github.com/dat515-2025/Group-8#) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
||||
| [Design Document](https://github.com/dat515-2025/Group-8/blob/main/6design/design.md) | Both | ✅ Complete | 4 Hours | Easy | [Any notes] |
|
||||
| [Backend API Development](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/api) | Dejan | ✅ Complete | 12 hours | Medium | [Any notes] |
|
||||
| [Database Setup & Models](https://github.com/dat515-2025/Group-8/tree/main/7project/backend/app/models) | Lukas | ✅ Complete | [X hours] | Medium | [Any notes] |
|
||||
| [Frontend Development](https://github.com/dat515-2025/Group-8/tree/main/7project/frontend) | Dejan | ✅ Complete | 17 hours | Medium | [Any notes] |
|
||||
| [Docker Configuration](https://github.com/dat515-2025/Group-8/blob/main/7project/compose.yml) | Lukas | ✅ Complete | 3 hours | Easy | [Any notes] |
|
||||
| [Cloud Deployment](https://github.com/dat515-2025/Group-8/blob/main/7project/deployment/app-demo-deployment.yaml) | Lukas | ✅ Complete | [X hours] | Hard | Using Talos cluster running in proxmox - easy snapshots etc. Frontend deployed at Cloudflare pages. |
|
||||
| [Testing Implementation](https://github.com/dat515-2025/group-name) | Dejan | ✅ Complete | 16 hours | Medium | [Any notes] |
|
||||
| [Documentation](https://github.com/dat515-2025/group-name) | Both | 🔄 In Progress | [X hours] | Easy | [Any notes] |
|
||||
| [Presentation Video](https://github.com/dat515-2025/group-name) | Both | ❌ Not Started | [X hours] | Medium | [Any notes] |
|
||||
|
||||
**Legend**: ✅ Complete | 🔄 In Progress | ⏳ Pending | ❌ Not Started
|
||||
|
||||
@@ -340,27 +576,42 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
||||
|
||||
### [Lukáš]
|
||||
|
||||
| Date | Activity | Hours | Description |
|
||||
|----------------|---------------------|------------|----------------------------------------------------|
|
||||
| 4.10 to 10.10 | Initial Setup | 40 | Repository setup, project structure, cluster setup |
|
||||
| 14.10 to 16.10 | Backend Development | 12 | Implemented user authentication - oauth |
|
||||
| 8.10 to 12.10 | CI/CD | 10 | Created database schema and models |
|
||||
| [Date] | Testing | [X.X] | Unit tests for API endpoints |
|
||||
| [Date] | Documentation | [X.X] | Updated README and design doc |
|
||||
| **Total** | | **[XX.X]** | |
|
||||
## Hour Sheet
|
||||
|
||||
**Name:** Lukáš Trkan
|
||||
|
||||
| Date | Activity | Hours | Description | Representative Commit / PR |
|
||||
|:----------------|:----------------------------|:--------|:------------------------------------------------------------------------------------|:------------------------------------------------------|
|
||||
| 18.9. - 19.9. | Initial Setup & Design | 40 | Repository init, system design diagrams, basic Terraform setup | `feat(infrastructure): add basic terraform resources` |
|
||||
| 20.9. - 5.10. | Core Infrastructure & CI/CD | 12 | K8s setup (ArgoCD), CI/CD workflows, RabbitMQ, Redis, Celery workers, DB migrations | `PR #2`, `feat(infrastructure): add rabbitmq cluster` |
|
||||
| 6.10. - 9.10. | Frontend Infra & DB | 5 | Deployed frontend to Cloudflare, setup metrics, created database models | `PR #16` (Cloudflare), `PR #19` (DB structure) |
|
||||
| 10.10. - 11.10. | Backend | 5 | Implemented OAuth support (MojeID, BankID) | `feat(auth): add support for OAuth and MojeID` |
|
||||
| 12.10. | Infrastructure | 2 | Added database backups | `feat(infrastructure): add backups` |
|
||||
| 16.10. | Infrastructure | 4 | Implemented secrets management, fixed deployment/env variables | `PR #29` (Deployment envs) |
|
||||
| 17.10. | Monitoring | 1 | Added Sentry logging | `feat(app): add sentry loging` |
|
||||
| 21.10. - 22.10. | Backend | 8 | Added ČSAS bank connection | `PR #32` (Fix React OAuth) |
|
||||
| 29.10. - 30.10. | Backend | 5 | Implemented transaction encryption, add bank scraping | `PR #39` (CSAS Scraping) |
|
||||
| 30.10. | Monitoring | 6 | Implemented Loki logging and basic Prometheus metrics | `PR #42` (Prometheus metrics) |
|
||||
| 9.11. | Monitoring | 2 | Added custom Prometheus metrics | `PR #46` (Prometheus custom metrics) |
|
||||
| 11.11. | Tests | 1 | Investigated and fixed broken Pytest environment | `fix(tests): set pytest env` |
|
||||
| 11.11. - 12.11. | Features & Deployment | 6 | Added cron support, email sender service, updated workers & image | `PR #49` (Email), `PR #50` (Update workers) |
|
||||
| 18.9 - 14.11 | Documentation | 8 | Updated report.md, design docs, and tfvars.example | `Create design.md`, `update report` |
|
||||
| **Total** | | **105** | | |
|
||||
|
||||
### Dejan
|
||||
|
||||
| Date | Activity | Hours | Description |
|
||||
|-----------------|----------------------|--------|----------------------------------------------------------------------------------|
|
||||
| 25.9. | Design | 2 | 6design |
|
||||
| 9.10 to 11.10. | Backend APIs | 10 | Implemented Backend APIs |
|
||||
| 13.10 to 15.10. | Frontend Development | 7 | Created user interface mockups |
|
||||
| Continually | Documantation | 5 | Documenting the dev process |
|
||||
| 21.10 to 23.10 | Tests, forntend | 10 | Test basics, balance charts, and frontend improvement |
|
||||
| 28.10 to 30.10 | Tests, forntend | 7 | Tests improvement with test database setup, UI fix and exchange rate integration |
|
||||
| **Total** | | **41** | |
|
||||
|
||||
| Date | Activity | Hours | Description |
|
||||
|-----------------|----------------------|--------|---------------------------------------------------------------|
|
||||
| 25.9. | Design | 2 | 6design |
|
||||
| 9.10 to 11.10. | Backend APIs | 12 | Implemented Backend APIs |
|
||||
| 13.10 to 15.10. | Frontend Development | 8 | Created user interface mockups |
|
||||
| Continually | Documentation | 6 | Documenting the dev process |
|
||||
| 21.10 to 23.10 | Tests, frontend | 10 | Test basics, balance charts, and frontend improvement |
|
||||
| 28.10 to 30.10 | CI | 6 | Integrated tests with test database setup on github workflows |
|
||||
| 28.10 to 30.10 | Frontend | 7 | UI improvements and exchange rate API integration |
|
||||
| 4.11 to 6.11 | Tests | 6 | Test fixes improvement, more integration and e2e |
|
||||
| 4.11 to 6.11 | Frontend | 6 | Fixes, Improved UI, added support for mobile devices |
|
||||
| **Total** | | **63** | |
|
||||
|
||||
### Group Total: [XXX.X] hours
|
||||
|
||||
@@ -373,16 +624,32 @@ curl -H "Authorization: Bearer $TOKEN" http://127.0.0.1:8000/authenticated-route
|
||||
[Reflect on the key technical and collaboration skills learned during this project]
|
||||
|
||||
### Challenges Faced
|
||||
#### Slow cluster performance
|
||||
This was caused by single SATA SSD disk running all VMs. This was solved by adding second NVMe disk just for Talos VMs.
|
||||
|
||||
|
||||
[Describe the main challenges and how you overcame them]
|
||||
|
||||
### If We Did This Again
|
||||
#### Different framework
|
||||
FastAPI lacks usable build in support for database migrations and implementing Alembic was a bit tricky.
|
||||
Tricky was also integrating FastAPI auth system with React frontend, since there is no official project template.
|
||||
Using .NET (which we considered initially) would probably solve these issues.
|
||||
|
||||
|
||||
[What would you do differently? What worked well that you'd keep?]
|
||||
|
||||
### Individual Growth
|
||||
|
||||
#### [Team Member 1 Name]
|
||||
#### [Lukas]
|
||||
This course finally forced me to learn kubernetes (been on by TODO list for at least 3 years).
|
||||
I had some prior experience with terraform/opentofu from work but this improved by understanding of it.
|
||||
|
||||
The biggest challenge for me was time tracking since I am used to tracking to projects, not to tasks.
|
||||
(I am bad even at that :) ).
|
||||
|
||||
It was also interesting experience to be the one responsible for the initial project structure/design/setup
|
||||
used not only by myself.
|
||||
|
||||
[Personal reflection on growth, challenges, and learning]
|
||||
|
||||
|
||||
@@ -105,14 +105,6 @@ module "database" {
|
||||
s3_key_secret = var.s3_key_secret
|
||||
}
|
||||
|
||||
#module "argocd" {
|
||||
# source = "${path.module}/modules/argocd"
|
||||
# depends_on = [module.storage, module.loadbalancer, module.cloudflare]
|
||||
|
||||
# argocd_admin_password = var.argocd_admin_password
|
||||
# cloudflare_domain = var.cloudflare_domain
|
||||
#}
|
||||
|
||||
#module "redis" {
|
||||
# source = "${path.module}/modules/redis"
|
||||
# depends_on = [module.storage]
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
apiVersion: networking.cfargotunnel.com/v1alpha1
|
||||
kind: TunnelBinding
|
||||
metadata:
|
||||
name: argocd-tunnel-binding
|
||||
namespace: argocd
|
||||
subjects:
|
||||
- name: argocd-server
|
||||
spec:
|
||||
target: https://argocd-server.argocd.svc.cluster.local
|
||||
fqdn: argocd.${base_domain}
|
||||
noTlsVerify: true
|
||||
tunnelRef:
|
||||
kind: ClusterTunnel
|
||||
name: cluster-tunnel
|
||||
@@ -1,39 +0,0 @@
|
||||
terraform {
|
||||
required_providers {
|
||||
kubectl = {
|
||||
source = "gavinbunney/kubectl"
|
||||
version = "1.19.0"
|
||||
}
|
||||
helm = {
|
||||
source = "hashicorp/helm"
|
||||
version = "3.0.2"
|
||||
}
|
||||
kubernetes = {
|
||||
source = "hashicorp/kubernetes"
|
||||
version = "2.38.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
resource "kubernetes_namespace" "argocd" {
|
||||
metadata {
|
||||
name = "argocd"
|
||||
}
|
||||
}
|
||||
|
||||
resource "helm_release" "argocd" {
|
||||
name = "argocd"
|
||||
namespace = "argocd"
|
||||
repository = "https://argoproj.github.io/argo-helm"
|
||||
chart = "argo-cd"
|
||||
depends_on = [kubernetes_namespace.argocd]
|
||||
}
|
||||
|
||||
resource "kubectl_manifest" "argocd-tunnel-bind" {
|
||||
depends_on = [helm_release.argocd]
|
||||
|
||||
yaml_body = templatefile("${path.module}/argocd-ui.yaml", {
|
||||
base_domain = var.cloudflare_domain
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
variable "argocd_admin_password" {
|
||||
type = string
|
||||
nullable = false
|
||||
sensitive = true
|
||||
description = "ArgoCD admin password"
|
||||
}
|
||||
|
||||
variable "cloudflare_domain" {
|
||||
type = string
|
||||
default = "Base cloudflare domain, e.g. example.com"
|
||||
nullable = false
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
apiVersion: v2
|
||||
name: maxscale-helm
|
||||
version: 1.0.14
|
||||
version: 1.0.15
|
||||
description: Helm chart for MaxScale related Kubernetes manifests
|
||||
|
||||
@@ -154,6 +154,13 @@ spec:
|
||||
memory: 128Mi
|
||||
limits:
|
||||
memory: 1Gi
|
||||
monitor:
|
||||
interval: 2s
|
||||
cooperativeMonitoring: majority_of_all
|
||||
params:
|
||||
auto_failover: "true"
|
||||
auto_rejoin: "true"
|
||||
switchover_on_low_disk_space: "true"
|
||||
|
||||
livenessProbe:
|
||||
initialDelaySeconds: 20
|
||||
|
||||
@@ -59,7 +59,7 @@ resource "helm_release" "mariadb-operator" {
|
||||
resource "helm_release" "maxscale_helm" {
|
||||
name = "maxscale-helm"
|
||||
chart = "${path.module}/charts/maxscale-helm"
|
||||
version = "1.0.14"
|
||||
version = "1.0.15"
|
||||
depends_on = [helm_release.mariadb-operator-crds, kubectl_manifest.secrets]
|
||||
timeout = 3600
|
||||
|
||||
|
||||
@@ -64,3 +64,21 @@ resource "kubectl_manifest" "argocd-tunnel-bind" {
|
||||
base_domain = var.cloudflare_domain
|
||||
})
|
||||
}
|
||||
|
||||
resource "helm_release" "loki_stack" {
|
||||
name = "loki-stack"
|
||||
repository = "https://grafana.github.io/helm-charts"
|
||||
chart = "loki-stack"
|
||||
namespace = kubernetes_namespace.monitoring.metadata[0].name
|
||||
version = "2.9.12"
|
||||
|
||||
set = [{
|
||||
name = "grafana.enabled"
|
||||
value = "false"
|
||||
}]
|
||||
|
||||
|
||||
depends_on = [
|
||||
helm_release.kube_prometheus_stack
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
# Example terraform.tfvars for MariaDB and MetalLB
|
||||
|
||||
metallb_ip_range = "10.80.0.100-10.80.0.240"
|
||||
|
||||
# Secret configuration (use strong passwords; do not commit real secrets)
|
||||
@@ -11,13 +9,19 @@ mariadb_user_name = "example_user"
|
||||
mariadb_user_host = "%"
|
||||
mariadb_user_password = "example_user_password"
|
||||
|
||||
# MetalLB IPs for services (optional)
|
||||
# MetalLB IPs for services
|
||||
metallb_maxscale_ip = "10.80.0.219"
|
||||
metallb_service_ip = "10.80.0.120"
|
||||
metallb_primary_ip = "10.80.0.130"
|
||||
metallb_secondary_ip = "10.80.0.131"
|
||||
|
||||
# phpMyAdmin toggle
|
||||
s3_enabled = false
|
||||
s3_bucket = "cluster"
|
||||
s3_region = "us-east-1"
|
||||
s3_endpoint = "your.s3.endpoint.example"
|
||||
s3_key_id = "your_s3_key_id"
|
||||
s3_key_secret = "your_s3_key_secret"
|
||||
|
||||
phpmyadmin_enabled = true
|
||||
|
||||
cloudflare_account_id = "CHANGE_ME"
|
||||
@@ -26,4 +30,5 @@ cloudflare_email = "CHANGE_ME"
|
||||
cloudflare_tunnel_name = "CHANGE_ME"
|
||||
cloudflare_domain = "CHANGE_ME"
|
||||
|
||||
rabbitmq-password = "CHANGE_ME"
|
||||
|
||||
|
||||
Reference in New Issue
Block a user