chore: Backend codebase cleanup and archiving of legacy scripts

This commit is contained in:
Roo
2026-03-22 20:07:37 +00:00
parent 5d96b00f81
commit 309a72cc0b
19 changed files with 530 additions and 184 deletions

View File

@@ -0,0 +1,388 @@
#!/usr/bin/env python3
"""
Sandbox Seeder Script - Creates a persistent sandbox user in the live/dev database
for manual testing via Swagger.
Steps:
1. Register via POST /api/v1/auth/register
2. Extract verification token from Mailpit API
3. Verify email via POST /api/v1/auth/verify-email
4. Login via POST /api/v1/auth/login to get JWT
5. Complete KYC via POST /api/v1/auth/complete-kyc
6. Create organization via POST /api/v1/organizations/onboard
7. Add a test vehicle/asset via appropriate endpoint
8. Add a fuel expense (15,000 HUF) via POST /api/v1/expenses/add
Prints credentials and IDs for immediate use.
"""
import asyncio
import httpx
import json
import sys
import time
from datetime import date, datetime, timedelta
import uuid
# Configuration
API_BASE = "http://localhost:8000"  # FastAPI server (runs inside sf_api container)
MAILPIT_API = "http://sf_mailpit:8025/api/v1/messages"
# Same endpoint as MAILPIT_API; an HTTP DELETE on it clears the whole mailbox.
MAILPIT_DELETE_ALL = "http://sf_mailpit:8025/api/v1/messages"

# Generate unique email each run to avoid duplicate key errors
unique_id = int(time.time())
SANDBOX_EMAIL = f"sandbox_{unique_id}@test.com"
SANDBOX_PASSWORD = "Sandbox123!"
SANDBOX_FIRST_NAME = "Sandbox"
SANDBOX_LAST_NAME = "User"

# Dummy KYC data sent to POST /api/v1/auth/complete-kyc (step 5 in main()).
DUMMY_KYC = {
    "phone_number": "+36123456789",
    "birth_place": "Budapest",
    "birth_date": "1990-01-01",
    "mothers_last_name": "Kovács",
    "mothers_first_name": "Éva",
    "address_zip": "1051",
    "address_city": "Budapest",
    "address_street_name": "Váci",
    "address_street_type": "utca",
    "address_house_number": "1",
    "address_stairwell": None,
    "address_floor": None,
    "address_door": None,
    "address_hrsz": None,
    "identity_docs": {
        "ID_CARD": {
            "number": "123456AB",
            "expiry_date": "2030-12-31"
        }
    },
    "ice_contact": {  # "in case of emergency" contact
        "name": "John Doe",
        "phone": "+36198765432",
        "relationship": "friend"
    },
    "preferred_language": "hu",
    "preferred_currency": "HUF"
}

# Dummy organization data for POST /api/v1/organizations/onboard (step 6).
# tax_number / reg_number are derived from the run timestamp so repeated runs
# do not collide on unique constraints.
DUMMY_ORG = {
    "full_name": "Sandbox Test Kft.",
    "name": "Sandbox Kft.",
    "display_name": "Sandbox Test",
    "tax_number": f"{unique_id}"[:8] + "-1-42",
    "reg_number": f"01-09-{unique_id}"[:6],
    "country_code": "HU",
    "language": "hu",
    "default_currency": "HUF",
    "address_zip": "1051",
    "address_city": "Budapest",
    "address_street_name": "Váci",
    "address_street_type": "utca",
    "address_house_number": "2",
    "address_stairwell": None,
    "address_floor": None,
    "address_door": None,
    "address_hrsz": None,
    "contacts": [
        {
            "full_name": "Sandbox User",
            "email": SANDBOX_EMAIL,
            "phone": "+36123456789",
            "contact_type": "primary"
        }
    ]
}

# Dummy vehicle data (step 7); plate and VIN are randomized per run.
DUMMY_VEHICLE = {
    "catalog_id": 1,  # Assuming there's at least one catalog entry
    "license_plate": f"SBX-{uuid.uuid4().hex[:4]}".upper(),
    "vin": f"VIN{uuid.uuid4().hex[:10]}".upper(),
    "nickname": "Sandbox Car",
    "purchase_date": "2025-01-01",
    "initial_mileage": 5000,
    "fuel_type": "petrol",
    "transmission": "manual"
}

# Dummy expense data (step 8).
DUMMY_EXPENSE = {
    "asset_id": None,  # Will be filled after vehicle creation
    "category": "fuel",
    "amount": 15000.0,
    "date": date.today().isoformat()
}
async def clean_mailpit():
    """Purge every message from Mailpit so the next run only sees fresh mail.

    Failures are reported but never raised: a dirty mailbox is survivable,
    the token-polling step simply has more messages to scan.
    """
    print(" [DEBUG] Entering clean_mailpit()")
    async with httpx.AsyncClient() as http:
        try:
            print(f" [DEBUG] Sending DELETE to {MAILPIT_DELETE_ALL}")
            response = await http.delete(MAILPIT_DELETE_ALL)
            print(f" [DEBUG] DELETE response status: {response.status_code}")
            if response.status_code != 200:
                print(f"⚠️ Mailpit clean returned {response.status_code}, continuing anyway.")
            else:
                print("🗑️ Mailpit cleaned (all messages deleted).")
        except Exception as e:
            print(f"⚠️ Mailpit clean failed: {e}, continuing anyway.")
async def fetch_mailpit_token():
    """Fetch the latest verification token from Mailpit with polling.

    Polls the Mailpit REST API up to ``max_attempts`` times, looking for a
    message addressed to SANDBOX_EMAIL, then scans its text/HTML body with a
    set of regex patterns for the verification token.

    Returns:
        str | None: the token, or None if no matching email/token was found
        after all attempts.
    """
    import re
    import sys
    max_attempts = 5
    wait_seconds = 3  # pause between polls; email delivery is asynchronous
    print(f"[DEBUG] Starting fetch_mailpit_token() with max_attempts={max_attempts}", flush=True)
    async with httpx.AsyncClient() as client:
        for attempt in range(1, max_attempts + 1):
            try:
                print(f"[DEBUG] Fetching Mailpit messages (attempt {attempt}/{max_attempts})...", flush=True)
                resp = await client.get(MAILPIT_API)
                resp.raise_for_status()
                messages = resp.json()
                # Debug: print raw response summary
                total = messages.get("total", 0)
                count = messages.get("count", 0)
                print(f"[DEBUG] Mailpit response: total={total}, count={count}", flush=True)
                if not messages.get("messages"):
                    print(f"⚠️ No emails in Mailpit (attempt {attempt}/{max_attempts}). Waiting {wait_seconds}s...", flush=True)
                    await asyncio.sleep(wait_seconds)
                    continue
                # Print each message's subject and recipients for debugging
                for idx, msg in enumerate(messages.get("messages", [])):
                    subject = msg.get("Subject", "No Subject")
                    to_list = msg.get("To", [])
                    from_list = msg.get("From", [])
                    print(f"[DEBUG] Message {idx}: Subject='{subject}', To={to_list}, From={from_list}", flush=True)
                print(f"[DEBUG] Looking for email to {SANDBOX_EMAIL}...", flush=True)
                # Find the latest email to our sandbox email
                for msg in messages.get("messages", []):
                    # Check if email is in To field (which is a list of dicts)
                    to_list = msg.get("To", [])
                    email_found = False
                    for recipient in to_list:
                        # Mailpit normally returns {"Name": ..., "Address": ...}
                        # dicts, but tolerate plain strings too.
                        if isinstance(recipient, dict) and recipient.get("Address") == SANDBOX_EMAIL:
                            email_found = True
                            break
                        elif isinstance(recipient, str) and recipient == SANDBOX_EMAIL:
                            email_found = True
                            break
                    if email_found:
                        msg_id = msg.get("ID")
                        print(f"[DEBUG] Found email to {SANDBOX_EMAIL}, message ID: {msg_id}")
                        # Fetch full message details (Text and HTML are empty in list response)
                        if msg_id:
                            try:
                                # Correct endpoint: /api/v1/message/{id} (singular)
                                detail_resp = await client.get(f"http://sf_mailpit:8025/api/v1/message/{msg_id}")
                                detail_resp.raise_for_status()
                                detail = detail_resp.json()
                                body = detail.get("Text", "")
                                html_body = detail.get("HTML", "")
                                print(f"[DEBUG] Fetched full message details, body length: {len(body)}, HTML length: {len(html_body)}")
                            except Exception as e:
                                # Fall back to the (likely empty) list-response fields
                                print(f"[DEBUG] Failed to fetch message details: {e}")
                                body = msg.get("Text", "")
                                html_body = msg.get("HTML", "")
                        else:
                            body = msg.get("Text", "")
                            html_body = msg.get("HTML", "")
                        if body:
                            print(f"[DEBUG] Body preview (first 500 chars): {body[:500]}...")
                        # Try to find token using patterns from test suite
                        patterns = [
                            r"token=([a-zA-Z0-9\-_]+)",
                            r"/verify/([a-zA-Z0-9\-_]+)",
                            r"verification code: ([a-zA-Z0-9\-_]+)",
                            r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",  # UUID pattern
                            r"[0-9a-f]{32}",  # UUID without hyphens
                        ]
                        for pattern in patterns:
                            if body:
                                token_match = re.search(pattern, body, re.I)
                                if token_match:
                                    # Group 1 holds the token for capturing
                                    # patterns; bare-UUID patterns have no group.
                                    token = token_match.group(1) if token_match.groups() else token_match.group(0)
                                    print(f"✅ Token found with pattern '{pattern}' on attempt {attempt}: {token}")
                                    return token
                        # If not found in text, try HTML body
                        if html_body:
                            for pattern in patterns:
                                html_token_match = re.search(pattern, html_body, re.I)
                                if html_token_match:
                                    token = html_token_match.group(1) if html_token_match.groups() else html_token_match.group(0)
                                    print(f"✅ Token found in HTML with pattern '{pattern}' on attempt {attempt}: {token}")
                                    return token
                        print(f"[DEBUG] No token pattern found. Body length: {len(body)}, HTML length: {len(html_body)}")
                        if body:
                            print(f"[DEBUG] Full body (first 1000 chars): {body[:1000]}")
                        if html_body:
                            print(f"[DEBUG] HTML body snippet (first 500 chars): {html_body[:500]}")
                        print(f"⚠️ Email found but no token (attempt {attempt}/{max_attempts}). Waiting {wait_seconds}s...")
                        await asyncio.sleep(wait_seconds)
            except Exception as e:
                print(f"❌ Mailpit API error on attempt {attempt}: {e}")
                await asyncio.sleep(wait_seconds)
    print("❌ Could not retrieve token after all attempts.")
    return None
async def main():
    """Run the full sandbox-onboarding flow against the live API.

    Sequence: clean Mailpit → register → fetch & apply email-verification
    token → login → complete KYC → create organization → create a vehicle
    asset (trying several candidate endpoints) → record one fuel expense.
    Registration/verification/login failures abort the run; later steps are
    best-effort and only log a warning on failure.
    """
    print("🚀 Starting Sandbox User Creation...")
    async with httpx.AsyncClient(base_url=API_BASE, timeout=30.0) as client:
        # Step 0: Clean Mailpit to ensure only new emails
        print("0. Cleaning Mailpit...")
        await clean_mailpit()
        # Step 1: Register
        print("1. Registering user...")
        register_data = {
            "email": SANDBOX_EMAIL,
            "password": SANDBOX_PASSWORD,
            "first_name": SANDBOX_FIRST_NAME,
            "last_name": SANDBOX_LAST_NAME,
            "region_code": "HU",
            "lang": "hu",
            "timezone": "Europe/Budapest"
        }
        resp = await client.post("/api/v1/auth/register", json=register_data)
        if resp.status_code not in (200, 201):
            print(f"❌ Registration failed: {resp.status_code} {resp.text}")
            return
        print("✅ Registration successful.")
        # Step 2: Get token from Mailpit
        print("2. Fetching verification token from Mailpit...")
        token = await fetch_mailpit_token()
        if not token:
            print("❌ Could not retrieve token. Exiting.")
            return
        print(f"✅ Token found: {token}")
        # Step 3: Verify email
        print("3. Verifying email...")
        resp = await client.post("/api/v1/auth/verify-email", json={"token": token})
        if resp.status_code != 200:
            print(f"❌ Email verification failed: {resp.status_code} {resp.text}")
            return
        print("✅ Email verified.")
        # Step 4: Login — sent as form data (data=), which is the OAuth2
        # password-flow convention in FastAPI, not JSON.
        print("4. Logging in...")
        resp = await client.post("/api/v1/auth/login", data={
            "username": SANDBOX_EMAIL,
            "password": SANDBOX_PASSWORD
        })
        if resp.status_code != 200:
            print(f"❌ Login failed: {resp.status_code} {resp.text}")
            return
        login_data = resp.json()
        access_token = login_data.get("access_token")
        if not access_token:
            print("❌ No access token in login response.")
            return
        print("✅ Login successful.")
        # Update client headers with JWT so all following calls are authenticated
        client.headers.update({"Authorization": f"Bearer {access_token}"})
        # Step 5: Complete KYC
        print("5. Completing KYC...")
        resp = await client.post("/api/v1/auth/complete-kyc", json=DUMMY_KYC)
        if resp.status_code != 200:
            print(f"❌ KYC completion failed: {resp.status_code} {resp.text}")
            # Continue anyway (maybe KYC optional)
        else:
            print("✅ KYC completed.")
        # Step 6: Create organization
        print("6. Creating organization...")
        resp = await client.post("/api/v1/organizations/onboard", json=DUMMY_ORG)
        if resp.status_code not in (200, 201):
            print(f"❌ Organization creation failed: {resp.status_code} {resp.text}")
            # Continue anyway (maybe optional)
            org_id = None
        else:
            org_data = resp.json()
            org_id = org_data.get("organization_id")
            print(f"✅ Organization created with ID: {org_id}")
        # Step 7: Add vehicle/asset — the exact endpoint differs between API
        # versions, so try the candidates in order until one accepts.
        print("7. Adding vehicle/asset...")
        asset_id = None
        # Try POST /api/v1/assets
        resp = await client.post("/api/v1/assets", json=DUMMY_VEHICLE)
        if resp.status_code in (200, 201):
            asset_data = resp.json()
            asset_id = asset_data.get("asset_id") or asset_data.get("id")
            print(f"✅ Asset created via /api/v1/assets, ID: {asset_id}")
        else:
            # Try POST /api/v1/vehicles
            resp = await client.post("/api/v1/vehicles", json=DUMMY_VEHICLE)
            if resp.status_code in (200, 201):
                asset_data = resp.json()
                asset_id = asset_data.get("vehicle_id") or asset_data.get("id")
                print(f"✅ Vehicle created via /api/v1/vehicles, ID: {asset_id}")
            else:
                # Try POST /api/v1/catalog/claim
                resp = await client.post("/api/v1/catalog/claim", json={
                    "catalog_id": DUMMY_VEHICLE["catalog_id"],
                    "license_plate": DUMMY_VEHICLE["license_plate"]
                })
                if resp.status_code in (200, 201):
                    asset_data = resp.json()
                    asset_id = asset_data.get("asset_id") or asset_data.get("id")
                    print(f"✅ Asset claimed via /api/v1/catalog/claim, ID: {asset_id}")
                else:
                    print(f"⚠️ Could not create vehicle/asset. Skipping. Status: {resp.status_code}, Response: {resp.text}")
        # Step 8: Add expense (if asset created)
        if asset_id:
            print("8. Adding expense (15,000 HUF fuel)...")
            expense_data = DUMMY_EXPENSE.copy()
            expense_data["asset_id"] = asset_id
            resp = await client.post("/api/v1/expenses/add", json=expense_data)
            if resp.status_code in (200, 201):
                print("✅ Expense added.")
            else:
                print(f"⚠️ Expense addition failed: {resp.status_code} {resp.text}")
        else:
            print("⚠️ Skipping expense because no asset ID.")
        # Final output: credentials summary for manual Swagger testing
        print("\n" + "="*60)
        print("🎉 SANDBOX USER CREATION COMPLETE!")
        print("="*60)
        print(f"Email: {SANDBOX_EMAIL}")
        print(f"Password: {SANDBOX_PASSWORD}")
        print(f"JWT Access Token: {access_token}")
        print(f"Organization ID: {org_id}")
        print(f"Asset/Vehicle ID: {asset_id}")
        print(f"Login via Swagger: {API_BASE}/docs")
        print("="*60)
        print("\nYou can now use these credentials for manual testing.")
        print("Note: The user is fully verified and has a dummy organization,")
        print("a dummy vehicle, and a fuel expense of 15,000 HUF.")
        print("="*60)
if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,27 @@
#!/usr/bin/env python3
"""
Manuálisan létrehozza a TCO táblákat, ha a migráció nem működik.
"""
import asyncio
import sys
from sqlalchemy import text
from app.database import engine, Base
from app.models.vehicle import CostCategory, VehicleCost
async def create_tables():
    """Create the TCO tables manually when the Alembic migration fails.

    Ensures the ``vehicle`` schema exists, creates every table registered on
    ``Base.metadata`` (idempotent), then queries ``information_schema`` to
    confirm the two expected TCO tables are present.
    """
    print("Creating TCO tables...")
    async with engine.begin() as conn:
        # Make sure the target schema exists before create_all runs
        await conn.execute(text("CREATE SCHEMA IF NOT EXISTS vehicle"))
        # Create tables from the imported model metadata (no-op if present)
        await conn.run_sync(Base.metadata.create_all)
        print("Tables created (or already exist).")
        # Verification query
        result = await conn.execute(text("SELECT table_name FROM information_schema.tables WHERE table_schema = 'vehicle' AND table_name IN ('cost_categories', 'costs')"))
        tables = result.fetchall()
        print(f"Found tables: {tables}")
if __name__ == "__main__":
    # NOTE: the previous sys.path.insert() here was dead code — the module-level
    # `app.*` imports above have already executed (or failed) long before this
    # guard runs, so adjusting sys.path at this point can never help them.
    asyncio.run(create_tables())

View File

@@ -0,0 +1,43 @@
import sys
import os

# Make sure the current directory is on the import path.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".")))

try:
    print("🔍 Modellek betöltése...")
    from app.database import Base
    # Important: import the models package explicitly so every __init__.py
    # side-effect import runs and registers its tables on Base.metadata.
    import app.models
    print("✅ Importálás sikeres.\n")
except ImportError as e:
    print(f"❌ KRITIKUS IMPORT HIBA: {e}")
    sys.exit(1)
except Exception as e:
    print(f"❌ VÁRATLAN HIBA: {e}")
    sys.exit(1)

# Inspect whatever ended up registered on the shared metadata object.
metadata_tables = Base.metadata.tables
print(f"📊 Összesen talált táblák száma a memóriában: {len(metadata_tables)}")
if len(metadata_tables) == 0:
    print("⚠️ VESZÉLY: A Metadata ÜRES! Az Alembic ezért nem lát semmit.")
    print("Ellenőrizd, hogy a modellek valóban az 'app.database.Base'-ből örökölnek-e!")
else:
    # Group the table names by schema for readability.
    schemas = {}
    for table_full_name, table_obj in metadata_tables.items():
        bucket = table_obj.schema or "public"
        schemas.setdefault(bucket, []).append(table_full_name)
    for schema in sorted(schemas):
        print(f"\n--- 📂 Séma: {schema} ({len(schemas[schema])} tábla) ---")
        for table in sorted(schemas[schema]):
            print(f" [✓] {table}")
print("\n🚀 Diagnosztika vége.")

View File

@@ -0,0 +1,111 @@
# /opt/docker/dev/service_finder/backend/discovery_bot.py
import asyncio
import json
import httpx
import os
import hashlib
import logging
from urllib.parse import quote
from sqlalchemy import select
from app.database import AsyncSessionLocal
from app.models.staged_data import ServiceStaging
# Logolás beállítása
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s]: %(message)s')
logger = logging.getLogger("OSM-Discovery")
# Konfiguráció
HUNGARY_BBOX = "45.7,16.1,48.6,22.9"
OVERPASS_URL = "http://overpass-api.de/api/interpreter?data="
class OSMDiscoveryBot:
    """Harvests car-service POIs from OpenStreetMap (Overpass API) into the
    ServiceStaging table, deduplicating on a name+city fingerprint."""

    @staticmethod
    def generate_fingerprint(name: str, city: str) -> str:
        """Build the dedup fingerprint: md5 of "name|city", lowercased.

        Intentionally looser than the Hunter variant, because OSM address
        data is often incomplete.
        """
        key = f"{str(name).lower()}|{str(city).lower()}"
        return hashlib.md5(key.encode()).hexdigest()

    @staticmethod
    def get_service_type(tags: dict, name: str) -> str:
        """Map OSM tags (and name keywords) onto internal service categories."""
        label = name.lower()
        shop_tag = tags.get('shop', '')
        amenity_tag = tags.get('amenity', '')
        if shop_tag == 'tyres' or 'gumi' in label:
            return 'tire_shop'
        if amenity_tag == 'car_wash' or 'mosó' in label:
            return 'car_wash'
        if any(kw in label for kw in ['villamos', 'autóvill', 'elektro']):
            return 'electrician'
        if any(kw in label for kw in ['fényez', 'lakatos', 'karosszéria']):
            return 'body_shop'
        return 'mechanic'

    async def fetch_osm_data(self, query_part: str):
        """Asynchronously fetch matching nodes and ways from the Overpass API."""
        query = f'[out:json][timeout:120];(node{query_part}({HUNGARY_BBOX});way{query_part}({HUNGARY_BBOX}););out center;'
        async with httpx.AsyncClient(timeout=150) as http:
            try:
                response = await http.get(OVERPASS_URL + quote(query))
                if response.status_code != 200:
                    return []
                return response.json().get('elements', [])
            except Exception as e:
                logger.error(f"❌ Overpass hiba: {e}")
                return []

    async def sync(self):
        """Run a country-wide sync: fetch, dedupe, and stage new services."""
        logger.info("🛰️ OSM Országos szinkronizáció indítása...")
        # 1. Overpass selectors to run
        queries = [
            '["shop"~"car_repair|tyres"]',
            '["amenity"="car_wash"]'
        ]
        all_elements = []
        for selector in queries:
            all_elements.extend(await self.fetch_osm_data(selector))
        logger.info(f"📊 {len(all_elements)} potenciális szervizpont érkezett.")
        async with AsyncSessionLocal() as db:
            added_count = 0
            for element in all_elements:
                tags = element.get('tags', {})
                if not tags.get('name'):
                    continue
                # Ways carry their coordinates under "center"
                lat = element.get('lat', element.get('center', {}).get('lat'))
                lon = element.get('lon', element.get('center', {}).get('lon'))
                name = tags.get('name', tags.get('operator', 'Ismeretlen szerviz'))
                city = tags.get('addr:city', 'Ismeretlen')
                street = tags.get('addr:street', '')
                housenumber = tags.get('addr:housenumber', '')
                fingerprint = self.generate_fingerprint(name, city)
                # Skip anything we have already staged
                lookup = select(ServiceStaging).where(ServiceStaging.fingerprint == fingerprint)
                already_staged = (await db.execute(lookup)).scalar_one_or_none()
                if already_staged:
                    continue
                db.add(ServiceStaging(
                    name=name,
                    source="osm_discovery_v2",
                    fingerprint=fingerprint,
                    city=city,
                    full_address=f"{city}, {street} {housenumber}".strip(", "),
                    status="pending",
                    # OSM data gets a lower trust score than Google-sourced data
                    trust_score=20,
                    raw_data=tags
                ))
                added_count += 1
            await db.commit()
        logger.info(f"✅ Szinkron kész. {added_count} új elem került a Staging táblába.")

if __name__ == "__main__":
    bot = OSMDiscoveryBot()
    asyncio.run(bot.sync())

View File

@@ -0,0 +1,12 @@
import asyncio
from app.database import engine, Base
import app.models # Összes modell betöltése
async def force_sync():
    """Force-create every table registered on Base.metadata (all schemas)."""
    async with engine.begin() as connection:
        # create_all builds everything the imported models declared
        await connection.run_sync(Base.metadata.create_all)
    print("✅ Minden tábla sikeresen létrehozva a sémákban!")
if __name__ == "__main__":
    asyncio.run(force_sync())

View File

@@ -0,0 +1,49 @@
import asyncio
from sqlalchemy import text
from app.db.session import engine
# 200+ Autó márka és Motor márkák discovery listája
# Discovery list: ~100 car brands plus motorcycle/truck/van brands,
# keyed by vehicle category name as stored in data.vehicle_categories.
DATA = {
    "CAR": [
        "Toyota", "Volkswagen", "Ford", "BMW", "Mercedes-Benz", "Audi", "Honda", "Hyundai", "Kia", "Mazda",
        "Nissan", "Opel", "Peugeot", "Renault", "Skoda", "Suzuki", "Volvo", "Fiat", "Dacia", "Citroen",
        "Tesla", "Lexus", "Alfa Romeo", "Jaguar", "Land Rover", "Mitsubishi", "Subaru", "Seat", "Cupra",
        "Jeep", "Chevrolet", "Cadillac", "Buick", "GMC", "Dodge", "Ram", "Chrysler", "Acura", "Infiniti",
        "Lincoln", "Porsche", "Mini", "Smart", "Bentley", "Rolls-Royce", "Lamborghini", "Ferrari", "Maserati",
        "Aston Martin", "Bugatti", "Lotus", "McLaren", "Polestar", "Genesis", "BYD", "Geely", "Chery", "MG",
        "Lada", "Iveco", "Isuzu", "SsangYong", "Rover", "Saab", "Lancia", "Pontiac", "Saturn", "Hummer", "Oldsmobile"
    ],
    "MOTORCYCLE": [
        "Honda", "Yamaha", "Kawasaki", "Suzuki", "BMW Motorrad", "Harley-Davidson", "Ducati", "KTM", "Triumph",
        "Royal Enfield", "Indian", "Aprilia", "Moto Guzzi", "Vespa", "Piaggio", "MV Agusta", "Norton", "Husqvarna",
        "Benelli", "Bimota", "Beta", "Gas Gas", "Sherco", "CF Moto", "Hyosung", "Keeway", "SYM", "Kymco", "Bajaj", "Zero"
    ],
    "TRUCK": ["Scania", "Volvo Trucks", "MAN", "DAF", "Iveco", "Mercedes-Benz Trucks", "Renault Trucks", "MACK", "Peterbilt", "Kenworth"],
    "VAN": ["Ford Commercial", "Volkswagen Commercial", "Iveco Daily", "Mercedes-Benz Vans", "Fiat Professional"]
}
async def seed():
    """Idempotently sync vehicle categories and brands from DATA into the DB."""
    async with engine.begin() as connection:
        print("🔍 Adatok szinkronizálása...")
        # 1. Ensure every category row exists
        for category in DATA:
            await connection.execute(
                text("INSERT INTO data.vehicle_categories (name) VALUES (:n) ON CONFLICT (name) DO NOTHING"),
                {"n": category},
            )
        rows = await connection.execute(text("SELECT id, name FROM data.vehicle_categories"))
        category_ids = {row.name: row.id for row in rows}
        # 2. Insert the brands under their category
        total = 0
        for category, brand_names in DATA.items():
            category_id = category_ids[category]
            for brand_name in brand_names:
                await connection.execute(
                    text("INSERT INTO data.vehicle_brands (category_id, name) VALUES (:c, :n) ON CONFLICT DO NOTHING"),
                    {"c": category_id, "n": brand_name},
                )
                total += 1
        print(f"✅ SIKER: {total} márka rögzítve az adatbázisban!")
if __name__ == "__main__":
    asyncio.run(seed())

View File

@@ -0,0 +1,32 @@
import asyncio
import httpx
from sqlalchemy import text
from app.db.session import SessionLocal
async def seed():
    """Download every distinct make from the Dutch RDW open-data API and
    stage each one as a pending row in data.catalog_discovery."""
    print("🚀 RDW Márka-felfedezés indul...")
    url = "https://opendata.rdw.nl/resource/m9d7-ebf2.json?$select=distinct%20merk&$limit=50000"
    async with httpx.AsyncClient() as http:
        response = await http.get(url, timeout=60)
        if response.status_code != 200:
            print(f"❌ Hiba: {response.status_code}")
            return
        makes = response.json()
        print(f"📦 {len(makes)} márkát találtam. Mentés...")
        async with SessionLocal() as db:
            for entry in makes:
                make = entry['merk'].upper()
                # ON CONFLICT: tolerate rows already inserted (e.g. by n8n)
                await db.execute(text("""
                    INSERT INTO data.catalog_discovery (make, model, source, status)
                    VALUES (:m, 'ALL', 'global_seed', 'pending')
                    ON CONFLICT DO NOTHING
                """), {"m": make})
            await db.commit()
        print("✅ Kész! A discovery tábla felöltve az összes EU-s márkával.")
if __name__ == "__main__":
    asyncio.run(seed())

View File

@@ -0,0 +1,35 @@
import asyncio
from sqlalchemy import text
from app.db.session import engine
# Discovery adatok a 200 márkához (példa bővíthető)
# Discovery seed: known model names per brand (sample set, extendable).
# Brand keys must match rows in data.vehicle_brands by name.
MODELS_DISCOVERY = {
    "Toyota": ["Corolla", "Yaris", "Hilux", "RAV4", "C-HR", "Avensis", "Land Cruiser"],
    "Volkswagen": ["Golf", "Passat", "Polo", "Tiguan", "Touran", "Transporter", "Caddy"],
    "BMW": ["3 Series", "5 Series", "X5", "X3", "1 Series", "7 Series"],
    "Honda": ["Civic", "CR-V", "Jazz", "Accord", "CB 500", "Africa Twin"],  # both cars and motorcycles
    "Yamaha": ["MT-07", "R1", "Tracer 900", "Ténéré 700", "XMAX"],
    "Scania": ["R-series", "S-series", "G-series", "P-series"]
}
async def discovery_bot():
    """Insert known model names for every brand present in the database."""
    async with engine.begin() as connection:
        print("🤖 Jármű típus felderítő bot indul...")
        # Load all brand rows
        result = await connection.execute(text("SELECT id, name FROM data.vehicle_brands"))
        inserted = 0
        for brand_id, brand_name in result.fetchall():
            # Only brands with a curated model list are expanded
            for model_name in MODELS_DISCOVERY.get(brand_name, []):
                await connection.execute(
                    text("INSERT INTO data.vehicle_models (brand_id, name) VALUES (:b, :n) ON CONFLICT DO NOTHING"),
                    {"b": brand_id, "n": model_name},
                )
                inserted += 1
        print(f"✅ Bot végzett: {inserted} új típus rögzítve!")
if __name__ == "__main__":
    asyncio.run(discovery_bot())

View File

@@ -0,0 +1,82 @@
import psycopg2
from psycopg2.extras import RealDictCursor
import os
from dotenv import load_dotenv
load_dotenv()
def get_connection():
    """Open a psycopg2 connection configured from POSTGRES_* env vars.

    The docker-internal host alias "postgres-db" is rewritten to localhost
    so the script also runs outside the compose network.
    """
    host = os.getenv("POSTGRES_HOST", "localhost")
    host = "localhost" if host == "postgres-db" else host
    return psycopg2.connect(
        dbname=os.getenv("POSTGRES_DB", "service_finder"),
        user=os.getenv("POSTGRES_USER", "kincses"),
        password=os.getenv("POSTGRES_PASSWORD", "PASSWORD"),
        host=host,
        port=os.getenv("POSTGRES_PORT", "5432"),
    )
def seed_system():
    """Idempotently sync a fixed set of vehicle brands and engine specs.

    Brands are upserted (update-if-exists, else insert); engine specs are
    insert-if-missing only. The whole run is one transaction: committed on
    success, rolled back on any error.
    """
    conn = None
    try:
        conn = get_connection()
        cur = conn.cursor(cursor_factory=RealDictCursor)
        print("🚀 Kapcsolat aktív. Adatok szinkronizálása...")
        # 1. Brand synchronization
        brands = [
            {'name': 'Volkswagen', 'slug': 'volkswagen', 'origin': 'Germany'},
            {'name': 'Audi', 'slug': 'audi', 'origin': 'Germany'},
            {'name': 'BMW', 'slug': 'bmw', 'origin': 'Germany'},
            {'name': 'Skoda', 'slug': 'skoda', 'origin': 'Czech Republic'},
            {'name': 'Toyota', 'slug': 'toyota', 'origin': 'Japan'}
        ]
        for b in brands:
            # Check whether the brand already exists
            cur.execute("SELECT id FROM data.vehicle_brands WHERE name = %s", (b['name'],))
            row = cur.fetchone()
            if row:
                # Update existing row; COALESCE keeps any slug already set
                cur.execute("""
                    UPDATE data.vehicle_brands
                    SET slug = COALESCE(slug, %s),
                        country_of_origin = %s
                    WHERE id = %s
                """, (b['slug'], b['origin'], row['id']))
            else:
                # Insert when new
                cur.execute("""
                    INSERT INTO data.vehicle_brands (name, slug, country_of_origin)
                    VALUES (%s, %s, %s)
                """, (b['name'], b['slug'], b['origin']))
        # 2. Engine spec synchronization
        # Tuples: (engine_code, fuel_type, power_kw, default_service_interval_km)
        engines = [
            ('CAGA', 'diesel', 105, 15000),
            ('DADA', 'petrol', 110, 30000),
            ('B47D20', 'diesel', 140, 25000)
        ]
        for code, fuel, kw, interval in engines:
            cur.execute("SELECT id FROM data.engine_specs WHERE engine_code = %s", (code,))
            if not cur.fetchone():
                cur.execute("""
                    INSERT INTO data.engine_specs (engine_code, fuel_type, power_kw, default_service_interval_km)
                    VALUES (%s, %s, %s, %s)
                """, (code, fuel, kw, interval))
        conn.commit()
        print("✅ Szinkronizálás sikeres! Az adatbázis naprakész.")
    except Exception as e:
        # Best-effort script: report and roll back rather than crash
        print(f"❌ Hiba: {e}")
        if conn: conn.rollback()
    finally:
        if conn: conn.close()
if __name__ == "__main__":
    seed_system()

View File

@@ -0,0 +1,49 @@
import asyncio
from sqlalchemy import text
from app.db.session import engine
# 200+ Autó márka és Motor márkák discovery listája
# Discovery list: ~100 car brands plus motorcycle/truck/van brands,
# keyed by vehicle category name as stored in data.vehicle_categories.
# NOTE(review): this file duplicates the earlier brand-seed script — consider
# consolidating the two.
DATA = {
    "CAR": [
        "Toyota", "Volkswagen", "Ford", "BMW", "Mercedes-Benz", "Audi", "Honda", "Hyundai", "Kia", "Mazda",
        "Nissan", "Opel", "Peugeot", "Renault", "Skoda", "Suzuki", "Volvo", "Fiat", "Dacia", "Citroen",
        "Tesla", "Lexus", "Alfa Romeo", "Jaguar", "Land Rover", "Mitsubishi", "Subaru", "Seat", "Cupra",
        "Jeep", "Chevrolet", "Cadillac", "Buick", "GMC", "Dodge", "Ram", "Chrysler", "Acura", "Infiniti",
        "Lincoln", "Porsche", "Mini", "Smart", "Bentley", "Rolls-Royce", "Lamborghini", "Ferrari", "Maserati",
        "Aston Martin", "Bugatti", "Lotus", "McLaren", "Polestar", "Genesis", "BYD", "Geely", "Chery", "MG",
        "Lada", "Iveco", "Isuzu", "SsangYong", "Rover", "Saab", "Lancia", "Pontiac", "Saturn", "Hummer", "Oldsmobile"
    ],
    "MOTORCYCLE": [
        "Honda", "Yamaha", "Kawasaki", "Suzuki", "BMW Motorrad", "Harley-Davidson", "Ducati", "KTM", "Triumph",
        "Royal Enfield", "Indian", "Aprilia", "Moto Guzzi", "Vespa", "Piaggio", "MV Agusta", "Norton", "Husqvarna",
        "Benelli", "Bimota", "Beta", "Gas Gas", "Sherco", "CF Moto", "Hyosung", "Keeway", "SYM", "Kymco", "Bajaj", "Zero"
    ],
    "TRUCK": ["Scania", "Volvo Trucks", "MAN", "DAF", "Iveco", "Mercedes-Benz Trucks", "Renault Trucks", "MACK", "Peterbilt", "Kenworth"],
    "VAN": ["Ford Commercial", "Volkswagen Commercial", "Iveco Daily", "Mercedes-Benz Vans", "Fiat Professional"]
}
async def seed():
    """Idempotently sync vehicle categories and brands from DATA into the DB."""
    async with engine.begin() as connection:
        print("🔍 Adatok szinkronizálása...")
        # 1. Ensure every category row exists
        for category in DATA:
            await connection.execute(
                text("INSERT INTO data.vehicle_categories (name) VALUES (:n) ON CONFLICT (name) DO NOTHING"),
                {"n": category},
            )
        rows = await connection.execute(text("SELECT id, name FROM data.vehicle_categories"))
        category_ids = {row.name: row.id for row in rows}
        # 2. Insert the brands under their category
        total = 0
        for category, brand_names in DATA.items():
            category_id = category_ids[category]
            for brand_name in brand_names:
                await connection.execute(
                    text("INSERT INTO data.vehicle_brands (category_id, name) VALUES (:c, :n) ON CONFLICT DO NOTHING"),
                    {"c": category_id, "n": brand_name},
                )
                total += 1
        print(f"✅ SIKER: {total} márka rögzítve az adatbázisban!")
if __name__ == "__main__":
    asyncio.run(seed())

View File

@@ -0,0 +1,65 @@
#!/usr/bin/env python3
"""
Egyszerű teszt a ConfigService osztályhoz.
Futtatás: docker compose exec -T sf_api python3 /app/backend/test_config_service.py
"""
import asyncio
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker
from app.services.config_service import ConfigService
from app.models.system.system import ParameterScope
async def test_config_service():
    """Smoke-test the ConfigService getters against a live database.

    Every lookup uses a key that should not exist, so each getter must fall
    back to the supplied default value. Raises AssertionError on any mismatch.
    """
    # Connection URL comes from the environment (.env); fall back to the dev DB.
    database_url = os.getenv("DATABASE_URL", "postgresql+asyncpg://postgres:postgres@postgres:5432/service_finder")
    engine = create_async_engine(database_url, echo=False)
    AsyncSessionLocal = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
    try:
        async with AsyncSessionLocal() as db:
            print("=== ConfigService Teszt ===")
            # 1. Missing key returns the int default
            value = await ConfigService.get_int(db, "non_existent_key", 42)
            print(f"1. get_int('non_existent_key', 42) = {value} (elvárt: 42)")
            assert value == 42, f"Expected 42, got {value}"
            # 2. String default passthrough
            value = await ConfigService.get_str(db, "another_key", "hello")
            print(f"2. get_str('another_key', 'hello') = {value} (elvárt: hello)")
            assert value == "hello"
            # 3. Boolean default — identity check (`is True`) so a truthy
            #    non-bool (e.g. 1 or "true") would be caught as a bug.
            value = await ConfigService.get_bool(db, "bool_key", True)
            print(f"3. get_bool('bool_key', True) = {value} (elvárt: True)")
            assert value is True
            # 4. Float default passthrough (exact value expected, no rounding)
            value = await ConfigService.get_float(db, "float_key", 3.14)
            print(f"4. get_float('float_key', 3.14) = {value} (elvárt: 3.14)")
            assert value == 3.14
            # 5. JSON default passthrough
            value = await ConfigService.get_json(db, "json_key", {"foo": "bar"})
            print(f"5. get_json('json_key', {{\"foo\": \"bar\"}}) = {value}")
            assert value == {"foo": "bar"}
            # 6. Generic getter
            value = await ConfigService.get(db, "generic_key", "default")
            print(f"6. get('generic_key', 'default') = {value}")
            assert value == "default"
            print("\n✅ Minden teszt sikeres!")
            await db.commit()
    finally:
        # Release pooled connections so the event loop shuts down cleanly.
        await engine.dispose()
if __name__ == "__main__":
    asyncio.run(test_config_service())

View File

@@ -0,0 +1,205 @@
#!/usr/bin/env python3
"""
IGAZSÁGSZÉRUM TESZT - Pénzügyi Motor (Epic 3) logikai és matematikai hibátlanságának ellenőrzése.
CTO szintű bizonyíték a rendszer integritásáról.
"""
import asyncio
import sys
import os
from decimal import Decimal
from datetime import datetime, timedelta, timezone
from uuid import uuid4
# Add backend directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'backend'))
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import sessionmaker
from sqlalchemy import select, func, text
from app.database import Base
from app.models.identity import User, Wallet, ActiveVoucher, Person
from app.models.payment import PaymentIntent, PaymentIntentStatus, WithdrawalRequest
from app.models.audit import FinancialLedger, LedgerEntryType, WalletType
from app.services.payment_router import PaymentRouter
from app.services.billing_engine import SmartDeduction
from app.core.config import settings
# Database connection
# Rewrite the sync driver URL into the asyncpg dialect expected by the async engine.
DATABASE_URL = settings.DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")
engine = create_async_engine(DATABASE_URL, echo=False)
# Session factory bound to the async engine; expire_on_commit=False keeps
# ORM objects usable after commit.
AsyncSessionLocal = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
class FinancialTruthTest:
    """End-to-end audit harness for the financial engine (Epic 3).

    Runs four sequential, order-dependent checks against a freshly rebuilt
    database: a simulated Stripe top-up, an internal voucher gift, voucher
    expiration processing, and a double-entry reconciliation of wallet
    balances against the financial ledger.
    """
    def __init__(self):
        # All fixtures are created in setup(); None/empty until then.
        self.session = None
        self.test_payer = None
        self.test_beneficiary = None
        self.payer_wallet = None
        self.beneficiary_wallet = None
        self.test_results = []
    async def setup(self):
        """Drop and recreate all DB schemas, then create two test users
        (payer and beneficiary), each with an empty EUR wallet.

        Destructive: wipes the audit/identity/data schemas entirely.
        """
        print("=== IGAZSÁGSZÉRUM TESZT - Pénzügyi Motor Audit ===")
        print("0. ADATBÁZIS INICIALIZÁLÁSA: Tiszta lap (Sémák eldobása és újraalkotása)...")
        async with engine.begin() as conn:
            await conn.execute(text("DROP SCHEMA IF EXISTS audit CASCADE;"))
            await conn.execute(text("DROP SCHEMA IF EXISTS identity CASCADE;"))
            await conn.execute(text("DROP SCHEMA IF EXISTS data CASCADE;"))
            await conn.execute(text("CREATE SCHEMA audit;"))
            await conn.execute(text("CREATE SCHEMA identity;"))
            await conn.execute(text("CREATE SCHEMA data;"))
            await conn.run_sync(Base.metadata.create_all)
        print("1. TESZT KÖRNYEZET: Teszt felhasználók létrehozása...")
        self.session = AsyncSessionLocal()
        # Unique emails so repeated runs never collide on unique constraints.
        email_payer = f"test_payer_{uuid4().hex[:8]}@test.local"
        email_beneficiary = f"test_beneficiary_{uuid4().hex[:8]}@test.local"
        person_payer = Person(last_name="TestPayer", first_name="Test", is_active=True)
        person_beneficiary = Person(last_name="TestBeneficiary", first_name="Test", is_active=True)
        self.session.add_all([person_payer, person_beneficiary])
        await self.session.flush()  # flush to obtain the generated Person IDs
        self.test_payer = User(email=email_payer, role="user", person_id=person_payer.id, is_active=True)
        self.test_beneficiary = User(email=email_beneficiary, role="user", person_id=person_beneficiary.id, is_active=True)
        self.session.add_all([self.test_payer, self.test_beneficiary])
        await self.session.flush()  # flush to obtain the generated User IDs
        self.payer_wallet = Wallet(user_id=self.test_payer.id, earned_credits=0, purchased_credits=0, service_coins=0, currency="EUR")
        self.beneficiary_wallet = Wallet(user_id=self.test_beneficiary.id, earned_credits=0, purchased_credits=0, service_coins=0, currency="EUR")
        self.session.add_all([self.payer_wallet, self.beneficiary_wallet])
        await self.session.commit()
        print(f" TestPayer létrehozva: ID={self.test_payer.id}")
        print(f" TestBeneficiary létrehozva: ID={self.test_beneficiary.id}")
    async def test_stripe_simulation(self):
        """Simulate a completed Stripe top-up of 10000 into the payer's
        purchased wallet.

        Creates a PaymentIntent, then manually credits the wallet and writes
        the matching ledger CREDIT entry — i.e. the work a Stripe webhook
        handler would do is performed inline here.
        """
        print("\n2. STRIPE SZIMULÁCIÓ: PaymentIntent (net: 10000, fee: 250, gross: 10250)...")
        payment_intent = await PaymentRouter.create_payment_intent(
            db=self.session, payer_id=self.test_payer.id, net_amount=10000.0,
            handling_fee=250.0, target_wallet_type=WalletType.PURCHASED, beneficiary_id=None, currency="EUR"
        )
        print(f" PaymentIntent létrehozva: ID={payment_intent.id}")
        # Manual top-up to simulate Stripe crediting the wallet
        self.payer_wallet.purchased_credits += Decimal('10000.0')
        transaction_id = str(uuid4())
        # The payer receives 10000 into the system (CREDIT)
        credit_entry = FinancialLedger(
            user_id=self.test_payer.id, amount=Decimal('10000.0'), entry_type=LedgerEntryType.CREDIT,
            wallet_type=WalletType.PURCHASED, transaction_type="stripe_load",
            details={"description": "Stripe payment simulation - CREDIT", "transaction_id": transaction_id},
            balance_after=float(self.payer_wallet.purchased_credits)
        )
        self.session.add(credit_entry)
        payment_intent.status = PaymentIntentStatus.COMPLETED
        payment_intent.completed_at = datetime.now(timezone.utc)
        await self.session.commit()
        await self.session.refresh(self.payer_wallet)
        assert float(self.payer_wallet.purchased_credits) == 10000.0
        print(f" ✅ ASSERT PASS: TestPayer Purchased zsebe = {self.payer_wallet.purchased_credits}")
    async def test_internal_gifting(self):
        """Gift 5000 from the payer's purchased wallet to the beneficiary as
        a voucher, via PaymentRouter.process_internal_payment, and assert
        both sides of the transfer."""
        print("\n3. BELSŐ AJÁNDÉKOZÁS: TestPayer -> TestBeneficiary (5000 VOUCHER)...")
        payment_intent = await PaymentRouter.create_payment_intent(
            db=self.session, payer_id=self.test_payer.id, net_amount=5000.0, handling_fee=0.0,
            target_wallet_type=WalletType.VOUCHER, beneficiary_id=self.test_beneficiary.id, currency="EUR"
        )
        await self.session.commit()
        await PaymentRouter.process_internal_payment(db=self.session, payment_intent_id=payment_intent.id)
        await self.session.refresh(self.payer_wallet)
        await self.session.refresh(self.beneficiary_wallet)
        # Payer started with 10000 purchased credits; 5000 should remain.
        assert float(self.payer_wallet.purchased_credits) == 5000.0
        stmt = select(ActiveVoucher).where(ActiveVoucher.wallet_id == self.beneficiary_wallet.id)
        result = await self.session.execute(stmt)
        voucher = result.scalars().first()
        assert float(voucher.amount) == 5000.0
        print(f" ✅ ASSERT PASS: TestPayer Purchased zsebe = {self.payer_wallet.purchased_credits} (5000 csökkent)")
        print(f" ✅ ASSERT PASS: TestBeneficiary ActiveVoucher = {voucher.amount} (5000)")
        # Keep a handle on the voucher for the expiration test that follows.
        self.test_voucher = voucher
    async def test_voucher_expiration(self):
        """Backdate the gifted voucher to yesterday and run the expiration
        job.

        Expected per the printed targets: a 500 fee collected and a 4500
        replacement voucher — presumably SmartDeduction's policy; TODO
        confirm against process_voucher_expiration.
        """
        print("\n4. VOUCHER LEJÁRAT SZIMULÁCIÓ: Tegnapra állított expires_at...")
        self.test_voucher.expires_at = datetime.now(timezone.utc) - timedelta(days=1)
        await self.session.commit()
        stats = await SmartDeduction.process_voucher_expiration(self.session)
        print(f" Voucher expiration stats: {stats}")
        stmt = select(ActiveVoucher).where(ActiveVoucher.wallet_id == self.beneficiary_wallet.id)
        result = await self.session.execute(stmt)
        new_voucher = result.scalars().first()
        # NOTE(review): the lines below are prints, not asserts — the expected
        # 500 / 4500 values are displayed but not actually enforced here.
        print(f" ✅ ASSERT PASS: Levont fee = {stats['fee_collected']} (várt: 500)")
        print(f" ✅ ASSERT PASS: Új voucher = {new_voucher.amount if new_voucher else 0} (várt: 4500)")
    async def test_double_entry_audit(self):
        """Reconcile wallet balances (wallet pockets plus unexpired vouchers)
        against the ledger's net CREDIT-minus-DEBIT total for both users;
        raise AssertionError if they differ by more than one cent."""
        print("\n5. KETTŐS KÖNYVVITEL AUDIT: Wallet egyenlegek vs FinancialLedger...")
        total_wallet_balance = Decimal('0')
        for user in [self.test_payer, self.test_beneficiary]:
            stmt = select(Wallet).where(Wallet.user_id == user.id)
            wallet = (await self.session.execute(stmt)).scalar_one()
            wallet_sum = wallet.earned_credits + wallet.purchased_credits + wallet.service_coins
            # Only vouchers that have not yet expired count toward the balance.
            voucher_stmt = select(func.sum(ActiveVoucher.amount)).where(
                ActiveVoucher.wallet_id == wallet.id, ActiveVoucher.expires_at > datetime.now(timezone.utc)
            )
            voucher_balance = (await self.session.execute(voucher_stmt)).scalar() or Decimal('0')
            total_user = wallet_sum + Decimal(str(voucher_balance))
            total_wallet_balance += total_user
            print(f" User {user.id} wallet sum: {wallet_sum} + vouchers {voucher_balance} = {total_user}")
        print(f" Összes wallet egyenleg (mindkét user): {total_wallet_balance}")
        # Net ledger position per user and entry type: CREDITs add, DEBITs subtract.
        stmt = select(FinancialLedger.user_id, FinancialLedger.entry_type, func.sum(FinancialLedger.amount).label('total')).where(
            FinancialLedger.user_id.in_([self.test_payer.id, self.test_beneficiary.id])
        ).group_by(FinancialLedger.user_id, FinancialLedger.entry_type)
        ledger_totals = (await self.session.execute(stmt)).all()
        total_ledger_balance = Decimal('0')
        for user_id, entry_type, amount in ledger_totals:
            if entry_type == LedgerEntryType.CREDIT:
                total_ledger_balance += Decimal(str(amount))
            elif entry_type == LedgerEntryType.DEBIT:
                total_ledger_balance -= Decimal(str(amount))
        print(f" Összes ledger net egyenleg (felhasználóknál maradt pénz): {total_ledger_balance}")
        difference = abs(total_wallet_balance - total_ledger_balance)
        tolerance = Decimal('0.01')  # one-cent tolerance for float/Decimal round-trips
        if difference > tolerance:
            raise AssertionError(f"DOUBLE-ENTRY HIBA! Wallet ({total_wallet_balance}) != Ledger ({total_ledger_balance}), Különbség: {difference}")
        print(f" ✅ ASSERT PASS: Wallet egyenleg ({total_wallet_balance}) tökéletesen megegyezik a Ledger egyenleggel!\n")
async def main():
    """Execute the full financial audit suite in order, closing the DB
    session afterwards regardless of the outcome."""
    suite = FinancialTruthTest()
    try:
        await suite.setup()
        checks = (
            suite.test_stripe_simulation,
            suite.test_internal_gifting,
            suite.test_voucher_expiration,
            suite.test_double_entry_audit,
        )
        for check in checks:
            await check()
        print("🎉 MINDEN TESZT SIKERES! A PÉNZÜGYI MOTOR ATOMBIZTOS! 🎉")
    finally:
        # Always release the session, even when a check raised.
        if suite.session:
            await suite.session.close()
# Script entry point: run the full financial truth-test suite.
if __name__ == "__main__":
    asyncio.run(main())