refaktorálás előtti állapot

This commit is contained in:
Roo
2026-03-10 07:34:01 +00:00
parent 4e40af8a08
commit 0304cb8142
39 changed files with 1552 additions and 125 deletions

View File

@@ -12,6 +12,7 @@ from app.models.payment import PaymentIntent, PaymentIntentStatus
from app.services.config_service import config
from app.services.payment_router import PaymentRouter
from app.services.stripe_adapter import stripe_adapter
from app.services.billing_engine import upgrade_subscription, get_user_balance
router = APIRouter()
logger = logging.getLogger(__name__)
@@ -19,55 +20,24 @@ logger = logging.getLogger(__name__)
@router.post("/upgrade")
async def upgrade_account(target_package: str, db: AsyncSession = Depends(get_db), current_user: User = Depends(get_current_user)):
"""
Univerzális csomagváltó.
Univerzális csomagváltó a Billing Engine segítségével.
Kezeli az 5+ csomagot, a Rank-ugrást és a különleges 'Service Coin' bónuszokat.
"""
# 1. Lekérjük a teljes csomagmátrixot az adminból
# Példa JSON: {"premium": {"price": 2000, "rank": 5, "type": "credit"}, "service_pro": {"price": 10000, "rank": 30, "type": "coin"}}
package_matrix = await config.get_setting(db, "subscription_packages_matrix")
if target_package not in package_matrix:
raise HTTPException(status_code=400, detail="Érvénytelen csomagválasztás.")
pkg_info = package_matrix[target_package]
price = pkg_info["price"]
# 2. Pénztárca ellenőrzése
stmt = select(Wallet).where(Wallet.user_id == current_user.id)
wallet = (await db.execute(stmt)).scalar_one_or_none()
total_balance = wallet.purchased_credits + wallet.earned_credits
if total_balance < price:
raise HTTPException(status_code=402, detail="Nincs elég kredited a csomagváltáshoz.")
# 3. Levonási logika (Purchased -> Earned sorrend)
if wallet.purchased_credits >= price:
wallet.purchased_credits -= price
else:
remaining = price - wallet.purchased_credits
wallet.purchased_credits = 0
wallet.earned_credits -= remaining
# 4. Speciális Szerviz Logika (Service Coins)
# Ha a csomag típusa 'coin', akkor a szerviz kap egy kezdő Coin csomagot is
if pkg_info.get("type") == "coin":
initial_coins = pkg_info.get("initial_coin_bonus", 100)
wallet.service_coins += initial_coins
logger.info(f"User {current_user.id} upgraded to Service Pro, awarded {initial_coins} coins.")
# 5. Rang frissítése és naplózás
current_user.role = target_package # Pl. 'service_pro' vagy 'vip'
db.add(FinancialLedger(
user_id=current_user.id,
amount=-price,
transaction_type=f"UPGRADE_{target_package.upper()}",
details=pkg_info
))
await db.commit()
return {"status": "success", "package": target_package, "rank_granted": pkg_info["rank"]}
try:
result = await upgrade_subscription(db, current_user.id, target_package)
return {
"status": "success",
"package": target_package,
"price_paid": result.get("price_paid", 0.0),
"new_plan": result.get("new_plan"),
"expires_at": result.get("expires_at"),
"transaction": result.get("transaction")
}
except ValueError as e:
raise HTTPException(status_code=400, detail=str(e))
except Exception as e:
logger.error(f"Upgrade error: {e}")
raise HTTPException(status_code=500, detail=f"Belső hiba: {str(e)}")
@router.post("/payment-intent/create")
@@ -331,27 +301,14 @@ async def get_wallet_balance(
current_user: User = Depends(get_current_user)
):
"""
Felhasználó pénztárca egyenlegének lekérdezése.
Felhasználó pénztárca egyenlegének lekérdezése a Billing Engine segítségével.
"""
try:
stmt = select(Wallet).where(Wallet.user_id == current_user.id)
result = await db.execute(stmt)
wallet = result.scalar_one_or_none()
if not wallet:
raise HTTPException(status_code=404, detail="Pénztárca nem található")
return {
"earned": float(wallet.earned_credits),
"purchased": float(wallet.purchased_credits),
"service_coins": float(wallet.service_coins),
"total": float(
wallet.earned_credits +
wallet.purchased_credits +
wallet.service_coins
),
}
balances = await get_user_balance(db, current_user.id)
return balances
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
except Exception as e:
logger.error(f"Pénztárca egyenleg lekérdezési hiba: {e}")
raise HTTPException(status_code=500, detail=f"Belső hiba: {str(e)}")
raise HTTPException(status_code=500, detail=f"Belső hiba: {str(e)}")

View File

@@ -10,6 +10,7 @@ from .address import Address, GeoPostalCode, GeoStreet, GeoStreetType, Rating
# 3. Jármű definíciók
from .vehicle_definitions import VehicleModelDefinition, VehicleType, FeatureDefinition, ModelFeatureMap
from .reference_data import ReferenceLookup
# 4. Szervezeti felépítés
from .organization import Organization, OrganizationMember, OrganizationFinancials, OrganizationSalesAssignment, OrgType, OrgUserRole, Branch
@@ -61,7 +62,7 @@ __all__ = [
"AuditLog", "VehicleOwnership", "LogSeverity",
"SecurityAuditLog", "ProcessLog", "FinancialLedger",
"ServiceProfile", "ExpertiseTag", "ServiceExpertise", "ServiceStaging", "DiscoveryParameter",
"Vehicle", "UserVehicle", "VehicleCatalog", "ServiceRecord", "VehicleModelDefinition",
"Vehicle", "UserVehicle", "VehicleCatalog", "ServiceRecord", "VehicleModelDefinition", "ReferenceLookup",
"VehicleType", "FeatureDefinition", "ModelFeatureMap", "LegalDocument", "LegalAcceptance",
"Location", "LocationType"
]

View File

@@ -166,14 +166,13 @@ class VehicleOwnership(Base):
__table_args__ = {"schema": "data"}
id: Mapped[int] = mapped_column(Integer, primary_key=True)
asset_id: Mapped[uuid.UUID] = mapped_column(PG_UUID(as_uuid=True), ForeignKey("data.assets.id"), nullable=False)
user_id: Mapped[int] = mapped_column(Integer, ForeignKey("identity.users.id"), nullable=False)
acquired_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
disposed_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True))
asset: Mapped["Asset"] = relationship("Asset", back_populates="ownership_history")
# EZ A SOR HIÁNYZIK A KÓDODBÓL ÉS EZ JAVÍTJA A HIBÁT:
# JAVÍTVA: Kapcsolat a User modellhez
user: Mapped["User"] = relationship("User", back_populates="ownership_history")
class AssetTelemetry(Base):
@@ -212,10 +211,27 @@ class ExchangeRate(Base):
rate: Mapped[float] = mapped_column(Numeric(18, 6), nullable=False)
class CatalogDiscovery(Base):
""" Robot munkaterület. """
""" Robot munkaterület a felfedezett modelleknek. """
__tablename__ = "catalog_discovery"
__table_args__ = (UniqueConstraint('make', 'model', name='_make_model_uc'), {"schema": "data"})
__table_args__ = (
# KIBŐVÍTETT EGYEDISÉGI SZABÁLY: Márka + Modell + Osztály + Piac + Évjárat
UniqueConstraint('make', 'model', 'vehicle_class', 'market', 'model_year', name='_make_model_market_year_uc'),
{"schema": "data"}
)
id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
make: Mapped[str] = mapped_column(String(100), nullable=False, index=True)
model: Mapped[str] = mapped_column(String(100), nullable=False, index=True)
status: Mapped[str] = mapped_column(String(20), server_default=text("'pending'"), index=True)
vehicle_class: Mapped[str] = mapped_column(String(50), server_default=text("'car'"), index=True)
# --- ÚJ MEZŐK A STATISZTIKÁHOZ ÉS PIACAZONOSÍTÁSHOZ ---
market: Mapped[str] = mapped_column(String(20), server_default=text("'GLOBAL'"), index=True) # pl: RDW, DVLA, USA_IMPORT
model_year: Mapped[Optional[int]] = mapped_column(Integer, index=True)
# Robot vezérlés
status: Mapped[str] = mapped_column(String(20), server_default=text("'pending'"), index=True)
source: Mapped[Optional[str]] = mapped_column(String(100)) # pl: STRATEGIST-V2, NHTSA-V1
priority_score: Mapped[int] = mapped_column(Integer, server_default=text("0"))
attempts: Mapped[int] = mapped_column(Integer, server_default=text("0"))
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())

View File

@@ -0,0 +1,20 @@
# /app/app/models/reference_data.py
from sqlalchemy import Column, Integer, String, DateTime, func
from sqlalchemy.dialects.postgresql import JSONB
from app.database import Base
class ReferenceLookup(Base):
    """Normalized reference data for one vehicle from one external source.

    One row per (make, model, year, source); the unified spec payload is
    kept as JSONB so heterogeneous sources can share the same table.
    """
    __tablename__ = "reference_lookup"
    __table_args__ = {"schema": "data"}
    id = Column(Integer, primary_key=True, index=True)
    make = Column(String, nullable=False, index=True)
    model = Column(String, nullable=False, index=True)
    year = Column(Integer, nullable=True, index=True)
    # The unified/normalized spec data is stored here
    specs = Column(JSONB, nullable=False)
    source = Column(String, nullable=False)  # e.g. 'os-vehicle-db', 'wikidata'
    source_id = Column(String, nullable=True)
    updated_at = Column(DateTime, server_default=func.now(), onupdate=func.now())

View File

@@ -42,6 +42,7 @@ class FeatureDefinition(Base):
class VehicleModelDefinition(Base):
market: Mapped[str] = mapped_column(String(20), server_default=text("'GLOBAL'"), index=True)
"""
Robot v1.1.0 Multi-Tier MDM Master Adattábla.
Az ökoszisztéma technikai igazságforrása.
@@ -126,7 +127,7 @@ class VehicleModelDefinition(Base):
# --- BEÁLLÍTÁSOK ---
__table_args__ = (
UniqueConstraint('make', 'normalized_name', 'variant_code', 'version_code', 'fuel_type', name='uix_vmd_precision'),
UniqueConstraint('make', 'normalized_name', 'variant_code', 'version_code', 'fuel_type', 'market', 'year_from', name='uix_vmd_precision_v2'),
Index('idx_vmd_lookup_fast', 'make', 'normalized_name'),
Index('idx_vmd_engine_bridge', 'make', 'engine_code'),
{"schema": "data"}

View File

@@ -682,3 +682,199 @@ async def get_wallet_info(
Segédfüggvény pénztárca információk lekérdezéséhez.
"""
return await AtomicTransactionManager.get_wallet_summary(db, user_id)
# ==================== Billing Engine Service Functions ====================
async def charge_user(
    db: AsyncSession,
    user_id: int,
    amount: float,
    currency: str = "EUR",
    transaction_type: str = "service_payment",
    description: Optional[str] = None
) -> Dict[str, Any]:
    """
    Deduct credits from a user using the intelligent deduction order.

    Args:
        db: Database session
        user_id: User ID
        amount: Amount to deduct
        currency: Currency (currently only EUR is supported)
        transaction_type: Transaction type (e.g. "service_payment", "subscription")
        description: Optional free-text description

    Returns:
        Dict: Transaction details (result of
        AtomicTransactionManager.atomic_billing_transaction)

    Raises:
        ValueError: If a currency other than EUR is requested.
    """
    # Only a single currency is handled for now; reject anything else early.
    if currency != "EUR":
        raise ValueError("Only EUR currency is currently supported")
    desc = description or f"Charge for {transaction_type}"
    # The wallet mutation and ledger write are delegated to the atomic manager.
    return await AtomicTransactionManager.atomic_billing_transaction(
        db=db,
        user_id=user_id,
        amount=amount,
        description=desc,
        reference_type=transaction_type,
        reference_id=None
    )
async def upgrade_subscription(
    db: AsyncSession,
    user_id: int,
    target_package: str
) -> Dict[str, Any]:
    """
    Upgrade a user's subscription (package change).

    Args:
        db: Database session
        user_id: User ID
        target_package: Target package name (e.g. "premium", "vip")

    Returns:
        Dict: Transaction details plus the new subscription info.

    Raises:
        ValueError: If the target subscription tier does not exist.
    """
    # Local imports — NOTE(review): presumably to avoid a circular import
    # at module load time; verify against the import graph.
    from app.models.core_logic import SubscriptionTier
    from app.models.identity import User
    # 1. Verify that the target package exists
    stmt = select(SubscriptionTier).where(SubscriptionTier.name == target_package)
    result = await db.execute(stmt)
    tier = result.scalar_one_or_none()
    if not tier:
        raise ValueError(f"Subscription tier '{target_package}' not found")
    # 2. Compute the package price (simplified: fixed price from tier.rules)
    price = tier.rules.get("price", 0.0) if tier.rules else 0.0
    if price <= 0:
        # Free package: no charge, just switch the plan
        logger.info(f"Upgrading user {user_id} to free tier {target_package}")
        # Update the user's subscription_plan field
        user_stmt = select(User).where(User.id == user_id)
        user_result = await db.execute(user_stmt)
        user = user_result.scalar_one()
        user.subscription_plan = target_package
        # NOTE(review): naive UTC timestamp — confirm whether a
        # timezone-aware datetime is expected by the column.
        user.subscription_expires_at = datetime.utcnow() + timedelta(days=30)  # 30 days
        return {
            "success": True,
            "message": f"Upgraded to {target_package} (free)",
            "new_plan": target_package,
            "price_paid": 0.0
        }
    # 3. Compute the final price via the PricingCalculator
    user_stmt = select(User).where(User.id == user_id)
    user_result = await db.execute(user_stmt)
    user = user_result.scalar_one()
    final_price = await PricingCalculator.calculate_final_price(
        db=db,
        base_amount=price,
        country_code=user.region_code,
        user_role=user.role
    )
    # 4. Charge the user
    transaction = await charge_user(
        db=db,
        user_id=user_id,
        amount=final_price,
        currency="EUR",
        transaction_type="subscription_upgrade",
        description=f"Upgrade to {target_package} subscription"
    )
    # 5. Update the user's subscription data
    user.subscription_plan = target_package
    user.subscription_expires_at = datetime.utcnow() + timedelta(days=30)  # 30 days
    logger.info(f"User {user_id} upgraded to {target_package} for {final_price} EUR")
    return {
        "success": True,
        "transaction": transaction,
        "new_plan": target_package,
        "price_paid": final_price,
        "expires_at": user.subscription_expires_at.isoformat()
    }
async def record_ledger_entry(
    db: AsyncSession,
    user_id: int,
    amount: float,
    entry_type: LedgerEntryType,
    wallet_type: WalletType,
    transaction_type: str,
    description: str,
    reference_type: Optional[str] = None,
    reference_id: Optional[int] = None
) -> FinancialLedger:
    """
    Create a direct ledger entry (e.g. for manual corrections).

    Note: this function does NOT deduct from the wallet, it only writes an
    audit record. To actually move balance, use charge_user or
    atomic_billing_transaction instead.

    Args:
        db: Database session
        user_id: User ID
        amount: Amount
        entry_type: DEBIT or CREDIT
        wallet_type: Wallet type
        transaction_type: Transaction type
        description: Description
        reference_type: Reference type
        reference_id: Reference ID

    Returns:
        FinancialLedger: The created ledger entry (flushed, not committed).
    """
    ledger_entry = FinancialLedger(
        user_id=user_id,
        # Convert through str to avoid binary-float artifacts in Decimal
        amount=Decimal(str(amount)),
        entry_type=entry_type,
        wallet_type=wallet_type,
        transaction_type=transaction_type,
        details={
            "description": description,
            "reference_type": reference_type,
            "reference_id": reference_id,
            "wallet_type": wallet_type.value
        },
        transaction_id=uuid.uuid4(),
        balance_after=None,  # can be computed later
        currency="EUR"
    )
    db.add(ledger_entry)
    # Flush only — the caller owns the transaction and the commit.
    await db.flush()
    logger.info(f"Ledger entry recorded: user={user_id}, amount={amount}, type={entry_type.value}")
    return ledger_entry
async def get_user_balance(
    db: AsyncSession,
    user_id: int
) -> Dict[str, float]:
    """Return the user's wallet balances, keyed by wallet type.

    Args:
        db: Database session
        user_id: User ID

    Returns:
        Dict: Per-wallet-type balances extracted from the wallet summary.
    """
    summary = await AtomicTransactionManager.get_wallet_summary(db, user_id)
    balances = summary["balances"]
    return balances

View File

@@ -42,14 +42,13 @@ class FinancialTruthTest:
async def setup(self):
print("=== IGAZSÁGSZÉRUM TESZT - Pénzügyi Motor Audit ===")
print("0. ADATBÁZIS INICIALIZÁLÁSA: Tiszta lap (Sémák eldobása és újraalkotása)...")
print("0. ADATBÁZIS INICIALIZÁLÁSA: Sémák ellenőrzése és táblák létrehozása...")
async with engine.begin() as conn:
await conn.execute(text("DROP SCHEMA IF EXISTS audit CASCADE;"))
await conn.execute(text("DROP SCHEMA IF EXISTS identity CASCADE;"))
await conn.execute(text("DROP SCHEMA IF EXISTS data CASCADE;"))
await conn.execute(text("CREATE SCHEMA audit;"))
await conn.execute(text("CREATE SCHEMA identity;"))
await conn.execute(text("CREATE SCHEMA data;"))
# Sémák létrehozása, ha még nem léteznek (deadlock elkerülés)
await conn.execute(text("CREATE SCHEMA IF NOT EXISTS audit;"))
await conn.execute(text("CREATE SCHEMA IF NOT EXISTS identity;"))
await conn.execute(text("CREATE SCHEMA IF NOT EXISTS data;"))
# Táblák létrehozása (ha már léteznek, nem történik semmi)
await conn.run_sync(Base.metadata.create_all)
print("1. TESZT KÖRNYEZET: Teszt felhasználók létrehozása...")

View File

@@ -0,0 +1,194 @@
# /app/app/workers/monitor_dashboard.py
# docker exec sf_api python -m app.workers.monitor_dashboard
import asyncio
import os
import httpx
import pynvml
import psutil
from datetime import datetime, timedelta
from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine
from rich.console import Console
from rich.table import Table
from rich.panel import Panel
from rich.live import Live
from rich.layout import Layout
from rich.text import Text
from app.core.config import settings
console = Console()
# Initialize NVIDIA NVML once at import time; degrade gracefully when no
# GPU/driver is present so the dashboard still runs on CPU-only hosts.
try:
    pynvml.nvmlInit()
    gpu_available = True
except Exception:
    gpu_available = False
async def get_hardware_stats():
    """Collect host resource usage: CPU, RAM and (optionally) GPU.

    Returns:
        dict: cpu_usage (%), ram_total/ram_used (MiB), ram_perc (%), and a
        nested ``gpu`` dict (name, temp, load, vram_total/vram_used in MiB,
        power in W) when an NVIDIA GPU is available, otherwise ``gpu=None``.
    """
    # Snapshot virtual memory once instead of querying it three times.
    vm = psutil.virtual_memory()
    stats = {
        "cpu_usage": psutil.cpu_percent(interval=None),
        "ram_total": vm.total // 1024**2,
        "ram_used": vm.used // 1024**2,
        "ram_perc": vm.percent,
        "gpu": None
    }
    if gpu_available:
        try:
            handle = pynvml.nvmlDeviceGetHandleByIndex(0)
            # One NVML memory query instead of two.
            mem = pynvml.nvmlDeviceGetMemoryInfo(handle)
            stats["gpu"] = {
                "name": pynvml.nvmlDeviceGetName(handle),
                "temp": pynvml.nvmlDeviceGetTemperature(handle, pynvml.NVML_TEMPERATURE_GPU),
                "load": pynvml.nvmlDeviceGetUtilizationRates(handle).gpu,
                "vram_total": mem.total // 1024**2,
                "vram_used": mem.used // 1024**2,
                "power": pynvml.nvmlDeviceGetPowerUsage(handle) / 1000
            }
        except Exception:
            # Best-effort: leave gpu=None on NVML failure, but no longer
            # swallow KeyboardInterrupt/SystemExit like the bare `except` did.
            pass
    return stats
async def get_ollama_models():
    """Return the names of models currently loaded into Ollama.

    Queries the local ``/api/ps`` endpoint. Returns an empty list when no
    model is loaded or the response is not HTTP 200, and a single marker
    entry when the service is unreachable.
    """
    try:
        async with httpx.AsyncClient(timeout=2.0) as client:
            resp = await client.get("http://ollama:11434/api/ps")
            if resp.status_code == 200:
                return [m['name'] for m in resp.json().get("models", [])]
    except Exception:
        # Narrowed from a bare `except`: still best-effort, but task
        # cancellation / interpreter shutdown now propagates.
        return ["Ollama Comm Error"]
    return []
async def get_stats(engine):
    """Gather all dashboard data: throughput, pipeline backlog, top brands,
    live per-robot activity, hardware stats and loaded AI models."""
    async with engine.connect() as conn:
        # 1. Throughput figures (gold-enriched rows per hour / per day)
        res_hr = await conn.execute(text("SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'gold_enriched' AND updated_at > NOW() - INTERVAL '1 hour'"))
        hr_rate = res_hr.scalar() or 0
        res_day = await conn.execute(text("SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'gold_enriched' AND updated_at > NOW() - INTERVAL '24 hours'"))
        day_rate = res_day.scalar() or 0
        # 2. Pipeline backlog per robot stage (one round trip via subselects)
        res_pipe = await conn.execute(text("""
            SELECT
                (SELECT count(*) FROM data.catalog_discovery WHERE status = 'pending') as r1,
                (SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'unverified') as r2,
                (SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'awaiting_ai_synthesis') as r3,
                (SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'gold_enriched') as r4
        """))
        r_counts = res_pipe.fetchone()
        # 3. TOP 7 brands by definition count
        res_top = await conn.execute(text("SELECT make, count(*) as qty FROM data.vehicle_model_definitions GROUP BY make ORDER BY qty DESC LIMIT 7"))
        top_makes = res_top.fetchall()
        # 4. ACTIVITY per robot (queries take 5 rows each even though the
        #    original comment said 3 — the UI title has the same mismatch)
        res_r4 = await conn.execute(text("SELECT make, marketing_name FROM data.vehicle_model_definitions WHERE status = 'gold_enriched' ORDER BY updated_at DESC LIMIT 5"))
        res_r3 = await conn.execute(text("SELECT make, marketing_name FROM data.vehicle_model_definitions WHERE status = 'ai_synthesis_in_progress' ORDER BY updated_at DESC LIMIT 5"))
        res_r12 = await conn.execute(text("SELECT make, model FROM data.catalog_discovery WHERE status = 'processing' ORDER BY updated_at DESC LIMIT 5"))
        hw = await get_hardware_stats()
        ai = await get_ollama_models()
        return (hr_rate, day_rate), r_counts, top_makes, (res_r4.fetchall(), res_r3.fetchall(), res_r12.fetchall()), hw, ai
def make_layout() -> Layout:
    """Build the static dashboard skeleton: header / main / hardware / footer,
    with the main area split into a narrow left and a wide right column."""
    root = Layout()
    header = Layout(name="header", size=3)
    main_area = Layout(name="main", ratio=1)
    hardware = Layout(name="hardware", size=10)  # enlarged hardware strip
    footer = Layout(name="footer", size=3)
    root.split_column(header, main_area, hardware, footer)
    root["main"].split_row(
        Layout(name="left", ratio=1),
        Layout(name="right", ratio=2),
    )
    root["left"].split_column(Layout(name="robot_stats"), Layout(name="inventory"))
    root["right"].split_column(Layout(name="live_ops"))
    return root
def update_dashboard(layout, data):
    """Render one dashboard frame from the tuple returned by get_stats()."""
    rates, r_counts, top_makes, live_data, hw, ai_models = data
    r4_list, r3_list, r12_list = live_data
    # Clock (fixed UTC+1 offset; NOTE(review): ignores DST — confirm intent)
    local_time = datetime.now() + timedelta(hours=1)
    # HEADER (unchanged)
    layout["header"].update(Panel(
        f"🛰️ SENTINEL MISSION CONTROL | [bold yellow]{local_time.strftime('%Y-%m-%d %H:%M:%S')}[/] | AI: [green]{rates[0]}[/] /óra — [cyan]{rates[1]}[/] /nap",
        style="bold white on blue"
    ))
    # ROBOT PIPELINE backlog table
    robot_table = Table(title="🤖 Pipeline Állapot", expand=True, border_style="cyan")
    robot_table.add_column("Robot", style="bold")
    robot_table.add_column("Várakozik", justify="right")
    robot_table.add_row("R1-Hunter", f"{r_counts[0]} db")
    robot_table.add_row("R2-Researcher", f"{r_counts[1]} db")
    robot_table.add_row("R3-Alchemist", f"{r_counts[2]} db")
    robot_table.add_row("R4-Validator", f"{r_counts[3]} db")
    layout["robot_stats"].update(robot_table)
    # TOP BRANDS
    brand_table = Table(title="🚜 Top 7 Márka", expand=True, border_style="magenta")
    brand_table.add_column("Márka", style="yellow")
    brand_table.add_column("db", justify="right")
    for m, q in top_makes: brand_table.add_row(m, str(q))
    layout["inventory"].update(brand_table)
    # LIVE OPS (5 entries per robot; the title still says 3 — cosmetic mismatch)
    ops_table = Table(title="⚡ Aktuális Folyamatok (Utolsó 3/robot)", expand=True, border_style="green")
    ops_table.add_column("Robot", width=15)
    ops_table.add_column("Márka / Típus")
    for r in r4_list: ops_table.add_row("[gold1]R4-VALIDATOR[/]", f"{r[0]} {r[1] or ''}")
    ops_table.add_section()
    for r in r3_list: ops_table.add_row("[medium_purple1]R3-ALCHEMIST[/]", f"{r[0]} {r[1] or ''}")
    ops_table.add_section()
    for r in r12_list: ops_table.add_row("[sky_blue1]R1-HUNTER[/]", f"{r[0]} {r[1] or ''}")
    layout["live_ops"].update(ops_table)
    # HARDWARE & AI (3-column layout)
    hw_layout = Layout()
    hw_layout.split_row(Layout(name="sys"), Layout(name="gpu"), Layout(name="ai"))
    # 1. System (CPU/RAM)
    sys_info = (
        f"[bold]CPU Terhelés:[/] [bright_blue]{hw['cpu_usage']}%[/]\n"
        f"[bold]RAM Használat:[/] [bright_magenta]{hw['ram_perc']}%[/]\n"
        f"({hw['ram_used']} / {hw['ram_total']} MB)"
    )
    hw_layout["sys"].update(Panel(sys_info, title="💻 System Resources", border_style="bright_blue"))
    # 2. GPU panel (falls back to a warning when get_hardware_stats saw no GPU)
    if hw["gpu"]:
        g = hw["gpu"]
        gpu_info = (
            f"[bold]{g['name']}[/]\n"
            f"Load: [green]{g['load']}%[/] | Temp: {g['temp']}°C\n"
            f"VRAM: {g['vram_used']} / {g['vram_total']} MB"
        )
    else:
        gpu_info = "[red]NVIDIA GPU not detected[/]"
    hw_layout["gpu"].update(Panel(gpu_info, title="🔌 GPU Monitor", border_style="orange3"))
    # 3. AI models currently resident in VRAM (from Ollama /api/ps)
    ai_info = "[bold]In Memory (VRAM):[/]\n" + ("\n".join([f"🧠 {m}" for m in ai_models]) if ai_models else "No active models.")
    hw_layout["ai"].update(Panel(ai_info, title="🤖 AI Stack", border_style="plum1"))
    layout["hardware"].update(hw_layout)
    layout["footer"].update(Panel(f"Sentinel v2.5 | Kernel: Stabil | Heartbeat: OK", style="italic grey50"))
async def main():
    """Main loop: poll the database every 2 s and redraw the live dashboard."""
    engine = create_async_engine(settings.DATABASE_URL)
    layout = make_layout()
    with Live(layout, refresh_per_second=1, screen=True):
        while True:
            try:
                data = await get_stats(engine)
                update_dashboard(layout, data)
            except Exception:
                # Keep the dashboard alive through transient DB/render
                # errors, but — unlike the previous bare `except` —
                # let KeyboardInterrupt/SystemExit terminate the loop.
                pass
            await asyncio.sleep(2)


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,52 @@
# /opt/docker/dev/service_finder/backend/app/workers/py_to_database.py
import asyncio
import importlib
import os
import sys
from sqlalchemy import text
from app.database import engine, Base
# Make sure Python can see the /app directory on the import path
sys.path.append("/app")
async def sync_database():
    """Create all missing schemas and ORM tables in the database.

    Idempotent: schemas use CREATE SCHEMA IF NOT EXISTS and
    ``Base.metadata.create_all`` skips tables that already exist.
    """
    # NOTE(review): ""*60 is an empty string — a separator character was
    # probably lost; these prints are effectively blank lines.
    print("\n" + ""*60)
    print("🚀 SENTINEL DATABASE SYNC - MB2.0")
    print(""*60)
    # 1. Force-create the schemas (so table creation cannot fail on them)
    async with engine.begin() as conn:
        for schema in ["data", "identity", "system", "audit"]:
            await conn.execute(text(f"CREATE SCHEMA IF NOT EXISTS {schema}"))
    print("✅ Sémák ellenőrizve.")
    # 2. Import the model modules (the regular way) so they register
    #    themselves on Base.metadata.
    # Per the earlier diagnosis these files are definitely present.
    model_modules = [
        "address", "audit", "document", "history", "legal", "organization",
        "security", "social", "system", "vehicle_definitions", "asset",
        "core_logic", "gamification", "identity", "logistics", "payment",
        "service", "staged_data", "translation"
    ]
    print(f"📂 Modellek betöltése...")
    for m in model_modules:
        try:
            # importlib registers the module in sys.modules
            importlib.import_module(f"app.models.{m}")
            print(f" 📦 [OK] app.models.{m}")
        except Exception as e:
            print(f" ⚠️ [INFO] app.models.{m} betöltése (vagy függősége) kihagyva: {e}")
    # 3. Physical synchronisation
    print(f"\n🏗️ Táblák létrehozása (Összesen: {len(Base.metadata.tables)} darab)...")
    async with engine.begin() as conn:
        # This command creates every table that does not exist yet
        await conn.run_sync(Base.metadata.create_all)
    print("\n" + ""*60)
    print("✅ KÉSZ! Az adatbázis most már tartalmazza a System és Translation táblákat is.")
    print(""*60)


if __name__ == "__main__":
    asyncio.run(sync_database())

View File

@@ -0,0 +1,140 @@
#!/usr/bin/env python3
"""
🤖 Subscription Lifecycle Worker (Robot-20)
A 20-as Gitea kártya implementációja: Lejárt előfizetések automatikus kezelése.
Folyamat:
1. Lekérdezés: Azokat a User-eket, ahol subscription_expires_at < NOW() ÉS subscription_plan != 'FREE'
2. Downgrade: subscription_plan = "FREE", is_vip = False
3. Naplózás: Főkönyvi bejegyzés (SUBSCRIPTION_EXPIRED)
4. Értesítés: NotificationService küldése a downgrade-ről
Futtatás:
- Napi egyszer (cron) vagy manuálisan: docker exec sf_api python -m app.workers.system.subscription_worker
"""
import asyncio
import logging
from datetime import datetime, timezone
from typing import List
from sqlalchemy import select, update, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import AsyncSessionLocal
from app.models.identity import User
from app.models.audit import FinancialLedger, LedgerEntryType, WalletType
from app.services.billing_engine import record_ledger_entry
from app.services.notification_service import NotificationService
logger = logging.getLogger("subscription-worker")
async def process_expired_subscriptions(db: AsyncSession) -> dict:
    """
    Process expired subscriptions inside a single transaction.

    Returns:
        dict: Statistics (processed_count, downgraded_users, errors)
    """
    now = datetime.now(timezone.utc)
    # 1. Query: expired subscriptions that are not yet FREE
    stmt = select(User).where(
        and_(
            User.subscription_expires_at < now,
            User.subscription_plan != 'FREE',
            User.is_deleted == False,
            User.is_active == True
        )
    ).with_for_update(skip_locked=True)  # row locking; skips rows held by a parallel run
    result = await db.execute(stmt)
    expired_users: List[User] = result.scalars().all()
    stats = {
        "processed_count": len(expired_users),
        "downgraded_users": [],
        "errors": []
    }
    for user in expired_users:
        try:
            # 2. Downgrade to the FREE plan
            old_plan = user.subscription_plan
            user.subscription_plan = "FREE"
            user.is_vip = False
            # subscription_expires_at stays as-is (null or the old, expired value)
            # 3. Ledger entry (SUBSCRIPTION_EXPIRED)
            # Note: amount = 0 because no money moves — audit-only record
            ledger_entry = await record_ledger_entry(
                db=db,
                user_id=user.id,
                amount=0.0,
                entry_type=LedgerEntryType.DEBIT,
                wallet_type=WalletType.SYSTEM,
                transaction_type="SUBSCRIPTION_EXPIRED",
                description=f"Előfizetés lejárt: {old_plan} → FREE",
                reference_type="subscription",
                reference_id=user.id
            )
            # 4. Notify the user (in-app notification + e-mail)
            await NotificationService.send_notification(
                db=db,
                user_id=user.id,
                title="Előfizetésed lejárt",
                message=f"A(z) {old_plan} előfizetésed lejárt, ezért átállítottuk a FREE csomagra. További előnyökért frissíts előfizetést!",
                category="billing",
                priority="medium",
                data={
                    "old_plan": old_plan,
                    "new_plan": "FREE",
                    "user_id": user.id,
                    "expired_at": user.subscription_expires_at.isoformat() if user.subscription_expires_at else None
                },
                send_email=True,
                email_template="subscription_expired",
                email_vars={
                    "recipient": user.email,
                    "first_name": user.person.first_name if user.person else "Partnerünk",
                    "old_plan": old_plan,
                    "new_plan": "FREE",
                    "lang": user.preferred_language
                }
            )
            stats["downgraded_users"].append({
                "user_id": user.id,
                "email": user.email,
                "old_plan": old_plan,
                "new_plan": "FREE"
            })
            logger.info(f"User {user.id} ({user.email}) subscription downgraded from {old_plan} to FREE")
        except Exception as e:
            # Per-user failures are recorded but do not abort the batch
            logger.error(f"Error processing user {user.id}: {e}", exc_info=True)
            stats["errors"].append({"user_id": user.id, "error": str(e)})
    # Commit the transaction (everything processed above lands atomically)
    await db.commit()
    return stats
async def main():
    """Worker entry point; intended to be triggered by cron."""
    logger.info("Starting subscription worker...")
    async with AsyncSessionLocal() as session:
        try:
            stats = await process_expired_subscriptions(session)
            logger.info(f"Subscription worker completed. Stats: {stats}")
            print(
                f"✅ Subscription worker completed. "
                f"Processed: {stats['processed_count']}, Downgraded: {len(stats['downgraded_users'])}"
            )
        except Exception as e:
            logger.error(f"Subscription worker failed: {e}", exc_info=True)
            await session.rollback()
            raise


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,24 @@
# MB 2.0: structured data dictionary for unifying the external sources.
# Keys are source names; values map canonical field names to the
# (dot-separated) path of that field inside the source's raw payload.
VEHICLE_MAPPING = {
    "os-vehicle-db": {
        "brand": "make",
        "model": "model",
        "year": "year",
        "specs": {
            "engine_hp": "specs.engine.hp",
            "fuel": "specs.engine.fuel",
            "body": "specs.body.type"
        }
    }
    # carquery and fipe mappings will be added here later
}
def normalize_make(make: str) -> str:
    """Canonicalize a brand name (e.g. Mercedes vs Mercedes-Benz).

    Trims whitespace, upper-cases, and maps known synonyms to one
    canonical spelling.
    """
    canonical = make.strip().upper()
    aliases = {
        "MERCEDES": "MERCEDES-BENZ",
        "VW": "VOLKSWAGEN",
        "ALFA": "ALFA ROMEO",
    }
    if canonical in aliases:
        return aliases[canonical]
    return canonical

View File

@@ -0,0 +1,26 @@
# /app/app/workers/vehicle/mapping_rules.py
# Per-source field maps: canonical field name -> key in that source's raw payload.
SOURCE_MAPPINGS = {
    "os-vehicle-db": {
        "make": "brand",
        "model": "model_name",
        "year": "release_year",
        "power": "specs.engine.hp"
    },
    "car-query": {
        "make": "model_make_id",
        "model": "model_name",
        "year": "model_year",
        "power": "model_engine_power_ps"
    }
}


def unify_data(raw_data, source_name):
    """Normalize a raw source record into the unified vehicle shape.

    Args:
        raw_data: Raw dict as delivered by the external source.
        source_name: Key into SOURCE_MAPPINGS; unknown sources yield
            empty make/model and a None year.

    Returns:
        dict: upper-cased make/model, the raw year value, and the
        untouched original payload under ``raw_specs``.
    """
    mapping = SOURCE_MAPPINGS.get(source_name, {})

    def _upper_field(field):
        # Robust against missing keys, explicit None values and
        # non-string payloads (e.g. numeric make IDs from car-query) —
        # the previous version crashed on `.upper()` for those.
        value = raw_data.get(mapping.get(field))
        return "" if value is None else str(value).upper()

    return {
        "normalized_make": _upper_field("make"),
        "normalized_model": _upper_field("model"),
        "normalized_year": raw_data.get(mapping.get("year")),
        "raw_specs": raw_data  # keep the original payload too
    }

View File

@@ -0,0 +1,128 @@
# /opt/docker/dev/service_finder/backend/app/workers/vehicle/robot_report.py
import asyncio
import psutil
import pynvml
from datetime import datetime
from sqlalchemy import text
from sqlalchemy.ext.asyncio import create_async_engine
from rich.console import Console
from rich.table import Table
from rich.panel import Panel
from rich.layout import Layout
from app.core.config import settings
console = Console()
async def get_data():
    """Run every report query and return the results as one dict."""
    engine = create_async_engine(settings.DATABASE_URL)
    async with engine.connect() as conn:
        # Pipeline data (R1-R4) in a single round trip via subselects
        pipe = await conn.execute(text("""
            SELECT
                (SELECT count(*) FROM data.catalog_discovery WHERE status = 'pending') as r1,
                (SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'unverified') as r2,
                (SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'awaiting_ai_synthesis') as r3,
                (SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'gold_enriched') as r4
        """))
        p_res = pipe.fetchone()
        # AI production rate (per hour / per day)
        ai_hr = await conn.execute(text("SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'gold_enriched' AND updated_at > NOW() - INTERVAL '1 hour'"))
        ai_day = await conn.execute(text("SELECT count(*) FROM data.vehicle_model_definitions WHERE status = 'gold_enriched' AND updated_at > NOW() - INTERVAL '24 hours'"))
        # Market matrix (1.3)
        market_res = await conn.execute(text("SELECT vehicle_class, market, count(*) FROM data.catalog_discovery GROUP BY 1, 2"))
        m_data = market_res.fetchall()
        # Robot top lists (2.1 - 2.3)
        r1_top = await conn.execute(text("SELECT make, count(*) FROM data.catalog_discovery WHERE market = 'RDW' GROUP BY 1 ORDER BY 2 DESC LIMIT 5"))
        r12_top = await conn.execute(text("SELECT make, count(*) FROM data.catalog_discovery WHERE market = 'USA_IMPORT' GROUP BY 1 ORDER BY 2 DESC LIMIT 5"))
        r14_top = await conn.execute(text("SELECT make, count(*) FROM data.catalog_discovery WHERE vehicle_class = 'motorcycle' GROUP BY 1 ORDER BY 2 DESC LIMIT 5"))
        # General top lists (3.1 - 3.3)
        pending_top = await conn.execute(text("SELECT make, count(*) FROM data.catalog_discovery WHERE status = 'pending' GROUP BY 1 ORDER BY 2 DESC LIMIT 5"))
        gold_top = await conn.execute(text("SELECT make, count(*) FROM data.vehicle_model_definitions WHERE status = 'gold_enriched' GROUP BY 1 ORDER BY 2 DESC LIMIT 5"))
        status_stats = await conn.execute(text("SELECT status, count(*) FROM data.vehicle_model_definitions GROUP BY 1 ORDER BY 2 DESC LIMIT 5"))
        # Category top lists (4.1 - 4.3)
        cat_tops = {}
        for c in ['car', 'motorcycle', 'truck']:
            # f-string SQL is acceptable only because `c` comes from this
            # hard-coded list; never route external input through it.
            res = await conn.execute(text(f"SELECT make, count(*) FROM data.catalog_discovery WHERE vehicle_class = '{c}' GROUP BY 1 ORDER BY 2 DESC LIMIT 4"))
            total = await conn.execute(text(f"SELECT count(*) FROM data.catalog_discovery WHERE vehicle_class = '{c}'"))
            cat_tops[c] = {"list": res.fetchall(), "total": total.scalar() or 0}
        return {
            "p": p_res, "ai": (ai_hr.scalar(), ai_day.scalar()), "markets": m_data,
            "r1": r1_top.fetchall(), "r12": r12_top.fetchall(), "r14": r14_top.fetchall(),
            "pending": pending_top.fetchall(), "gold": gold_top.fetchall(), "status": status_stats.fetchall(),
            "cat": cat_tops
        }
def get_hw():
    """Collect host hardware stats for the dashboard header.

    Returns:
        dict with keys ``cpu`` (CPU %), ``ram`` (RAM %), ``gpu`` (GPU util %),
        ``vram`` ("used/totalM" string), ``temp`` (GPU temp, °C) — or ``None``
        when probing fails (e.g. no NVIDIA driver / NVML not available).
    """
    try:
        pynvml.nvmlInit()
        handle = pynvml.nvmlDeviceGetHandleByIndex(0)  # first GPU only
        mem = pynvml.nvmlDeviceGetMemoryInfo(handle)
        return {
            "cpu": psutil.cpu_percent(),
            "ram": psutil.virtual_memory().percent,
            "gpu": pynvml.nvmlDeviceGetUtilizationRates(handle).gpu,
            "vram": f"{int(mem.used/1024**2)}/{int(mem.total/1024**2)}M",
            # sensor index 0 == NVML_TEMPERATURE_GPU
            "temp": pynvml.nvmlDeviceGetTemperature(handle, 0),
        }
    except Exception:
        # BUGFIX: was a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit; only library errors should map to None.
        return None
def mini_table(data, color="white"):
    """Render (label, count) row pairs as a compact, header-less Rich table.

    Labels are truncated to 12 characters; counts are thousands-separated
    and wrapped in the given Rich color markup.
    """
    table = Table(show_header=False, box=None, expand=True, padding=(0, 1))
    table.add_column("L", ratio=3)
    table.add_column("R", justify="right", ratio=2)
    for row in data:
        label = str(row[0])[:12]
        table.add_row(label, f"[{color}]{row[1]:,}[/]")
    return table
async def main():
    """Render the one-shot terminal dashboard.

    Pulls aggregate counters from the DB (get_data) and host hardware stats
    (get_hw), then prints a header plus a 4x3 grid of Rich panels. Any error
    is caught and shown in red instead of crashing the script.
    """
    try:
        d = await get_data(); hw = get_hw()
        lay = Layout()
        lay.split_column(Layout(name="head", size=3), Layout(name="body"))
        lay["body"].split_column(Layout(name="row1"), Layout(name="row2"), Layout(name="row3"), Layout(name="row4"))
        for r in ["row1", "row2", "row3", "row4"]:
            # each row becomes three side-by-side cells: row11, row12, row13, ...
            lay[r].split_row(Layout(name=f"{r}1"), Layout(name=f"{r}2"), Layout(name=f"{r}3"))
        # --- HEADER (hw is None when GPU/NVML probing failed) ---
        h_str = f"💻 CPU: {hw['cpu']}% | 🧠 RAM: {hw['ram']}% | 📟 VRAM: {hw['vram']} | 🔥 GPU: {hw['gpu']}% ({hw['temp']}°C)" if hw else "Hardware adatok nem elérhetőek"
        lay["head"].update(Panel(h_str, title="[bold orange3]SZÁMÍTÓGÉP ADATOK[/]", style="orange3"))
        # --- ROW 1: pipeline counters + AI throughput + market matrix ---
        r11_t = Table(show_header=False, box=None, expand=True)
        r11_t.add_row("R1 (Hunter)", f"{d['p'][0]:,}"); r11_t.add_row("R2 (Res)", f"{d['p'][1]:,}")
        r11_t.add_row("R3 (Wait)", f"{d['p'][2]:,}"); r11_t.add_row("R4 (Gold)", f"[green]{d['p'][3]:,}[/]")
        r11_t.add_row("AI HR/DAY", f"[cyan]{d['ai'][0]}[/] / [yellow]{d['ai'][1]}[/]")
        lay["row11"].update(Panel(r11_t, title="1.1 PIPE & AI"))
        lay["row12"].update(Panel(mini_table(d['gold'], "green"), title="1.2 TOP MÁRKÁK"))
        r13_t = Table(show_header=True, box=None, expand=True, header_style="bold blue")
        r13_t.add_column("Típus", ratio=2); r13_t.add_column("EU", justify="right"); r13_t.add_column("USA", justify="right")
        # Pivot the (vehicle_class, market, count) rows into an EU/USA matrix;
        # any market other than USA_IMPORT is counted as EU.
        m_map = {c: {"EU": 0, "USA": 0} for c in ['car', 'motorcycle', 'truck', 'bus']}
        for c, m, q in d['markets']:
            key = 'USA' if m == 'USA_IMPORT' else 'EU'
            if c in m_map: m_map[c][key] += q
        for c, v in m_map.items(): r13_t.add_row(c[:4].upper(), f"{v['EU']:,}", f"{v['USA']:,}")
        lay["row13"].update(Panel(r13_t, title="1.3 MARKET MATRIX"))
        # --- ROW 2: per-robot top-make lists ---
        lay["row21"].update(Panel(mini_table(d['r1']), title="2.1 ROBOT 1 (RDW)"))
        lay["row22"].update(Panel(mini_table(d['r12']), title="2.2 ROBOT 1.2 (USA)"))
        lay["row23"].update(Panel(mini_table(d['r14']), title="2.3 ROBOT 1.4 (BIKE)"))
        # --- ROW 3: pipeline summaries ---
        lay["row31"].update(Panel(mini_table(d['pending']), title="3.1 PENDING TOP"))
        lay["row32"].update(Panel(mini_table(d['gold'], "green"), title="3.2 GOLD TOP", border_style="green"))
        lay["row33"].update(Panel(mini_table(d['status'], "blue"), title="3.3 STATUS", border_style="blue"))
        # --- ROW 4: per-category tops with a TOTAL footer row ---
        for i, (k, l) in enumerate([('car', '4.1 AUTO'), ('motorcycle', '4.2 MOTOR'), ('truck', '4.3 TEHER')], 1):
            t = mini_table(d['cat'][k]['list'], "yellow")
            t.add_row("[bold]TOTAL[/]", f"[bold yellow]{d['cat'][k]['total']:,}[/]")
            lay[f"row4{i}"].update(Panel(t, title=l, border_style="yellow"))
        console.print(lay)
    except Exception as e:
        console.print(f"[bold red]HIBA TÖRTÉNT:[/] {e}")
if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,121 @@
import asyncio
import httpx
import json
import logging
from sqlalchemy import text
from app.database import AsyncSessionLocal
# MB 2.0 Naplózási beállítások
logger = logging.getLogger("Data-Loader-Master")
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(message)s')
class VehicleDataLoader:
    """Downloads public make/model datasets and upserts them into
    ``data.reference_lookup`` in 1000-row batches.

    Pipeline: fetch_json -> map_source_data (per-source mapping layer that
    normalizes everything onto one schema) -> save_to_db (batch upsert).
    """

    # Verified, working sources
    SOURCES = {
        "car-data-kohut": "https://raw.githubusercontent.com/DanielKohut/car-data/master/car_data.json",
        "car-list-matth": "https://raw.githubusercontent.com/matthlavacka/car-list/master/car-list.json"
    }

    # Both brand-list sources share the exact same JSON shape
    # (list[{"brand": ..., "models": [...]}]); only the provenance tag
    # written into `specs` differs. Deduplicates the old copy-paste branches.
    _BRAND_LIST_TAGS = {
        "car-data-kohut": "kohut",
        "car-list-matth": "matthlavacka",
    }

    @staticmethod
    def normalize_name(name: str) -> str:
        """Basic text cleanup: uppercase, strip, unify common synonyms."""
        if not name: return ""
        n = str(name).upper().strip()
        # Unify frequent brand synonyms
        synonyms = {"VW": "VOLKSWAGEN", "MERCEDES": "MERCEDES-BENZ", "ALFA": "ALFA ROMEO"}
        return synonyms.get(n, n)

    async def fetch_json(self, url: str):
        """Safe download with timeout handling; returns None on any failure."""
        async with httpx.AsyncClient(timeout=60.0, follow_redirects=True) as client:
            try:
                resp = await client.get(url)
                if resp.status_code == 200:
                    return resp.json()
                logger.error(f"❌ Letöltési hiba ({resp.status_code}): {url}")
            except Exception as e:
                logger.error(f"🚨 Kommunikációs hiba: {url} -> {e}")
        return None

    def map_source_data(self, source_name, raw_data):
        """
        Mapping layer: translates each source's JSON structure onto our
        unified ``data.reference_lookup`` schema. Returns a list of dicts
        with keys make/model/year/specs/source/source_id.
        """
        unified_entries = []
        try:
            if source_name in self._BRAND_LIST_TAGS:
                # Shape: list[{"brand": "...", "models": ["...", ...]}]
                tag = self._BRAND_LIST_TAGS[source_name]
                for brand_item in raw_data:
                    make = self.normalize_name(brand_item.get("brand"))
                    for model in brand_item.get("models", []):
                        unified_entries.append({
                            "make": make, "model": self.normalize_name(model),
                            "year": None, "specs": json.dumps({"source": tag}),
                            "source": source_name, "source_id": None
                        })
            elif source_name == "os-vehicle-db":
                # Shape: list[{"make": "...", "model": "...", "year": ...}]
                for item in raw_data:
                    unified_entries.append({
                        "make": self.normalize_name(item.get("make")),
                        "model": self.normalize_name(item.get("model")),
                        "year": item.get("year"),
                        "specs": json.dumps(item),
                        "source": source_name,
                        "source_id": str(item.get("id", ""))
                    })
        except Exception as e:
            logger.error(f"⚠️ Mapping hiba a(z) {source_name} feldolgozásakor: {e}")
        return unified_entries

    async def save_to_db(self, entries):
        """Batch upsert into the database; commits every 1000 rows."""
        if not entries: return
        async with AsyncSessionLocal() as db:
            stmt = text("""
                INSERT INTO data.reference_lookup (make, model, year, specs, source, source_id)
                VALUES (:make, :model, :year, :specs, :source, :source_id)
                ON CONFLICT ON CONSTRAINT _ref_lookup_uc
                DO UPDATE SET specs = EXCLUDED.specs, updated_at = NOW()
            """)
            try:
                # Send in packets of 1000 rows
                for i in range(0, len(entries), 1000):
                    batch = entries[i:i+1000]
                    # executemany: one driver round-trip per batch instead of
                    # one execute() call per row (was a per-row loop).
                    await db.execute(stmt, batch)
                    await db.commit()
                    logger.info(f"💾 Mentve: {min(i + 1000, len(entries))} / {len(entries)}")
            except Exception as e:
                await db.rollback()
                logger.error(f"🚨 Adatbázis hiba: {e}")

    async def run_sync(self):
        """Fetch, map and persist every configured source sequentially."""
        logger.info("🚀 Data Loader (Fast Lane Support) elindul...")
        for name, url in self.SOURCES.items():
            raw_json = await self.fetch_json(url)
            if raw_json:
                logger.info(f"🧹 {name} feldolgozása...")
                unified = self.map_source_data(name, raw_json)
                await self.save_to_db(unified)
        logger.info("🏁 Minden forrás szinkronizálva.")
if __name__ == "__main__":
    asyncio.run(VehicleDataLoader().run_sync())

View File

@@ -60,8 +60,8 @@ class DiscoveryEngine:
async def seed_manual_bootstrap():
""" 2. FÁZIS: Alapozó adatok rögzítése """
initial_data = [
{"make": "AUDI", "model": "A4", "generation": "B8 (2008-2015)", "vehicle_class": "car"},
{"make": "BMW", "model": "3 SERIES", "generation": "F30 (2012-2019)", "vehicle_class": "car"}
{"make": "AUDI", "model": "A4", "generation": "B8 (2008-2015)"}, # vehicle_class törölve
{"make": "BMW", "model": "3 SERIES", "generation": "F30 (2012-2019)"}
]
try:
async with AsyncSessionLocal() as db:
@@ -131,13 +131,20 @@ class DiscoveryEngine:
elif "Motorfiets" in v_kind: v_class = 'motorcycle'
else: v_class = 'truck'
# A MÁGIA: Különbözeti Szinkronizáció SQL
# A MÁGIA: Különbözeti Szinkronizáció SQL + Explicit Type Casting
query = text("""
INSERT INTO data.catalog_discovery (make, model, vehicle_class, status, priority_score)
SELECT :make, :model, :v_class, 'pending', :priority
SELECT
CAST(:make AS VARCHAR),
CAST(:model AS VARCHAR),
CAST(:v_class AS VARCHAR),
'pending',
:priority
WHERE NOT EXISTS (
SELECT 1 FROM data.vehicle_model_definitions
WHERE make = :make AND marketing_name = :model AND status = 'gold_enriched'
WHERE make = CAST(:make AS VARCHAR)
AND marketing_name = CAST(:model AS VARCHAR)
AND status = 'gold_enriched'
)
ON CONFLICT (make, model)
DO UPDATE SET priority_score = EXCLUDED.priority_score

View File

@@ -0,0 +1,66 @@
# /app/app/workers/vehicle/vehicle_robot_1_2_nhtsa_fetcher.py
import asyncio
import httpx
import logging
from sqlalchemy import text
from app.database import AsyncSessionLocal
logger = logging.getLogger("Robot-1-2-NHTSA-EU-Only")
logging.basicConfig(level=logging.INFO)
class NHTSAFetcher:
    """Robot 1.2: enriches makes already known from EU sources with their
    USA-market models via the NHTSA vPIC API, walking years 2026 -> 1951.

    Inserts rows tagged market='USA_IMPORT' / source='NHTSA-EU-FILTERED';
    duplicates are ignored via the unique constraint.
    """

    API_URL = "https://vpic.nhtsa.dot.gov/api/vehicles/GetModelsForMakeYear/make/{make}/modelyear/{year}?format=json"

    @classmethod
    async def get_eu_makes(cls):
        """Return the distinct makes already in the DB from an EU source."""
        async with AsyncSessionLocal() as db:
            # Only hunt USA models for makes we have already seen in the EU (RDW)
            query = text("SELECT DISTINCT make FROM data.catalog_discovery WHERE market = 'EU' OR source = 'RDW'")
            res = await db.execute(query)
            return [row[0] for row in res.fetchall()]

    @classmethod
    async def run(cls):
        """Endless worker loop: refresh the EU make list, then sweep years."""
        logger.info("🚀 Robot 1.2 (EU-Guided NHTSA) indítása...")
        while True:
            target_makes = await cls.get_eu_makes()
            if not target_makes:
                logger.warning("⚠️ Még nincs EU-s márka az adatbázisban. Várakozás...")
                await asyncio.sleep(60)
                continue
            # Walk backwards in time from 2026
            for year in range(2026, 1950, -1):
                async with AsyncSessionLocal() as db:
                    for make in target_makes:
                        try:
                            async with httpx.AsyncClient(timeout=20.0) as client:
                                url = cls.API_URL.format(make=make, year=year)
                                resp = await client.get(url)
                                if resp.status_code != 200: continue
                                models = resp.json().get("Results", [])
                                inserted = 0
                                for m in models:
                                    raw_name = m.get("Model_Name")
                                    if not raw_name:
                                        # BUGFIX: a null Model_Name used to raise
                                        # AttributeError and abort the whole
                                        # make/year batch via the except below.
                                        continue
                                    model_name = raw_name.upper().strip()
                                    # USA_IMPORT tag, but only for EU-known makes!
                                    query = text("""
                                        INSERT INTO data.catalog_discovery
                                        (make, model, vehicle_class, status, market, model_year, priority_score, source)
                                        VALUES (:make, :model, 'car', 'pending', 'USA_IMPORT', :year, 5, 'NHTSA-EU-FILTERED')
                                        ON CONFLICT ON CONSTRAINT _make_model_market_year_uc DO NOTHING
                                    """)
                                    res = await db.execute(query, {"make": make, "model": model_name, "year": year})
                                    # rowcount > 0 only when a genuinely new row landed
                                    if res.rowcount > 0: inserted += 1
                                if inserted > 0:
                                    logger.info(f"{make} ({year}): {inserted} variáns dúsítva az USA-ból.")
                                await db.commit()
                        except Exception as e:
                            logger.error(f"❌ Hiba: {make} {year}: {e}")
                        # Small courtesy pause for the public API
                        await asyncio.sleep(0.5)
if __name__ == "__main__":
    asyncio.run(NHTSAFetcher.run())

View File

@@ -0,0 +1,56 @@
import asyncio
import httpx
import logging
from sqlalchemy import text
from app.database import AsyncSessionLocal
logger = logging.getLogger("Robot-1-4-Bike")
logging.basicConfig(level=logging.INFO)
# Fixed list of motorcycle manufacturers to sweep on the NHTSA API
BIKE_MAKES = [
    "HONDA", "YAMAHA", "KAWASAKI", "SUZUKI", "HARLEY-DAVIDSON",
    "BMW", "DUCATI", "KTM", "TRIUMPH", "APRILIA", "MOTO GUZZI", "INDIAN"
]
class BikeHunter:
    """Robot 1.4: harvests motorcycle models per make/year from the NHTSA
    vPIC API (2026 back to 1970) into data.catalog_discovery."""

    API_URL = "https://vpic.nhtsa.dot.gov/api/vehicles/GetModelsForMakeYear/make/{make}/modelyear/{year}/vehicleType/motorcycle?format=json"

    @classmethod
    async def run(cls):
        logger.info("🏍️ Robot 1.4 (Bike Hunter) indítása...")
        # Sweep the bikes from 2026 back to 1970
        years = range(2026, 1969, -1)
        async with AsyncSessionLocal() as db:
            for year in years:
                for make in BIKE_MAKES:
                    try:
                        async with httpx.AsyncClient(timeout=20.0) as client:
                            resp = await client.get(cls.API_URL.format(make=make, year=year))
                            if resp.status_code != 200: continue
                            models = resp.json().get("Results", [])
                            inserted = 0
                            for m in models:
                                raw_name = m.get("Model_Name")
                                if not raw_name:
                                    # BUGFIX: a null Model_Name used to raise and
                                    # abort the whole make/year batch.
                                    continue
                                model_name = raw_name.upper().strip()
                                # Plain SQL upsert; duplicates are dropped by the constraint
                                query = text("""
                                    INSERT INTO data.catalog_discovery
                                    (make, model, vehicle_class, status, market, model_year, priority_score, source)
                                    VALUES (:make, :model, 'motorcycle', 'pending', 'USA_IMPORT', :year, 8, 'NHTSA-V1-BIKE')
                                    ON CONFLICT ON CONSTRAINT _make_model_market_year_uc DO NOTHING
                                """)
                                res = await db.execute(query, {"make": make, "model": model_name, "year": year})
                                # BUGFIX: was incremented unconditionally, so the
                                # "new bikes" log counted conflict no-ops too.
                                # Count only rows that actually landed, consistent
                                # with the other NHTSA robot.
                                if res.rowcount > 0: inserted += 1
                            if inserted > 0:
                                logger.info(f"🏍️ {make} ({year}): {inserted} új motor rögzítve.")
                            await db.commit()
                    except Exception as e:
                        logger.error(f"❌ Bike Error {make} ({year}): {e}")
                # A tiny per-year breather for the API
                await asyncio.sleep(0.5)
if __name__ == "__main__":
    asyncio.run(BikeHunter.run())

View File

@@ -0,0 +1,66 @@
# /app/app/workers/vehicle/vehicle_robot_1_5_heavy_eu.py
import asyncio
import httpx
import logging
from sqlalchemy import text
from app.database import AsyncSessionLocal
logger = logging.getLogger("Robot-1-5-Heavy-EU")
logging.basicConfig(level=logging.INFO)
class HeavyEUHunter:
    """Robot 1.5: imports EU heavy-duty vehicles (trucks, buses, RVs) from
    the Dutch RDW open-data catalogue into data.catalog_discovery."""

    # RDW Open Data - the Netherlands as the EU gateway
    RDW_URL = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"

    @classmethod
    async def fetch_rdw_heavy(cls, vehicle_type: str):
        """
        vehicle_type: 'Vrachtwagen' (Teher), 'Bus', 'Kampeerauto' (Lakóautó)
        """
        # Ask for every distinct make/trade-name pair of the given type
        query_url = f"{cls.RDW_URL}?voertuigsoort={vehicle_type}&$select=merk,handelsbenaming&$limit=10000"
        async with httpx.AsyncClient(timeout=30.0) as client:
            try:
                resp = await client.get(query_url)
                if resp.status_code == 200:
                    return resp.json()
                return []
            except Exception as e:
                logger.error(f"❌ RDW Error: {e}")
                return []

    @classmethod
    async def run(cls):
        logger.info("🚛 Robot 1.5 (EU Heavy Duty) indítása...")
        # Mapping: RDW vehicle-kind name -> our internal class
        job_list = {
            "Vrachtwagen": "truck",
            "Bus": "bus",
            "Kampeerauto": "rv"
        }
        async with AsyncSessionLocal() as db:
            for rdw_name, internal_class in job_list.items():
                logger.info(f"📥 {rdw_name} adatok letöltése...")
                rows = await cls.fetch_rdw_heavy(rdw_name)
                new_count = 0
                upsert = text("""
                                INSERT INTO data.catalog_discovery
                                (make, model, vehicle_class, status, market, priority_score, source)
                                VALUES (:make, :model, :v_class, 'pending', 'EU', 20, 'RDW-HEAVY')
                                ON CONFLICT ON CONSTRAINT _make_model_market_year_uc DO NOTHING
                            """)
                for item in rows:
                    make = item.get('merk', '').upper().strip()
                    model = item.get('handelsbenaming', '').upper().strip()
                    # Skip rows missing either half of the identity
                    if not (make and model):
                        continue
                    result = await db.execute(upsert, {"make": make, "model": model, "v_class": internal_class})
                    if result.rowcount > 0:
                        new_count += 1
                await db.commit()
                logger.info(f"{rdw_name}: {new_count} új EU-s nagygép rögzítve.")
if __name__ == "__main__":
    asyncio.run(HeavyEUHunter.run())

View File

View File

@@ -1,228 +0,0 @@
import asyncio
import httpx
import logging
import os
import re
import sys
from sqlalchemy import text, select
from app.database import AsyncSessionLocal
from app.models.vehicle_definitions import VehicleModelDefinition
# MB 2.0 Szigorú naplózás
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s [%(levelname)s] Robot-1-Hunter: %(message)s',
stream=sys.stdout
)
logger = logging.getLogger("Robot-1")
class CatalogHunter:
"""
Vehicle Robot 1.7.3: Mega-Hunter (Industrial Master Version)
Teljes körű RDW adatbányászat Variant/Version szintű granuláltsággal.
"""
RDW_MAIN = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"
RDW_FUEL = "https://opendata.rdw.nl/resource/8ys7-d773.json"
RDW_ENGINE = "https://opendata.rdw.nl/resource/jh96-v4pq.json"
RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
HEADERS = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}
BATCH_SIZE = 50
@classmethod
def normalize(cls, text_val: str) -> str:
if not text_val: return ""
return re.sub(r'[^a-zA-Z0-9]', '', text_val).lower()
@classmethod
def parse_int(cls, value) -> int:
try:
if value is None or str(value).strip() == "": return 0
return int(float(value))
except (ValueError, TypeError): return 0
@classmethod
def parse_float(cls, value) -> float:
try:
if value is None or str(value).strip() == "": return 0.0
return float(value)
except (ValueError, TypeError): return 0.0
@classmethod
async def fetch_tech_details(cls, client, plate):
""" Extra technikai adatok (kW, Euro, CO2, Motorkód) párhuzamos lekérése. """
urls = {
"fuel": f"{cls.RDW_FUEL}?kenteken={plate}",
"engine": f"{cls.RDW_ENGINE}?kenteken={plate}"
}
results = {
"power_kw": 0,
"engine_code": None,
"euro_class": None,
"fuel_desc": "Unknown",
"co2": 0,
"consumption": 0.0
}
try:
resps = await asyncio.gather(*[client.get(u, headers=cls.HEADERS) for u in urls.values()])
# Üzemanyag és emisszió (8ys7-d773)
if resps[0].status_code == 200 and resps[0].json():
f = resps[0].json()[0]
p1 = cls.parse_int(f.get("netto_maximum_vermogen") or f.get("nettomaximumvermogen"))
p2 = cls.parse_int(f.get("nominaal_continu_maximum_vermogen") or f.get("nominaalcontinuvermogen"))
results.update({
"power_kw": max(p1, p2),
"fuel_desc": f.get("brandstof_omschrijving") or "Unknown",
"euro_class": f.get("euro_klasse") or f.get("uitlaatemissieniveau"),
"co2": cls.parse_int(f.get("co2_uitstoot_gecombineerd")),
"consumption": cls.parse_float(f.get("brandstofverbruik_gecombineerd"))
})
# Motorkód (jh96-v4pq)
if resps[1].status_code == 200 and resps[1].json():
results["engine_code"] = resps[1].json()[0].get("motorcode")
except Exception as e:
logger.error(f"❌ RDW-Extra hiba ({plate}): {e}")
return results
@classmethod
async def process_make_model(cls, db, task_id, make_name, model_name, v_class, priority):
clean_make = make_name.strip().upper()
clean_model = model_name.strip().upper()
logger.info(f"🎯 MEGA-VADÁSZAT INDUL: {clean_make} {clean_model}")
current_offset = 0
async with httpx.AsyncClient(timeout=30.0) as client:
while True:
params = {
"merk": clean_make,
"handelsbenaming": clean_model,
"$limit": cls.BATCH_SIZE,
"$offset": current_offset,
"$order": "kenteken DESC"
}
try:
r = await client.get(cls.RDW_MAIN, params=params, headers=cls.HEADERS)
batch = r.json() if r.status_code == 200 else []
except Exception: break
if not batch:
await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
await db.commit()
logger.info(f"🏁 {clean_make} {clean_model} minden variánsa feldolgozva.")
return
for item in batch:
try:
plate = item.get("kenteken")
if not plate: continue
# Alapadatok azonosítása
variant = item.get("variant")
version = item.get("uitvoering") # Az Execution/Version kód
ccm = cls.parse_int(item.get("cilinderinhoud"))
raw_model = str(item.get("handelsbenaming", "Unknown")).upper()
model_name_clean = raw_model.replace(clean_make, "").strip() or raw_model
norm_name = cls.normalize(model_name_clean)
# Extra technikai mélyfúrás (kW, Fuel, Engine)
tech = await cls.fetch_tech_details(client, plate)
# Ellenőrzés: Létezik-e már ez a pontos technikai variáns?
stmt = select(VehicleModelDefinition).where(
VehicleModelDefinition.make == clean_make,
VehicleModelDefinition.normalized_name == norm_name,
VehicleModelDefinition.engine_capacity == ccm,
VehicleModelDefinition.variant_code == variant,
VehicleModelDefinition.version_code == version,
VehicleModelDefinition.fuel_type == tech["fuel_desc"]
).limit(1)
existing = (await db.execute(stmt)).scalar_one_or_none()
if existing:
# Meglévő rekord frissítése a prioritással és hiányzó adatokkal
existing.priority_score = priority
if tech["power_kw"] > 0: existing.power_kw = tech["power_kw"]
if tech["engine_code"]: existing.engine_code = tech["engine_code"]
if tech["co2"] > 0: existing.co2_emissions_combined = tech["co2"]
else:
# ÚJ REKORD LÉTREHOZÁSA - MINDEN ADAT MEZŐVEL
db.add(VehicleModelDefinition(
make=clean_make,
marketing_name=model_name_clean,
normalized_name=norm_name,
variant_code=variant,
version_code=version,
type_approval_number=item.get("typegoedkeuringsnummer"),
technical_code=plate, # Kötelező mező!
engine_capacity=ccm,
power_kw=tech["power_kw"],
fuel_type=tech["fuel_desc"],
engine_code=tech["engine_code"],
# Fizikai méretek és súlyok (RDW Main-ből)
seats=cls.parse_int(item.get("aantal_zitplaatsen")),
doors=cls.parse_int(item.get("aantal_deuren")),
width=cls.parse_int(item.get("breedte")),
wheelbase=cls.parse_int(item.get("wielbasis")),
list_price=cls.parse_int(item.get("catalogusprijs")),
max_speed=cls.parse_int(item.get("maximale_constructiesnelheid")),
towing_weight_unbraked=cls.parse_int(item.get("maximum_massa_trekken_ongeremd")),
towing_weight_braked=cls.parse_int(item.get("maximum_trekken_massa_geremd")),
curb_weight=cls.parse_int(item.get("massa_ledig_voertuig")),
max_weight=cls.parse_int(item.get("technische_max_massa_voertuig") or item.get("toegestane_maximum_massa_voertuig")),
body_type=item.get("inrichting"),
# Emissziós és környezeti adatok (RDW Extra-ból)
co2_emissions_combined=tech["co2"],
fuel_consumption_combined=tech["consumption"],
euro_classification=tech["euro_class"],
cylinders=cls.parse_int(item.get("aantal_cilinders")),
# Meta adatok
vehicle_class=v_class,
priority_score=priority,
status="ACTIVE",
source="MEGA-HUNTER-v1.7.3"
))
except Exception as e:
logger.warning(f"⚠️ Hiba a sor feldolgozásakor ({plate}): {e}")
# Batch commit 50 soronként
await db.commit()
current_offset += len(batch)
# Biztonsági korlát: egy típusból ne szedjünk le többet, mint ami a variációkhoz kell
if current_offset >= 300: break
await asyncio.sleep(0.1)
@classmethod
async def run(cls):
logger.info("🤖 Mega-Hunter Robot v1.7.3 ONLINE")
while True:
try:
async with AsyncSessionLocal() as db:
# Legmagasabb prioritású modellek bekérése a Discovery listából
query = text("""
SELECT id, make, model, vehicle_class, priority_score
FROM data.catalog_discovery
WHERE status IN ('pending', 'processing')
ORDER BY priority_score DESC
LIMIT 1
""")
task = (await db.execute(query)).fetchone()
if task:
# Állapot rögzítése a "dupla munka" ellen
await db.execute(text("UPDATE data.catalog_discovery SET status = 'processing' WHERE id = :id"), {"id": task[0]})
await db.commit()
await cls.process_make_model(db, task[0], task[1], task[2], task[3], task[4])
else:
logger.info("😴 Nincs több feladat, pihenés 30 másodpercig...")
await asyncio.sleep(30)
except Exception as e:
logger.error(f"💀 Kritikus hiba a futtatás során: {e}")
await asyncio.sleep(10)
if __name__ == "__main__":
asyncio.run(CatalogHunter.run())

View File

@@ -1,173 +0,0 @@
# /app/app/workers/vehicle/vehicle_robot_1_catalog_hunter.py
import asyncio
import httpx
import logging
import os
import re
from sqlalchemy import text, select, update
from app.database import AsyncSessionLocal
from app.models.vehicle_definitions import VehicleModelDefinition
# MB 2.0 Naplózás
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] Robot-1-Hunter: %(message)s')
logger = logging.getLogger("Robot-1")
class CatalogHunter:
RDW_MAIN = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"
RDW_FUEL = "https://opendata.rdw.nl/resource/8ys7-d773.json"
RDW_ENGINE = "https://opendata.rdw.nl/resource/jh96-v4pq.json"
RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
HEADERS = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}
BATCH_SIZE = 50
@classmethod
def normalize(cls, text_val: str) -> str:
if not text_val: return ""
return re.sub(r'[^a-zA-Z0-9]', '', text_val).lower()
@classmethod
def parse_int(cls, value) -> int:
try:
if value is None or str(value).strip() == "": return 0
return int(float(value))
except (ValueError, TypeError): return 0
@classmethod
async def fetch_extra_tech(cls, client, plate):
params = {"kenteken": plate}
results = {"power_kw": 0, "euro_klasse": None, "fuel_desc": "Unknown", "engine_code": None}
try:
resp_fuel, resp_eng = await asyncio.gather(
client.get(cls.RDW_FUEL, params=params, headers=cls.HEADERS),
client.get(cls.RDW_ENGINE, params=params, headers=cls.HEADERS)
)
if resp_fuel.status_code == 200:
fuel_rows = resp_fuel.json()
max_p = 0
f_types = []
for row in fuel_rows:
p1 = cls.parse_int(row.get("netto_maximum_vermogen") or row.get("nettomaximumvermogen"))
p2 = cls.parse_int(row.get("nominaal_continu_maximum_vermogen") or row.get("nominaalcontinuvermogen"))
p = max(p1, p2)
if p > max_p: max_p = p
f = row.get("brandstof_omschrijving")
if f and f not in f_types: f_types.append(f)
if not results["euro_klasse"]:
results["euro_klasse"] = row.get("uitlaatemissieniveau") or row.get("euro_klasse")
results["power_kw"] = max_p
results["fuel_desc"] = ", ".join(f_types) if f_types else "Unknown"
if resp_eng.status_code == 200:
eng_rows = resp_eng.json()
if eng_rows: results["engine_code"] = eng_rows[0].get("motorcode")
except Exception as e:
logger.error(f"❌ RDW-Extra hiba ({plate}): {e}")
return results
@classmethod
async def process_make_model(cls, db, task_id, make_name, model_name, v_class, priority):
clean_make = make_name.strip().upper()
clean_model = model_name.strip().upper()
logger.info(f"🎯 VADÁSZAT INDUL: {clean_make} {clean_model} (Prio: {priority})")
current_offset = 0
async with httpx.AsyncClient(timeout=30.0) as client:
while True:
params = {
"merk": clean_make,
"handelsbenaming": clean_model,
"$limit": cls.BATCH_SIZE,
"$offset": current_offset,
"$order": "kenteken DESC"
}
try:
r = await client.get(cls.RDW_MAIN, params=params, headers=cls.HEADERS)
batch = r.json() if r.status_code == 200 else []
except Exception: break
if not batch:
await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
await db.commit()
logger.info(f"🏁 {clean_make} {clean_model} feldolgozva.")
return
for item in batch:
try:
plate = item.get("kenteken")
if not plate: continue
raw_model = str(item.get("handelsbenaming", "Unknown")).upper()
model_name_clean = raw_model.replace(clean_make, "").strip() or raw_model
norm_name = cls.normalize(model_name_clean)
ccm = cls.parse_int(item.get("cilinderinhoud"))
tech = await cls.fetch_extra_tech(client, plate)
# Ellenőrizzük, van-e már ilyen technikai variációnk
stmt = select(VehicleModelDefinition).where(
VehicleModelDefinition.make == clean_make,
VehicleModelDefinition.normalized_name == norm_name,
VehicleModelDefinition.engine_capacity == ccm,
VehicleModelDefinition.fuel_type == tech["fuel_desc"]
).limit(1)
existing = (await db.execute(stmt)).scalar_one_or_none()
if existing:
# Csak frissítjük, ha találtunk pontosabb adatot
if tech["engine_code"]: existing.engine_code = tech["engine_code"]
if tech["power_kw"] > 0: existing.power_kw = tech["power_kw"]
existing.priority_score = priority # Prioritás frissítése
else:
# ÚJ REKORD LÉTREHOZÁSA
db.add(VehicleModelDefinition(
make=clean_make,
marketing_name=model_name_clean,
normalized_name=norm_name,
marketing_name_aliases=[raw_model],
technical_code=plate,
fuel_type=tech["fuel_desc"],
engine_capacity=ccm,
engine_code=tech["engine_code"],
power_kw=tech["power_kw"],
cylinders=cls.parse_int(item.get("aantal_cilinders")),
euro_classification=tech["euro_klasse"],
vehicle_class=v_class,
priority_score=priority,
status="ACTIVE", # <--- EZ KELL A RÖNTGENNEK!
source="PRECISION-HUNTER-v2.1"
))
except Exception as e:
logger.warning(f"⚠️ Hiba a sor feldolgozásakor ({plate}): {e}")
await db.commit()
current_offset += len(batch)
# Ha már van elég variációnk ebből a típusból, nem kell mind a 100.000 autót átnézni
if current_offset >= 500: break
await asyncio.sleep(0.1)
@classmethod
async def run(cls):
logger.info("🤖 Vehicle Catalog Hunter ONLINE")
while True:
async with AsyncSessionLocal() as db:
# Lekérjük a prioritásos feladatokat
query = text("""
SELECT id, make, model, vehicle_class, priority_score
FROM data.catalog_discovery
WHERE status IN ('pending', 'processing')
ORDER BY priority_score DESC
LIMIT 1
""")
task = (await db.execute(query)).fetchone()
if task:
# status 'processing'-re állítása, hogy más robot ne nyúljon hozzá
await db.execute(text("UPDATE data.catalog_discovery SET status = 'processing' WHERE id = :id"), {"id": task[0]})
await db.commit()
await cls.process_make_model(db, task[0], task[1], task[2], task[3], task[4])
else:
await asyncio.sleep(30)
if __name__ == "__main__":
asyncio.run(CatalogHunter.run())

View File

@@ -159,7 +159,7 @@ class VehicleResearcher:
SET status = 'research_in_progress'
WHERE id = (
SELECT id FROM data.vehicle_model_definitions
WHERE status IN ('unverified', 'awaiting_research')
WHERE status IN ('unverified', 'awaiting_research', 'ACTIVE')
AND attempts < :max_attempts
ORDER BY
CASE WHEN make = 'TOYOTA' THEN 1 ELSE 2 END,

View File

@@ -1,137 +0,0 @@
# /opt/docker/dev/service_finder/backend/app/workers/researcher_v2_1.py
import asyncio
import logging
import warnings
import os
from datetime import datetime, timezone
from typing import Optional, List
from sqlalchemy import select, update, and_, func, or_, case
from app.db.session import AsyncSessionLocal
from app.models.vehicle_definitions import VehicleModelDefinition
# DuckDuckGo search API hiba-elnyomás és import
warnings.filterwarnings("ignore", category=RuntimeWarning, module='duckduckgo_search')
from duckduckgo_search import DDGS
# Logolás beállítása
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(name)s: %(message)s')
logger = logging.getLogger("Robot-Researcher-v2.1")
class ResearcherBot:
"""
Robot 2.1: Az internet porszívója.
Technikai adatokat gyűjt (DuckDuckGo), hogy előkészítse az AI dúsítást.
Kihasználja a motorkódot és a gyártási évet a pontosabb találatokért.
"""
def __init__(self):
self.batch_size = 5 # Egyszerre 5 járművet vesz ki
self.max_parallel_queries = 3 # Párhuzamos keresések száma
async def fetch_source(self, label: str, query: str) -> str:
""" Egyedi forrás lekérése szálbiztos módon. """
try:
def search():
with DDGS() as ddgs:
# Az első 3 találat body részét gyűjtjük be kontextusnak
results = ddgs.text(query, max_results=3)
return [f"[{r.get('title', 'No Title')}] {r.get('body', '')}" for r in results] if results else []
results = await asyncio.to_thread(search)
if not results:
return f"=== SOURCE: {label} | STATUS: EMPTY ===\n\n"
content = f"=== SOURCE: {label} | QUERY: {query} ===\n"
content += "\n---\n".join(results)
content += "\n=== END SOURCE ===\n\n"
return content
except Exception as e:
logger.error(f"❌ Keresési hiba ({label}): {str(e)}")
return f"=== SOURCE: {label} | ERROR: {str(e)} ===\n\n"
async def research_vehicle(self, vehicle_id: int):
""" Egyetlen jármű teljes körű átvilágítása. """
async with AsyncSessionLocal() as db:
res = await db.execute(select(VehicleModelDefinition).where(VehicleModelDefinition.id == vehicle_id))
v = res.scalar_one_or_none()
if not v: return
make = v.make
model = v.marketing_name
engine = v.engine_code or ""
year = f"{v.year_from}" if v.year_from else ""
# Státusz zárolása
v.status = 'research_in_progress'
await db.commit()
logger.info(f"🔎 Kutatás indul: {make} {model} (Motor: {engine}, Év: {year})")
# Célzott keresési kulcsszavak (Multi-Channel stratégia)
queries = [
("TECH_SPECS", f"{make} {model} {engine} {year} technical specifications engine power kw torque"),
("MAINTENANCE", f"{make} {model} {engine} oil capacity coolant transmission fluid type capacity"),
("TIRES_PROD", f"{make} {model} {year} tire size load index production years status")
]
# Párhuzamos forrásgyűjtés
tasks = [self.fetch_source(label, q) for label, q in queries]
search_results = await asyncio.gather(*tasks)
full_context = "".join(search_results)
async with AsyncSessionLocal() as db:
if len(full_context.strip()) > 200: # Ha van elegendő kontextus
await db.execute(
update(VehicleModelDefinition)
.where(VehicleModelDefinition.id == vehicle_id)
.values(
raw_search_context=full_context,
status='awaiting_ai_synthesis', # Átadás a Robot 2.2-nek
last_research_at=func.now(),
attempts=VehicleModelDefinition.attempts + 1
)
)
logger.info(f"✅ Kontextus rögzítve: {make} {model}")
else:
# Sikertelen keresés, visszatesszük később
await db.execute(
update(VehicleModelDefinition)
.where(VehicleModelDefinition.id == vehicle_id)
.values(
status='unverified',
attempts=VehicleModelDefinition.attempts + 1,
last_research_at=func.now()
)
)
logger.warning(f"⚠️ Kevés adat: {make} {model} - Újrapróbálkozás később")
await db.commit()
async def run(self):
    """Worker loop: repeatedly select a batch of pending vehicles and research them."""
    logger.info("🚀 Robot 2.1 (Researcher) ONLINE - Cél: 407 Toyota feldolgozása")
    while True:
        async with AsyncSessionLocal() as db:
            # Rank Toyotas ahead of every other make, then fewest attempts first.
            brand_rank = case(
                (VehicleModelDefinition.make == 'TOYOTA', 1),
                else_=2
            )
            pending_stmt = (
                select(VehicleModelDefinition.id)
                .where(
                    or_(
                        VehicleModelDefinition.status == 'unverified',
                        VehicleModelDefinition.status == 'awaiting_research',
                    )
                )
                .order_by(brand_rank, VehicleModelDefinition.attempts.asc())
                .limit(self.batch_size)
            )
            rows = (await db.execute(pending_stmt)).fetchall()
            batch = [row[0] for row in rows]
        if not batch:
            await asyncio.sleep(30)
            continue
        # Sequential processing because of the search rate limit.
        for vehicle_id in batch:
            await self.research_vehicle(vehicle_id)
            await asyncio.sleep(5)  # 5-second pause between searches
if __name__ == "__main__":
    # Script entry point: run the researcher loop until the process is killed.
    asyncio.run(ResearcherBot().run())

View File

@@ -1,262 +0,0 @@
# /app/app/workers/vehicle/vehicle_robot_3_alchemist_pro.py
import asyncio
import logging
import datetime
import random
import sys
import warnings
from sqlalchemy import select, and_, update, func, case
from app.database import AsyncSessionLocal
from app.models.vehicle_definitions import VehicleModelDefinition
from app.models.asset import AssetCatalog
from app.services.ai_service import AIService
# Suppress noisy DuckDuckGo runtime warnings.
warnings.filterwarnings("ignore", category=RuntimeWarning, module='duckduckgo_search')
from duckduckgo_search import DDGS
# MB 2.0 strict logging: everything to stdout with a service-tagged format.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [%(levelname)s] Vehicle-Alchemist-Pro: %(message)s',
    stream=sys.stdout
)
logger = logging.getLogger("Vehicle-Robot-3-Alchemist-Pro")
class TechEnricher:
    """
    Vehicle Robot 3: Industrial Alchemist (Pro Edition).

    Produces the MDM "gold" records with a hybrid (RDW + AI + Web) strategy:
    official RDW figures take precedence, AI output fills the gaps, and a web
    search is used as a last resort when both are lacking.
    """
    def __init__(self):
        # Records are suspended after this many failed enrichment attempts.
        self.max_attempts = 5
        # Number of staging rows fetched per main-loop iteration.
        self.batch_size = 10
        # Hard daily cap on AI calls (resets on date change, see check_budget).
        self.daily_ai_limit = 500
        self.ai_calls_today = 0
        self.last_reset_date = datetime.date.today()
        # Upper bound in seconds for one DuckDuckGo search.
        self.search_timeout = 15.0
    def check_budget(self) -> bool:
        """Return True while the daily AI budget is not exhausted; the counter resets on a new day."""
        if datetime.date.today() > self.last_reset_date:
            self.ai_calls_today = 0
            self.last_reset_date = datetime.date.today()
        return self.ai_calls_today < self.daily_ai_limit
    def is_data_sane(self, data: dict, rdw_kw: int, rdw_ccm: int) -> bool:
        """
        Anti-hallucination guard: technical sanity check of the AI output.

        When the RDW already supplied authoritative figures we are lenient
        with the AI, because the key values come from the RDW during the
        hybrid merge anyway.
        """
        # With official data present the record counts as sane; the hybrid merge covers the rest.
        if rdw_kw > 0 or rdw_ccm > 0:
            return True
        try:
            if not data: return False
            ccm = int(data.get("ccm", 0) or 0)
            kw = int(data.get("kw", 0) or 0)
            # Do not accept a completely empty payload when there is no RDW backing either.
            if ccm == 0 and kw == 0 and data.get("vehicle_type") != "trailer":
                return False
            if ccm > 16000 or (kw > 1500 and data.get("vehicle_type") != "truck"):
                return False
            return True
        except Exception as e:
            logger.debug(f"Data Sane Error: {e}")
            return False
    async def get_web_wisdom(self, make: str, model: str) -> str:
        """Collect context from DuckDuckGo in a thread-safe, timeout-guarded way."""
        query = f"{make} {model} technical specifications engine code fuel"
        try:
            def sync_search():
                # duckduckgo_search is synchronous; it runs in a worker thread below.
                with DDGS() as ddgs:
                    results = ddgs.text(query, max_results=3)
                    return "\n".join([r['body'] for r in results]) if results else ""
            return await asyncio.wait_for(asyncio.to_thread(sync_search), timeout=self.search_timeout)
        except asyncio.TimeoutError:
            logger.warning(f"⏱️ Web keresési időtúllépés ({make} {model})")
            return ""
        except Exception as e:
            logger.warning(f"🌐 Keresési hiba ({make}): {e}")
            return ""
    async def process_single_record(self, record_id: int):
        """Enrich one record: Read -> AI Process -> Hybrid Gold Data Merge."""
        make, m_name, v_type = "", "", "car"
        web_context = ""
        # Holders for the official RDW figures read from the staging row.
        rdw_kw, rdw_ccm, rdw_fuel, rdw_engine = 0, 0, "petrol", ""
        # STEP 1: read the row and flip its status (acts as the work lock).
        try:
            async with AsyncSessionLocal() as db:
                res = await db.execute(
                    select(VehicleModelDefinition)
                    .where(VehicleModelDefinition.id == record_id)
                    .with_for_update(skip_locked=True)
                )
                rec = res.scalar_one_or_none()
                if not rec:
                    return
                make = rec.make
                m_name = rec.marketing_name
                v_type = rec.vehicle_class or "car"
                web_context = rec.raw_search_context or ""
                # Capture the official RDW data gathered by the Hunter robot.
                rdw_kw = rec.power_kw or 0
                rdw_ccm = rec.engine_capacity or 0
                rdw_fuel = rec.fuel_type or "petrol"
                rdw_engine = rec.engine_code or ""
                rec.status = "ai_synthesis_in_progress"
                await db.commit()
        except Exception as e:
            logger.error(f"🚨 Adatbázis hiba olvasáskor (ID: {record_id}): {e}")
            return
        # STEP 2: AI and web work (no DB session held across the slow calls).
        try:
            logger.info(f"🧠 AI elemzés indul: {make} {m_name}")
            # Hand the RDW figures to the AI as context so it can build on them.
            sources_dict = {
                "web_context": web_context,
                "vehicle_class": v_type,
                "rdw_kw": rdw_kw,
                "rdw_ccm": rdw_ccm
            }
            ai_data = await AIService.get_clean_vehicle_data(make, m_name, sources_dict)
            # If the AI result is weak AND the RDW data is missing too, dig deeper on the web.
            if (not ai_data or not ai_data.get("kw")) and rdw_kw == 0:
                logger.info(f"🔍 Adathiány, extra webes mélyfúrás: {make} {m_name}")
                extra_web_info = await self.get_web_wisdom(make, m_name)
                sources_dict["web_context"] = extra_web_info
                ai_data = await AIService.get_clean_vehicle_data(make, m_name, sources_dict)
            # Hybrid (RDW-aware) sanity check.
            if not ai_data: ai_data = {}
            if not self.is_data_sane(ai_data, rdw_kw, rdw_ccm):
                raise ValueError("Az AI válasza hallucinált ÉS hivatalos RDW adatunk sincs.")
            self.ai_calls_today += 1
            # HYBRID DATA MERGE (the magic!): RDW (official) > AI (generated).
            final_kw = rdw_kw if rdw_kw > 0 else (ai_data.get("kw") or 0)
            final_ccm = rdw_ccm if rdw_ccm > 0 else (ai_data.get("ccm") or 0)
            # Fuel cleanup (RDW is sometimes in Dutch; AI may clarify, otherwise keep RDW).
            final_fuel = rdw_fuel if (rdw_fuel and rdw_fuel != "Unknown") else ai_data.get("fuel_type", "petrol")
            final_engine = rdw_engine if rdw_engine else ai_data.get("engine_code", "Nincs adat")
            # Refresh the JSON payload with the authoritative values as well.
            ai_data["kw"] = final_kw
            ai_data["ccm"] = final_ccm
            ai_data["engine_code"] = final_engine
            # STEP 3: persist the gold record.
            async with AsyncSessionLocal() as db:
                clean_model = str(ai_data.get("marketing_name", m_name))[:50].upper()
                cat_stmt = select(AssetCatalog).where(and_(
                    AssetCatalog.make == make.upper(),
                    AssetCatalog.model == clean_model,
                    AssetCatalog.power_kw == final_kw  # uniqueness keyed on the exact kW
                )).limit(1)
                existing_cat = (await db.execute(cat_stmt)).scalar_one_or_none()
                if not existing_cat:
                    db.add(AssetCatalog(
                        make=make.upper(),
                        model=clean_model,
                        power_kw=final_kw,
                        engine_capacity=final_ccm,
                        fuel_type=final_fuel,
                        vehicle_class=v_type,
                        factory_data=ai_data  # enriched JSON payload
                    ))
                    logger.info(f"✨ ÚJ ARANY REKORD (HIBRID): {make.upper()} {clean_model} ({final_ccm}ccm, {final_kw}kW)")
                # Update the staging row with the confirmed values.
                await db.execute(
                    update(VehicleModelDefinition)
                    .where(VehicleModelDefinition.id == record_id)
                    .values(
                        status="gold_enriched",
                        technical_code=ai_data.get("technical_code") or f"REF-{record_id}",
                        engine_capacity=final_ccm,
                        power_kw=final_kw,
                        updated_at=func.now()
                    )
                )
                await db.commit()
        except Exception as e:
            # STEP 4: error handling — count the attempt, then requeue or suspend.
            logger.error(f"🚨 Hiba a(z) {record_id} rekordnál ({make} {m_name}): {e}")
            try:
                async with AsyncSessionLocal() as db:
                    await db.execute(
                        update(VehicleModelDefinition)
                        .where(VehicleModelDefinition.id == record_id)
                        .values(
                            attempts=VehicleModelDefinition.attempts + 1,
                            last_error=str(e)[:200],
                            status=case(
                                (VehicleModelDefinition.attempts >= self.max_attempts - 1, "suspended"),
                                else_="unverified"
                            ),
                            updated_at=func.now()
                        )
                    )
                    await db.commit()
            except Exception as db_err:
                logger.critical(f"💀 Végzetes adatbázis hiba a fallback mentésnél: {db_err}")
    async def run(self):
        """Main loop: while within budget, fetch pending rows and enrich them one by one."""
        logger.info(f"🚀 Alchemist Pro HIBRID ONLINE (Napi limit: {self.daily_ai_limit})")
        while True:
            try:
                if not self.check_budget():
                    logger.warning("💰 AI Keret kimerült. Alvás 1 órát.")
                    await asyncio.sleep(3600)
                    continue
                async with AsyncSessionLocal() as db:
                    stmt = select(VehicleModelDefinition.id).where(and_(
                        VehicleModelDefinition.status.in_(["unverified", "awaiting_ai_synthesis"]),
                        VehicleModelDefinition.attempts < self.max_attempts
                    )).limit(self.batch_size)
                    res = await db.execute(stmt)
                    ids = [r[0] for r in res.fetchall()]
                if not ids:
                    await asyncio.sleep(60)
                    continue
                for rid in ids:
                    await self.process_single_record(rid)
                    await asyncio.sleep(random.uniform(5.0, 15.0))  # go easy on the GPU
            except Exception as e:
                logger.error(f"💀 Kritikus hiba a főciklusban: {e}")
                await asyncio.sleep(10)
if __name__ == "__main__":
    try:
        asyncio.run(TechEnricher().run())
    except KeyboardInterrupt:
        # Graceful shutdown on Ctrl+C.
        logger.info("🛑 Alchemist Pro leállítva.")

View File

@@ -1,224 +0,0 @@
import asyncio
import datetime
import json
import logging
import os
import random
import sys

from sqlalchemy import text, func, update, case

from app.database import AsyncSessionLocal
from app.models.vehicle_definitions import VehicleModelDefinition
from app.models.asset import AssetCatalog
from app.services.ai_service import AIService
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] Vehicle-Alchemist-Pro: %(message)s', stream=sys.stdout)
logger = logging.getLogger("Vehicle-Robot-3-Alchemist-Pro")
class TechEnricher:
    """
    Vehicle Robot 3: Alchemist Pro (atomic-locking patch).

    Pure GPU focus: only AI analysis and data merging — no web searches.
    Strict sanity checking; official RDW figures override AI output in the
    hybrid merge, and the main loop claims work atomically via
    FOR UPDATE SKIP LOCKED.
    """
    def __init__(self):
        # Suspend a record after this many failed enrichment attempts.
        self.max_attempts = 5
        # Daily AI-call cap, overridable via the AI_DAILY_LIMIT env var.
        # (`os` must be imported at module level for this to work on import.)
        self.daily_ai_limit = int(os.getenv("AI_DAILY_LIMIT", "10000"))
        self.ai_calls_today = 0
        self.last_reset_date = datetime.date.today()
    def check_budget(self) -> bool:
        """Return True while today's AI budget is not exhausted; the counter resets on a new day."""
        if datetime.date.today() > self.last_reset_date:
            self.ai_calls_today = 0
            self.last_reset_date = datetime.date.today()
        return self.ai_calls_today < self.daily_ai_limit
    # FIX: was `ddef is_data_sane(...)` — a typo that made the whole module a SyntaxError.
    def is_data_sane(self, data: dict, base_info: dict) -> bool:
        """
        Strict but intelligent AI hallucination filter.

        Hard physical limits are checked on the AI values alone; data
        completeness is checked on the RDW+AI merge, and merely-missing
        values are logged but allowed through as partial data (to avoid
        endless retry loops).
        """
        if not data:
            logger.warning("Sane-check: Teljesen üres AI válasz.")
            return False
        try:
            # 1. Basic physical limits, on the AI data only.
            ai_ccm = int(data.get("ccm", 0) or 0)
            ai_kw = int(data.get("kw", 0) or 0)
            v_class = base_info.get("v_type", "car")
            if ai_ccm > 18000:
                logger.warning(f"Sane-check bukás: Irreális CCM érték ({ai_ccm})")
                return False
            if ai_kw > 1500 and v_class != "truck":
                logger.warning(f"Sane-check bukás: Irreális KW érték ({ai_kw})")
                return False
            # 2. COMBINED completeness check (RDW + AI).
            # If the RDW knows a value, the AI is not required to find it.
            merged_kw = base_info.get('rdw_kw') or ai_kw
            merged_ccm = base_info.get('rdw_ccm') or ai_ccm
            # FIX: None-safe fuel resolution. The old
            # `data.get("fuel_type", ...).lower()` raised AttributeError when
            # the AI returned an explicit `"fuel_type": None`, rejecting the
            # record and re-feeding the retry loop this method tries to break.
            fuel = (data.get("fuel_type") or base_info.get("rdw_fuel") or "").lower()
            # Missing kW even after the merge: log, but keep as partial data.
            if merged_kw == 0:
                logger.warning("Sane-check figyelmeztetés: Hiányzó KW (se RDW, se AI). Engedélyezve részleges adatként.")
                # Intentionally no `return False` — save what we have so far.
            # Missing ccm on a combustion engine (non-trailer): also allowed through.
            if merged_ccm == 0 and "electric" not in fuel and "elektric" not in fuel and v_class != "trailer":
                logger.warning("Sane-check figyelmeztetés: Hiányzó CCM egy belsőégésű motornál. Engedélyezve részleges adatként.")
                # Allowed through as well, to break out of the endless loop.
            return True
        except Exception as e:
            logger.error(f"Sane check hiba: {e}")
            return False
    async def process_single_record(self, db, record_id: int, base_info: dict, current_attempts: int):
        """
        Enrich one already-claimed record on the given session.

        Flow: AI call -> sanity validation -> hybrid RDW/AI merge -> insert
        into the gold catalog -> close the staging row. On any failure the
        attempt counter is bumped and the record is requeued or suspended.
        """
        try:
            logger.info(f"🧠 AI dúsítás indul: {base_info['make']} {base_info['m_name']}")
            # STEP 1: AI call (the model receives everything we know as context).
            ai_data = await AIService.get_clean_vehicle_data(
                base_info['make'],
                base_info['m_name'],
                base_info
            )
            # STEP 2: validation. On bad AI data we drop the case — no web fallback here.
            if not ai_data or not self.is_data_sane(ai_data, base_info):
                raise ValueError("Az AI hiányos adatot adott vissza vagy hallucinált.")
            # STEP 3: HYBRID MERGE — RDW overrides AI for the official parameters.
            final_kw = base_info['rdw_kw'] if base_info['rdw_kw'] > 0 else (ai_data.get("kw") or 0)
            final_ccm = base_info['rdw_ccm'] if base_info['rdw_ccm'] > 0 else (ai_data.get("ccm") or 0)
            # Fuel cleanup (RDW value wins unless absent/Unknown).
            fuel_rdw = base_info.get('rdw_fuel', '')
            final_fuel = fuel_rdw if fuel_rdw and fuel_rdw != "Unknown" else ai_data.get("fuel_type", "petrol")
            final_engine = base_info['rdw_engine'] if base_info['rdw_engine'] else ai_data.get("engine_code", "Unknown")
            final_euro = base_info['rdw_euro'] or ai_data.get("euro_classification")
            final_cylinders = base_info['rdw_cylinders'] or ai_data.get("cylinders")
            # STEP 4: insert into the gold catalog.
            clean_model = str(ai_data.get("marketing_name", base_info['m_name']))[:50].upper()
            cat_stmt = text("""
                INSERT INTO data.vehicle_catalog
                (master_definition_id, make, model, power_kw, engine_capacity, fuel_type, factory_data)
                VALUES (:m_id, :make, :model, :kw, :ccm, :fuel, :factory)
                RETURNING id;
            """)
            await db.execute(cat_stmt, {
                "m_id": record_id,
                "make": base_info['make'].upper(),
                "model": clean_model,
                "kw": final_kw,
                "ccm": final_ccm,
                "fuel": final_fuel,
                "factory": json.dumps(ai_data)
            })
            # STEP 5: close the staging (VMD) row with the merged values.
            await db.execute(
                update(VehicleModelDefinition)
                .where(VehicleModelDefinition.id == record_id)
                .values(
                    status="gold_enriched",
                    engine_capacity=final_ccm,
                    power_kw=final_kw,
                    fuel_type=final_fuel,
                    engine_code=final_engine,
                    euro_classification=final_euro,
                    cylinders=final_cylinders,
                    specifications=ai_data,  # keep the full AI output in the master table too
                    updated_at=func.now()
                )
            )
            await db.commit()
            logger.info(f"✨ ARANY REKORD KÉSZ: {base_info['make'].upper()} {clean_model}")
            self.ai_calls_today += 1
        except Exception as e:
            await db.rollback()
            logger.warning(f"⚠️ Alkimista hiba ({base_info['make']} {base_info['m_name']}): {e}")
            # Requeue or suspend depending on the attempt count.
            new_status = 'suspended' if current_attempts + 1 >= self.max_attempts else 'unverified'
            await db.execute(
                update(VehicleModelDefinition)
                .where(VehicleModelDefinition.id == record_id)
                .values(
                    attempts=current_attempts + 1,
                    last_error=str(e)[:200],
                    status=new_status,
                    updated_at=func.now()
                )
            )
            await db.commit()
            if new_status == 'unverified':
                logger.info("♻️ Akta visszaküldve a Robot-2-nek (Kutató).")
    async def run(self):
        """Main loop: atomically claim one pending record at a time and enrich it."""
        logger.info(f"🚀 Alchemist Pro HIBRID ONLINE (Atomi Zárolás Patch)")
        while True:
            if not self.check_budget():
                logger.warning("💸 Napi AI limit kimerítve! Pihenés...")
                await asyncio.sleep(3600)
                continue
            try:
                async with AsyncSessionLocal() as db:
                    # ATOMIC CLAIM (the guard against race conditions):
                    # picks up files from both Robot-1 (ACTIVE) and
                    # Robot-2 (awaiting_ai_synthesis).
                    query = text("""
                        UPDATE data.vehicle_model_definitions
                        SET status = 'ai_synthesis_in_progress'
                        WHERE id = (
                            SELECT id FROM data.vehicle_model_definitions
                            WHERE status IN ('awaiting_ai_synthesis', 'ACTIVE')
                            AND attempts < :max_attempts
                            ORDER BY
                                CASE WHEN status = 'awaiting_ai_synthesis' THEN 1 ELSE 2 END,
                                priority_score DESC
                            FOR UPDATE SKIP LOCKED
                            LIMIT 1
                        )
                        RETURNING id, make, marketing_name, vehicle_class, power_kw, engine_capacity,
                                  fuel_type, engine_code, euro_classification, cylinders, raw_search_context, attempts;
                    """)
                    result = await db.execute(query, {"max_attempts": self.max_attempts})
                    task = result.fetchone()
                    await db.commit()
                if task:
                    # Unpack the claimed row into the base_info dict.
                    r_id = task[0]
                    base_info = {
                        "make": task[1], "m_name": task[2], "v_type": task[3] or "car",
                        "rdw_kw": task[4] or 0, "rdw_ccm": task[5] or 0,
                        "rdw_fuel": task[6] or "petrol", "rdw_engine": task[7] or "",
                        "rdw_euro": task[8], "rdw_cylinders": task[9],
                        "web_context": task[10] or ""
                    }
                    attempts = task[11]
                    # Separate DB session for the processing (the AI call is slow).
                    async with AsyncSessionLocal() as process_db:
                        await self.process_single_record(process_db, r_id, base_info, attempts)
                    # GPU cooldown / Ollama rate limit.
                    await asyncio.sleep(random.uniform(1.5, 3.5))
                else:
                    logger.info("😴 Nincs feldolgozandó akta, az Alkimista pihen...")
                    await asyncio.sleep(15)
            except Exception as e:
                logger.error(f"💀 Kritikus hiba a főciklusban: {e}")
                await asyncio.sleep(10)
if __name__ == "__main__":
    # `os` (needed by TechEnricher.__init__) is now imported at module level,
    # so the former guard-local `import os` hack is no longer required.
    try:
        asyncio.run(TechEnricher().run())
    except KeyboardInterrupt:
        # Graceful shutdown on Ctrl+C, matching the other Alchemist entry point.
        logger.info("🛑 Alchemist Pro leállítva.")

View File

View File

@@ -0,0 +1,28 @@
"""mdm_market_and_year_expansion
Revision ID: 0472f45a7d62
Revises: ddaaee0dc5d2
Create Date: 2026-03-09 12:05:43.937729
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '0472f45a7d62'
down_revision: Union[str, Sequence[str], None] = 'ddaaee0dc5d2'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    No-op placeholder: this revision records a migration point in the chain
    but applies no schema changes.
    """
    pass


def downgrade() -> None:
    """Downgrade schema.

    No-op placeholder matching the empty upgrade step.
    """
    pass

View File

@@ -0,0 +1,28 @@
"""Add reference lookup table
Revision ID: 365190cf24e5
Revises: 62c259b715b0
Create Date: 2026-03-09 17:17:36.726879
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '365190cf24e5'
down_revision: Union[str, Sequence[str], None] = '62c259b715b0'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    No-op placeholder: this revision records a migration point in the chain
    but applies no schema changes.
    """
    pass


def downgrade() -> None:
    """Downgrade schema.

    No-op placeholder matching the empty upgrade step.
    """
    pass

View File

@@ -0,0 +1,28 @@
"""Add reference lookup table
Revision ID: 5a8ffc9bf401
Revises: 365190cf24e5
Create Date: 2026-03-09 17:23:19.533190
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '5a8ffc9bf401'
down_revision: Union[str, Sequence[str], None] = '365190cf24e5'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    No-op placeholder: this revision records a migration point in the chain
    but applies no schema changes.
    """
    pass


def downgrade() -> None:
    """Downgrade schema.

    No-op placeholder matching the empty upgrade step.
    """
    pass

View File

@@ -0,0 +1,28 @@
"""mdm_market_and_year_expansion
Revision ID: 62c259b715b0
Revises: 0472f45a7d62
Create Date: 2026-03-09 12:34:12.318095
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '62c259b715b0'
down_revision: Union[str, Sequence[str], None] = '0472f45a7d62'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    No-op placeholder: this revision records a migration point in the chain
    but applies no schema changes.
    """
    pass


def downgrade() -> None:
    """Downgrade schema.

    No-op placeholder matching the empty upgrade step.
    """
    pass

View File

@@ -0,0 +1,28 @@
"""Sync reference lookup table
Revision ID: 98814bd15f99
Revises: 5a8ffc9bf401
Create Date: 2026-03-09 17:27:43.099664
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '98814bd15f99'
down_revision: Union[str, Sequence[str], None] = '5a8ffc9bf401'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema.

    No-op placeholder: this revision records a migration point in the chain
    but applies no schema changes.
    """
    pass


def downgrade() -> None:
    """Downgrade schema.

    No-op placeholder matching the empty upgrade step.
    """
    pass

View File

@@ -36,3 +36,6 @@ apscheduler
pytest
pytest-asyncio
psycopg2-binary
rich
nvidia-ml-py
psutil