admin firs step
This commit is contained in:
@@ -365,7 +365,7 @@ async def approve_staged_service(
|
||||
|
||||
from app.workers.service.validation_pipeline import ValidationPipeline
|
||||
from app.models.marketplace.service import ServiceProfile
|
||||
from app.models.gamification.gamification import GamificationProfile
|
||||
from app.models.gamification.gamification import UserStats
|
||||
|
||||
|
||||
class LocationUpdate(BaseModel):
|
||||
@@ -509,13 +509,13 @@ async def apply_gamification_penalty(
|
||||
)
|
||||
|
||||
# Megkeressük a felhasználó gamification profilját (vagy létrehozzuk)
|
||||
gamification_stmt = select(GamificationProfile).where(GamificationProfile.user_id == user_id)
|
||||
gamification_stmt = select(UserStats).where(UserStats.user_id == user_id)
|
||||
gamification_result = await db.execute(gamification_stmt)
|
||||
gamification = gamification_result.scalar_one_or_none()
|
||||
|
||||
if not gamification:
|
||||
# Ha nincs profil, létrehozzuk alapértelmezett értékekkel
|
||||
gamification = GamificationProfile(
|
||||
gamification = UserStats(
|
||||
user_id=user_id,
|
||||
level=0,
|
||||
xp=0,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/api/v1/endpoints/services.py
|
||||
from fastapi import APIRouter, Depends, Form, Query, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, and_, text
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
#/opt/docker/dev/service_finder/backend/app/api/v1/endpoints/users.py
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from typing import Dict, Any
|
||||
|
||||
115
backend/app/scripts/generate_db_map.py
Normal file
115
backend/app/scripts/generate_db_map.py
Normal file
@@ -0,0 +1,115 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/scripts/generate_db_map.py
#!/usr/bin/env python3
# NOTE(review): the shebang above is not on line 1, so it has no effect when the
# script is executed directly — confirm whether the path comment should move below it.
import asyncio
import importlib
import sys
from datetime import datetime
from pathlib import Path
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy import inspect

# ==============================================================================
# THOUGHT PROCESS:
# 1. Safety: we do not modify the working sync_engine.py; we create a new tool.
# 2. Dynamic path resolution: instead of hard-wiring the '/app/docs/v02' path
#    (which can differ inside a Docker environment), we "climb back" up the
#    directory tree via Path(__file__).
#    Route: scripts -> app -> backend -> service_finder -> docs/v02
# 3. Model loading: we reuse the proven dynamic_import_models() logic so that
#    Base.metadata is guaranteed to contain every table.
# ==============================================================================

# Resolve the key directories relative to this file's on-disk location.
current_file = Path(__file__).resolve()
backend_dir = current_file.parent.parent.parent
project_root = backend_dir.parent
docs_dir = project_root / "docs" / "v02"

# Make the 'app' package importable when the script is run directly.
sys.path.insert(0, str(backend_dir))

from app.database import Base
from app.core.config import settings
|
||||
|
||||
def dynamic_import_models():
    """Import every module under app/models so Base.metadata is fully populated.

    Mirrors the proven logic from sync_engine.py: walk the models directory
    recursively and import each module on a best-effort basis.
    """
    models_root = current_file.parent.parent / "models"
    for model_file in models_root.rglob("*.py"):
        if model_file.name == "__init__.py":
            continue
        # Turn the relative file path into a dotted module path, dropping '.py'.
        dotted = str(model_file.relative_to(models_root))
        dotted = dotted.replace('/', '.').replace('\\', '.')[:-3]
        try:
            importlib.import_module(f"app.models.{dotted}")
        except Exception:
            # Best effort: one broken model module must not abort the whole run.
            pass
|
||||
|
||||
async def generate_markdown():
    """Inspect the live database and write a timestamped Markdown schema map.

    Connects using settings.SQLALCHEMY_DATABASE_URI, walks every schema that
    appears in Base.metadata, and emits one Markdown table per database table
    into docs/v02/database_schema_<timestamp>.md.
    """
    engine = create_async_engine(str(settings.SQLALCHEMY_DATABASE_URI))

    # Make sure the target directory exists before writing.
    docs_dir.mkdir(parents=True, exist_ok=True)

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = f"database_schema_{timestamp}.md"
    filepath = docs_dir / filename

    markdown_content = "# 🗺️ Service Finder Adatbázis Térkép\n\n"
    markdown_content += f"> Generálva: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n\n"

    def inspect_db(connection):
        # Runs on the sync side of the async connection (see run_sync below);
        # accumulates into the enclosing markdown_content string.
        nonlocal markdown_content
        inspector = inspect(connection)
        metadata = Base.metadata

        # Only look at schemas that actually contain our models.
        model_schemas = sorted({t.schema for t in metadata.sorted_tables if t.schema})

        for schema in model_schemas:
            markdown_content += f"## Séma: `{schema}`\n\n"
            db_tables = inspector.get_table_names(schema=schema)

            if not db_tables:
                markdown_content += "*Ebben a sémában még nincsenek táblák az adatbázisban.*\n\n"
                continue

            for table_name in sorted(db_tables):
                # Extract columns and key constraints from the real database.
                columns = inspector.get_columns(table_name, schema=schema)
                pk_constraint = inspector.get_pk_constraint(table_name, schema=schema)
                pks = pk_constraint.get('constrained_columns', [])
                fk_constraints = inspector.get_foreign_keys(table_name, schema=schema)
                fk_cols = [col for fk in fk_constraints for col in fk.get('constrained_columns', [])]

                markdown_content += f"### Tábla: `{table_name}`\n"
                markdown_content += "| Oszlop | Típus | Nullable | Alapértelmezett | Extrák |\n"
                markdown_content += "| :--- | :--- | :--- | :--- | :--- |\n"

                for col in columns:
                    extras_list = []
                    if col['name'] in pks:
                        extras_list.append("🔑 PK")
                    if col['name'] in fk_cols:
                        extras_list.append("🔗 FK")

                    extras = " ".join(extras_list)
                    # Empty cell when the column has no server-side default.
                    default_val = f"`{col['default']}`" if col.get('default') else ""

                    markdown_content += f"| **{col['name']}** | `{str(col['type'])}` | {col['nullable']} | {default_val} | {extras} |\n"

                markdown_content += "\n"

    async with engine.connect() as conn:
        await conn.run_sync(inspect_db)
    await engine.dispose()

    with open(filepath, "w", encoding="utf-8") as f:
        f.write(markdown_content)

    print(f"✅ Adatbázis térkép sikeresen legenerálva ide: {filepath}")
|
||||
|
||||
async def main():
    """Entry point: populate Base.metadata first, then emit the schema map."""
    dynamic_import_models()
    await generate_markdown()


if __name__ == "__main__":
    asyncio.run(main())
|
||||
@@ -148,14 +148,11 @@ async def get_stats(engine):
|
||||
res_r12 = (await conn.execute(text("SELECT make, model FROM vehicle.catalog_discovery WHERE status = 'processing' ORDER BY id DESC LIMIT 5"))).fetchall()
|
||||
|
||||
# 5. Új adatbázis statisztikák
|
||||
# Kiemelt összesítő: published (published) és manual_review_needed (unverified)
|
||||
published_count = (await conn.execute(text("SELECT COUNT(*) FROM vehicle.vehicle_model_definitions WHERE status = 'published'"))).scalar()
|
||||
manual_review_needed_count = (await conn.execute(text("SELECT COUNT(*) FROM vehicle.vehicle_model_definitions WHERE status = 'unverified'"))).scalar()
|
||||
|
||||
# Státusz eloszlás
|
||||
status_distribution = (await conn.execute(text("SELECT status, COUNT(*) as count FROM vehicle.vehicle_model_definitions GROUP BY status ORDER BY count DESC"))).fetchall()
|
||||
|
||||
# Márka szerinti eloszlás - csak véglegesített (published)
|
||||
make_distribution = (await conn.execute(text("SELECT make, COUNT(*) as count FROM vehicle.vehicle_model_definitions WHERE status = 'published' GROUP BY make ORDER BY count DESC LIMIT 15"))).fetchall()
|
||||
|
||||
# 6. Kézi javításra várók listája (Top 15)
|
||||
@@ -255,12 +252,8 @@ def update_dashboard(layout, data, error_msg=""):
|
||||
|
||||
layout["hardware"].update(hw_layout)
|
||||
|
||||
# Database stats panels
|
||||
# Kiemelt összesítő
|
||||
summary_text = f"[bold green]Véglegesített: {published_count:,}[/] | [bold yellow]Kézi ellenőrzés: {manual_review_needed_count:,}[/]"
|
||||
summary_panel = Panel(summary_text, title="📊 Jármű Katalógus Összesítő", border_style="cyan")
|
||||
|
||||
# Bal oldali panel: Státusz eloszlás (magyar fordításokkal)
|
||||
status_table = Table(title="📈 Státusz eloszlás", expand=True, border_style="magenta")
|
||||
status_table.add_column("Státusz", style="bold")
|
||||
status_table.add_column("Mennyiség", justify="right")
|
||||
@@ -269,7 +262,6 @@ def update_dashboard(layout, data, error_msg=""):
|
||||
status_table.add_row(translated, f"{count:,}")
|
||||
layout["db_left"].update(Panel(status_table, title="📊 Státuszok", border_style="magenta"))
|
||||
|
||||
# Jobb oldali panel: Márka szerinti eloszlás (csak véglegesített)
|
||||
make_table = Table(title="🚗 Márkák (véglegesített)", expand=True, border_style="green")
|
||||
make_table.add_column("Márka", style="yellow")
|
||||
make_table.add_column("Véglegesített DB", justify="right")
|
||||
@@ -277,7 +269,6 @@ def update_dashboard(layout, data, error_msg=""):
|
||||
make_table.add_row(str(make), f"{count:,}")
|
||||
layout["db_right"].update(Panel(make_table, title="🏆 Top Márkák", border_style="green"))
|
||||
|
||||
# Kézi javításra várók táblázata
|
||||
manual_table = Table(title="🛠️ Kézi Javításra Várók (Top 15)", expand=True, border_style="yellow")
|
||||
manual_table.add_column("Márka", style="bold")
|
||||
manual_table.add_column("Modell", style="cyan")
|
||||
@@ -286,7 +277,6 @@ def update_dashboard(layout, data, error_msg=""):
|
||||
manual_table.add_row(str(make), str(model) if model else "N/A", f"{count:,}")
|
||||
layout["manual_review"].update(Panel(manual_table, title="🛠️ Kézi Javításra Várók", border_style="yellow"))
|
||||
|
||||
# Ha volt hiba az adatlekérésnél, írjuk ki alulra!
|
||||
footer_text = f"Sentinel v2.6 | Kernel: Stabil | R1 Pörög: {r_counts[0]} várakozik"
|
||||
if error_msg: footer_text = f"[red bold]HIBA: {error_msg}[/]"
|
||||
layout["footer"].update(Panel(footer_text, style="italic grey50"))
|
||||
@@ -300,8 +290,17 @@ async def main():
|
||||
data = await get_stats(engine)
|
||||
update_dashboard(layout, data)
|
||||
except Exception as e:
|
||||
# Ezt már nem nyeljük el!
|
||||
update_dashboard(layout, ((0,0), (0,0,0,0), [], ([],[],[]), {"cpu_usage":0,"ram_perc":0,"ram_used":0,"ram_total":0,"gpu":None}, [], (0, 0, [], [])), str(e))
|
||||
# JAVÍTVA: A db_stats tuple most már 5 elemű, ahogy az update_dashboard várja!
|
||||
fallback_data = (
|
||||
(0, 0), # rates
|
||||
(0, 0, 0, 0), # r_counts
|
||||
[], # top_makes
|
||||
([], [], []), # live_data
|
||||
{"cpu_usage": 0, "ram_perc": 0, "ram_used": 0, "ram_total": 0, "gpu": None, "gpu_content": "Várakozás..."}, # hw
|
||||
[], # ai
|
||||
(0, 0, [], [], []) # db_stats -> 5 ELEM!
|
||||
)
|
||||
update_dashboard(layout, fallback_data, str(e))
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/workers/vehicle/ultimatespecs/vehicle_ultimate_r0_spider.py
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Worker: vehicle_ultimate_r0_spider
|
||||
@@ -32,7 +33,7 @@ logging.basicConfig(
|
||||
logger = logging.getLogger("R0-SPIDER")
|
||||
|
||||
# Konfiguráció
|
||||
SLEEP_INTERVAL = random.uniform(3, 6) # 3-6 mp között várakozás
|
||||
SLEEP_INTERVAL = random.uniform(1, 2) # 1-2 mp között várakozás
|
||||
MAX_RETRIES = 3
|
||||
BASE_URL = "https://www.ultimatespecs.com/index.php?q={query}"
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/workers/vehicle/ultimatespecs/vehicle_ultimate_r1_scraper.py
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Worker: vehicle_ultimate_r1_scraper
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/workers/vehicle/ultimatespecs/vehicle_ultimate_r2_enricher.py
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Worker: vehicle_ultimate_r2_enricher
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# /opt/docker/dev/service_finder/backend/app/workers/vehicle/ultimatespecs/vehicle_ultimate_r3_finalizer.py
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Worker: vehicle_ultimate_r3_finalizer
|
||||
@@ -389,7 +390,7 @@ def main():
|
||||
# Fő ciklus indítása - korlátozott számú iterációval teszteléshez
|
||||
try:
|
||||
# Teszteléshez: maximum 5 iteráció
|
||||
asyncio.run(finalizer.run(max_iterations=5))
|
||||
asyncio.run(finalizer.run(max_iterations=sys.maxsize))
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Keyboard interrupt received, shutting down...")
|
||||
finally:
|
||||
|
||||
60
backend/app/workers/vehicle/vehicle_efficiency_optimizer.py
Normal file
60
backend/app/workers/vehicle/vehicle_efficiency_optimizer.py
Normal file
@@ -0,0 +1,60 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from sqlalchemy import text
|
||||
from app.database import AsyncSessionLocal
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s [OPTIMIZER] %(message)s')
|
||||
logger = logging.getLogger("Efficiency-Optimizer")
|
||||
|
||||
async def optimize_queue():
    """One-shot queue optimizer for the vehicle catalog pipeline.

    Phase 1 promotes already-complete records straight to 'gold_enriched'
    so they skip the expensive AI synthesis step; phase 2 marks pending rows
    that already have a gold twin as 'merged_duplicate'. Both phases run in
    one transaction: commit at the end, rollback and log on any error.
    """
    async with AsyncSessionLocal() as db:
        try:
            # PHASE 1: AUTO-GOLD (what is already complete must not go to the AI).
            # If UltimateSpecs or RDW already filled in the essentials, promote to Gold.
            logger.info("🚀 1. Fázis: Auto-Gold ellenőrzés indítása...")
            auto_gold_query = text("""
                UPDATE vehicle.vehicle_model_definitions
                SET status = 'gold_enriched',
                    updated_at = NOW(),
                    source = source || ' + AUTO_GOLD'
                WHERE status = 'awaiting_ai_synthesis'
                AND power_kw > 0
                AND engine_capacity > 0
                AND fuel_type != 'Unknown'
                AND body_type IS NOT NULL
                AND trim_level != ''
                RETURNING id;
            """)
            result = await db.execute(auto_gold_query)
            logger.info(f"✅ {len(result.fetchall())} járművet automatikusan ARANY státuszba emeltem (AI megspórolva).")

            # PHASE 2: DEDUPLICATION (catalog comparison).
            # Find the pending rows that already have a GOLD counterpart.
            logger.info("🚀 2. Fázis: Duplikációk szűrése a katalógus alapján...")
            dedup_query = text("""
                UPDATE vehicle.vehicle_model_definitions AS pending
                SET status = 'merged_duplicate',
                    updated_at = NOW()
                FROM vehicle.vehicle_model_definitions AS gold
                WHERE pending.status = 'awaiting_ai_synthesis'
                AND gold.status = 'gold_enriched'
                AND pending.make = gold.make
                AND pending.normalized_name = gold.normalized_name
                AND pending.year_from = gold.year_from
                AND pending.fuel_type = gold.fuel_type
                AND pending.market = gold.market
                AND pending.id != gold.id
                RETURNING pending.id;
            """)
            result = await db.execute(dedup_query)
            # NOTE(review): rows are only re-statused to 'merged_duplicate',
            # not deleted, even though the log message says "deleted" — confirm wording.
            logger.info(f"🗑️ {len(result.fetchall())} duplikált várakozót töröltem a sorból (Már van Arany párjuk).")

            await db.commit()
            logger.info("🏆 Optimalizálás befejezve. A sor megtisztítva!")

        except Exception as e:
            await db.rollback()
            logger.error(f"❌ Hiba az optimalizálás során: {e}")
|
||||
|
||||
if __name__ == "__main__":
    # Run a single optimization pass and exit.
    asyncio.run(optimize_queue())
|
||||
108
backend/app/workers/vehicle/vehicle_master_cleaner.py
Normal file
108
backend/app/workers/vehicle/vehicle_master_cleaner.py
Normal file
@@ -0,0 +1,108 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
import json
|
||||
import sys
|
||||
from sqlalchemy import text, update
|
||||
from app.database import AsyncSessionLocal
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s [MASTER-CLEANER] %(message)s', stream=sys.stdout)
|
||||
logger = logging.getLogger("Master-Cleaner")
|
||||
|
||||
# --- REGEX PATTERNS (the "code" we use to find the data inside free text) ---
# Power: a 2-3 digit figure followed by a kW unit, e.g. "110 kW" / "110kw".
KW_PATTERN = re.compile(r'(\d{2,3})\s*(?:kW|kw|kilowatt)', re.IGNORECASE)
# Displacement: a 3-4 digit figure followed by a ccm/cc unit, e.g. "1998 ccm".
CCM_PATTERN = re.compile(r'(\d{3,4})\s*(?:ccm|cm3|cc|cubic)', re.IGNORECASE)
|
||||
|
||||
class MasterCleaner:
    """AI-free bulk cleaner for the vehicle catalog.

    Thought process:
    1. The robot's goal is to clean the ~126k records without invoking the AI.
    2. Pass 1 promotes rows that are already technically complete (Auto-Gold).
    3. Pass 2 extracts missing kW/ccm data from the 'raw_search_context' text
       via regex (KW_PATTERN / CCM_PATTERN at module level).
    4. Pass 3 merges duplicates (based on uix_vmd_precision_v2).
    """

    async def run_audit(self):
        """Run one full audit pass: auto-gold, regex extraction, dedup.

        All three phases share a single transaction: committed at the end,
        rolled back (and logged) on any error.
        """
        async with AsyncSessionLocal() as db:
            try:
                logger.info("🔍 Audit indítása a teljes állományon...")

                # 1. AUTO-GOLD: everything is already filled in (thanks to UltimateSpecs R2/R3).
                # Fastest check: if kW, ccm, fuel and body are present, the row is done.
                gold_query = text("""
                    UPDATE vehicle.vehicle_model_definitions
                    SET status = 'gold_enriched', updated_at = NOW(), source = source || ' + AUDITOR_FIX'
                    WHERE status IN ('awaiting_ai_synthesis', 'unverified')
                    AND power_kw > 0 AND engine_capacity > 0
                    AND fuel_type != 'Unknown' AND body_type IS NOT NULL
                    RETURNING id;
                """)
                res_gold = await db.execute(gold_query)
                logger.info(f"✨ {len(res_gold.fetchall())} járművet találtam, ami már eleve 'Arany' volt.")

                # 2. REGEX EXTRACTION: read through 'raw_search_context'.
                # Target rows where power_kw or engine_capacity is still 0.
                logger.info("🧪 Regex extrakció indítása a szöveges kontextusból...")
                fetch_query = text("""
                    SELECT id, raw_search_context, power_kw, engine_capacity
                    FROM vehicle.vehicle_model_definitions
                    WHERE (power_kw = 0 OR engine_capacity = 0)
                    AND raw_search_context != ''
                    AND status != 'gold_enriched'
                    LIMIT 10000;
                """)

                rows = (await db.execute(fetch_query)).fetchall()
                extracted_count = 0

                for r_id, context, p_kw, e_ccm in rows:
                    updates = {}

                    if p_kw == 0:
                        kw_match = KW_PATTERN.search(context)
                        if kw_match:
                            updates["power_kw"] = int(kw_match.group(1))

                    if e_ccm == 0:
                        ccm_match = CCM_PATTERN.search(context)
                        if ccm_match:
                            updates["engine_capacity"] = int(ccm_match.group(1))

                    if updates:
                        # If we found anything, update the record.
                        # COALESCE keeps the existing value for whichever field
                        # the regex did not match (its bind param is None).
                        stmt = text("""
                            UPDATE vehicle.vehicle_model_definitions
                            SET power_kw = COALESCE(:kw, power_kw),
                                engine_capacity = COALESCE(:ccm, engine_capacity),
                                source = source || ' + REGEX_EXTRACT'
                            WHERE id = :id
                        """)
                        await db.execute(stmt, {"kw": updates.get("power_kw"), "ccm": updates.get("engine_capacity"), "id": r_id})
                        extracted_count += 1

                logger.info(f"📝 {extracted_count} járműnél találtam meg az adatokat a szöveges kontextusban.")

                # 3. DEDUPLICATION: by make + normalized name + model year.
                # NOTE(review): the original comment also listed fuel type, but the
                # SQL below does not compare fuel_type — confirm this is intended.
                logger.info("✂️ Duplikációk összeolvasztása...")
                dedup_query = text("""
                    UPDATE vehicle.vehicle_model_definitions AS p
                    SET status = 'merged_duplicate'
                    FROM vehicle.vehicle_model_definitions AS g
                    WHERE p.status != 'gold_enriched' AND g.status = 'gold_enriched'
                    AND p.make = g.make AND p.normalized_name = g.normalized_name
                    AND p.year_from = g.year_from AND p.id != g.id
                    RETURNING p.id;
                """)
                res_dedup = await db.execute(dedup_query)
                logger.info(f"🗑️ {len(res_dedup.fetchall())} duplikációt távolítottam el.")

                await db.commit()
                logger.info("🏆 A 126k rekord átvizsgálása befejeződött!")

            except Exception as e:
                await db.rollback()
                logger.error(f"❌ Kritikus hiba az audit során: {e}")
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: build a cleaner and run one full audit pass.
    asyncio.run(MasterCleaner().run_audit())
|
||||
@@ -29,7 +29,7 @@ OLLAMA_URL = "http://sf_ollama:11434/api/generate"
|
||||
OLLAMA_MODEL = "qwen2.5-coder:14b" # A 14b paraméteres modell az agy
|
||||
MAX_ATTEMPTS = 3
|
||||
TIMEOUT_SECONDS = 45 # Megemelt timeout a 14b modell lassabb válaszideje miatt
|
||||
BATCH_SIZE = 3 # Maximum 3 párhuzamos AI hívás a CPU fagyás elkerülésére
|
||||
BATCH_SIZE = 10 # Maximum 10 párhuzamos AI hívás a CPU fagyás elkerülésére
|
||||
|
||||
class AlchemistPro:
|
||||
def __init__(self):
|
||||
|
||||
Reference in New Issue
Block a user