Cleanup: MB 2.0 Gap Analysis előtti állapot (adatok kizárva)
This commit is contained in:
Binary file not shown.
Binary file not shown.
@@ -1,5 +1,5 @@
|
||||
from fastapi import APIRouter
|
||||
from app.api.v1.endpoints import auth, catalog, assets, organizations, documents, services, admin
|
||||
from app.api.v1.endpoints import auth, catalog, assets, organizations, documents, services, admin, expenses, evidence
|
||||
|
||||
api_router = APIRouter()
|
||||
|
||||
@@ -23,4 +23,10 @@ api_router.include_router(documents.router, prefix="/documents", tags=["Document
|
||||
|
||||
# --- 🛡️ SENTINEL ADMIN KONTROLL PANEL ---
|
||||
# Ez a rész tette láthatóvá az Admin API-t a felületen
|
||||
api_router.include_router(admin.router, prefix="/admin", tags=["Admin Control Center (Sentinel)"])
|
||||
api_router.include_router(admin.router, prefix="/admin", tags=["Admin Control Center (Sentinel)"])
|
||||
|
||||
# Evidence & OCR Robot 3
|
||||
api_router.include_router(evidence.router, prefix="/evidence", tags=["Evidence & OCR (Robot 3)"])
|
||||
|
||||
# Fleet Expenses TCO
|
||||
api_router.include_router(expenses.router, prefix="/expenses", tags=["Fleet Expenses (TCO)"])
|
||||
Binary file not shown.
@@ -1,115 +1,164 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func
|
||||
from typing import List, Any, Dict
|
||||
from sqlalchemy import select, func, text, delete
|
||||
from typing import List, Any, Dict, Optional
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from app.api import deps
|
||||
from app.models.identity import User, UserRole
|
||||
from app.models import SystemParameter
|
||||
from app.models.system import SystemParameter
|
||||
from app.models.security import PendingAction, ActionStatus
|
||||
from app.models.history import AuditLog, LogSeverity
|
||||
from app.schemas.admin_security import PendingActionResponse, SecurityStatusResponse
|
||||
|
||||
from app.services.security_service import security_service
|
||||
# Feltételezve, hogy a JSON-alapú TranslationService-ed már készen van
|
||||
from app.services.translation_service import TranslationService
|
||||
from pydantic import BaseModel
|
||||
|
||||
class ConfigUpdate(BaseModel):
|
||||
key: str
|
||||
value: Any
|
||||
scope_level: str = "global"
|
||||
scope_id: Optional[str] = None
|
||||
category: str = "general"
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# --- 🛡️ ADMIN JOGOSULTSÁG ELLENŐRZŐ ---
|
||||
async def check_admin_access(current_user: User = Depends(deps.get_current_active_user)):
|
||||
"""Szigorú hozzáférés-ellenőrzés: Csak Admin vagy Superadmin."""
|
||||
if current_user.role not in [UserRole.admin, UserRole.superadmin]:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="Admin jogosultság szükséges!"
|
||||
detail="Sentinel jogosultság szükséges a művelethez!"
|
||||
)
|
||||
return current_user
|
||||
|
||||
# --- 1. SENTINEL: NÉGY SZEM ELV (Approval System) ---
|
||||
# --- 🛰️ 1. SENTINEL: RENDSZERÁLLAPOT ÉS MONITORING ---
|
||||
|
||||
@router.get("/pending-actions", response_model=List[PendingActionResponse])
|
||||
@router.get("/health-monitor", tags=["Sentinel Monitoring"])
|
||||
async def get_system_health(
|
||||
db: AsyncSession = Depends(deps.get_db),
|
||||
admin: User = Depends(check_admin_access)
|
||||
):
|
||||
"""
|
||||
Rendszer pulzusának ellenőrzése (pgAdmin nélkül).
|
||||
Látod a felhasználók eloszlását, az eszközök számát és a kritikus hibákat.
|
||||
"""
|
||||
stats = {}
|
||||
|
||||
# Adatbázis statisztikák (Dynamic counts)
|
||||
user_stats = await db.execute(text("SELECT subscription_plan, count(*) FROM data.users GROUP BY subscription_plan"))
|
||||
stats["user_distribution"] = {row[0]: row[1] for row in user_stats}
|
||||
|
||||
asset_count = await db.execute(text("SELECT count(*) FROM data.assets"))
|
||||
stats["total_assets"] = asset_count.scalar()
|
||||
|
||||
org_count = await db.execute(text("SELECT count(*) FROM data.organizations"))
|
||||
stats["total_organizations"] = org_count.scalar()
|
||||
|
||||
# Biztonsági státusz (Kritikus logok az elmúlt 24 órában)
|
||||
day_ago = datetime.now() - timedelta(days=1)
|
||||
crit_logs = await db.execute(select(func.count(AuditLog.id)).where(
|
||||
AuditLog.severity.in_([LogSeverity.critical, LogSeverity.emergency]),
|
||||
AuditLog.timestamp >= day_ago
|
||||
))
|
||||
stats["critical_alerts_24h"] = crit_logs.scalar() or 0
|
||||
|
||||
return stats
|
||||
|
||||
# --- ⚖️ 2. SENTINEL: NÉGY SZEM ELV (Approval System) ---
|
||||
|
||||
@router.get("/pending-actions", response_model=List[PendingActionResponse], tags=["Sentinel Security"])
|
||||
async def list_pending_actions(
|
||||
db: AsyncSession = Depends(deps.get_db),
|
||||
admin: User = Depends(check_admin_access)
|
||||
):
|
||||
"""Jóváhagyásra váró kritikus kérések listázása."""
|
||||
"""Jóváhagyásra váró kritikus kérések listázása (pl. törlések, rang-emelések)."""
|
||||
stmt = select(PendingAction).where(PendingAction.status == ActionStatus.pending)
|
||||
result = await db.execute(stmt)
|
||||
return result.scalars().all()
|
||||
|
||||
@router.post("/approve/{action_id}")
|
||||
@router.post("/approve/{action_id}", tags=["Sentinel Security"])
|
||||
async def approve_action(
|
||||
action_id: int,
|
||||
db: AsyncSession = Depends(deps.get_db),
|
||||
admin: User = Depends(check_admin_access)
|
||||
):
|
||||
"""Művelet véglegesítése (második admin által)."""
|
||||
"""Művelet véglegesítése. Csak egy második admin hagyhatja jóvá az első kérését."""
|
||||
try:
|
||||
await security_service.approve_action(db, admin.id, action_id)
|
||||
return {"status": "success", "message": "Művelet végrehajtva."}
|
||||
return {"status": "success", "message": "Művelet sikeresen végrehajtva."}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
# --- 2. SENTINEL: BIZTONSÁGI ÖSSZEGZÉS ---
|
||||
# --- ⚙️ 3. DINAMIKUS KONFIGURÁCIÓ (Hierarchical Config) ---
|
||||
|
||||
@router.get("/security-status", response_model=SecurityStatusResponse)
|
||||
async def get_security_status(
|
||||
db: AsyncSession = Depends(deps.get_db),
|
||||
@router.get("/parameters", tags=["Dynamic Configuration"])
|
||||
async def list_all_parameters(
|
||||
db: AsyncSession = Depends(deps.get_db),
|
||||
admin: User = Depends(check_admin_access)
|
||||
):
|
||||
"""Rendszerállapot: Zárolt júzerek és kritikus események."""
|
||||
day_ago = datetime.now() - timedelta(days=1)
|
||||
|
||||
crit_count = (await db.execute(select(func.count(AuditLog.id)).where(
|
||||
AuditLog.severity.in_([LogSeverity.critical, LogSeverity.emergency]),
|
||||
AuditLog.timestamp >= day_ago
|
||||
))).scalar() or 0
|
||||
|
||||
locked_count = (await db.execute(select(func.count(User.id)).where(
|
||||
User.is_active == False, User.is_deleted == False
|
||||
))).scalar() or 0
|
||||
|
||||
return {
|
||||
"total_pending": (await db.execute(select(func.count(PendingAction.id)).where(PendingAction.status == ActionStatus.pending))).scalar() or 0,
|
||||
"critical_logs_last_24h": crit_count,
|
||||
"emergency_locks_active": locked_count
|
||||
}
|
||||
|
||||
# --- 3. RENDSZERBEÁLLÍTÁSOK (Dynamic Config) ---
|
||||
|
||||
@router.get("/settings")
|
||||
async def get_settings(db: AsyncSession = Depends(deps.get_db), admin: User = Depends(check_admin_access)):
|
||||
"""Minden globális paraméter (Gamification, Limitek stb.) lekérése."""
|
||||
"""Minden globális és lokális paraméter (Limitek, XP szorzók stb.) lekérése."""
|
||||
result = await db.execute(select(SystemParameter))
|
||||
return result.scalars().all()
|
||||
|
||||
@router.put("/settings/{key}")
|
||||
async def update_setting(key: str, value: Any, db: AsyncSession = Depends(deps.get_db), admin: User = Depends(check_admin_access)):
|
||||
"""Paraméter módosítása és Audit Log generálása."""
|
||||
stmt = select(SystemParameter).where(SystemParameter.key == key)
|
||||
param = (await db.execute(stmt)).scalar_one_or_none()
|
||||
if not param:
|
||||
raise HTTPException(status_code=404, detail="Nincs ilyen beállítás.")
|
||||
@router.post("/parameters", tags=["Dynamic Configuration"])
|
||||
async def set_parameter(
|
||||
config: ConfigUpdate, # <--- Most már egy objektumot várunk a Body-ban
|
||||
db: AsyncSession = Depends(deps.get_db),
|
||||
admin: User = Depends(check_admin_access)
|
||||
):
|
||||
"""
|
||||
Paraméter beállítása. A Swaggerben most már látsz egy JSON ablakot a 'value' számára!
|
||||
"""
|
||||
query = text("""
|
||||
INSERT INTO data.system_parameters (key, value, scope_level, scope_id, category, last_modified_by)
|
||||
VALUES (:key, :val, :sl, :sid, :cat, :user)
|
||||
ON CONFLICT (key, scope_level, scope_id)
|
||||
DO UPDATE SET
|
||||
value = EXCLUDED.value,
|
||||
category = EXCLUDED.category,
|
||||
last_modified_by = EXCLUDED.last_modified_by,
|
||||
updated_at = now()
|
||||
""")
|
||||
|
||||
old_val = param.value
|
||||
param.value = value
|
||||
|
||||
await security_service.log_event(
|
||||
db, admin.id, action="SETTING_CHANGE", severity=LogSeverity.warning,
|
||||
old_data={key: old_val}, new_data={key: value}
|
||||
)
|
||||
await db.execute(query, {
|
||||
"key": config.key,
|
||||
"val": config.value, # Itt bármilyen komplex JSON-t átadhatsz
|
||||
"sl": config.scope_level,
|
||||
"sid": config.scope_id,
|
||||
"cat": config.category,
|
||||
"user": admin.email
|
||||
})
|
||||
await db.commit()
|
||||
return {"status": "success", "key": key, "new_value": value}
|
||||
return {"status": "success", "message": f"'{config.key}' frissítve."}
|
||||
|
||||
# --- 🌍 JSON FORDÍTÁSOK KEZELÉSE ---
|
||||
@router.delete("/parameters/{key}", tags=["Dynamic Configuration"])
|
||||
async def delete_parameter(
|
||||
key: str,
|
||||
scope_level: str = "global",
|
||||
scope_id: Optional[str] = None,
|
||||
db: AsyncSession = Depends(deps.get_db),
|
||||
admin: User = Depends(check_admin_access)
|
||||
):
|
||||
"""Egy adott konfiguráció törlése (visszaállás az eggyel magasabb szintű alapértelmezésre)."""
|
||||
stmt = delete(SystemParameter).where(
|
||||
SystemParameter.key == key,
|
||||
SystemParameter.scope_level == scope_level,
|
||||
SystemParameter.scope_id == scope_id
|
||||
)
|
||||
await db.execute(stmt)
|
||||
await db.commit()
|
||||
return {"status": "success", "message": "Konfiguráció törölve."}
|
||||
|
||||
@router.post("/translations/sync")
|
||||
# --- 🌍 4. UTILITY: FORDÍTÁSOK ---
|
||||
|
||||
@router.post("/translations/sync", tags=["System Utilities"])
|
||||
async def sync_translations_to_json(
|
||||
db: AsyncSession = Depends(deps.get_db),
|
||||
admin: User = Depends(check_admin_access)
|
||||
):
|
||||
"""Szinkronizálja az adatbázisban tárolt fordításokat a JSON fájlokba."""
|
||||
# A TranslationService-ben kell megírni a fájlbaíró logikát
|
||||
await TranslationService.export_to_json(db)
|
||||
return {"message": "JSON nyelvi fájlok frissítve."}
|
||||
return {"message": "JSON nyelvi fájlok frissítve a fájlrendszerben."}
|
||||
66
backend/app/api/v1/endpoints/evidence.py
Normal file
66
backend/app/api/v1/endpoints/evidence.py
Normal file
@@ -0,0 +1,66 @@
|
||||
# backend/app/api/v1/endpoints/evidence.py
|
||||
from fastapi import APIRouter, UploadFile, File, HTTPException, status, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import text
|
||||
from app.api.deps import get_db, get_current_user
|
||||
from app.schemas.evidence import OcrResponse
|
||||
from app.services.image_processor import DocumentImageProcessor
|
||||
from app.services.ai_ocr_service import AiOcrService
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@router.post("/scan-registration", response_model=OcrResponse)
|
||||
async def scan_registration_document(
|
||||
file: UploadFile = File(...),
|
||||
db: AsyncSession = Depends(get_db),
|
||||
current_user = Depends(get_current_user)
|
||||
):
|
||||
"""
|
||||
Forgalmi engedély feldolgozása dinamikus, rendszer-szintű korlátok ellenőrzésével.
|
||||
"""
|
||||
try:
|
||||
# 1. 🔍 DINAMIKUS LIMIT LEKÉRDEZÉS (Hierarchikus system_parameters táblából)
|
||||
limit_query = text("""
|
||||
SELECT (value->>:plan)::int
|
||||
FROM data.system_parameters
|
||||
WHERE key = 'VEHICLE_LIMIT'
|
||||
AND scope_level = 'global'
|
||||
AND is_active = true
|
||||
""")
|
||||
limit_res = await db.execute(limit_query, {"plan": current_user.subscription_plan})
|
||||
max_allowed = limit_res.scalar() or 1 # Ha nincs paraméter, 1-re korlátozunk a biztonság kedvéért
|
||||
|
||||
# 2. 📊 FELHASZNÁLÓI JÁRMŰSZÁM ELLENŐRZÉSE
|
||||
count_query = text("SELECT count(*) FROM data.assets WHERE operator_person_id = :p_id")
|
||||
current_count = (await db.execute(count_query, {"p_id": current_user.person_id})).scalar()
|
||||
|
||||
if current_count >= max_allowed:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=f"Csomaglimit túllépés. A jelenlegi '{current_user.subscription_plan}' csomagod max {max_allowed} járművet engedélyez."
|
||||
)
|
||||
|
||||
# 3. 📸 KÉPFELDOLGOZÁS ÉS AI OCR
|
||||
raw_bytes = await file.read()
|
||||
clean_bytes = DocumentImageProcessor.process_for_ocr(raw_bytes)
|
||||
|
||||
if not clean_bytes:
|
||||
raise ValueError("A kép optimalizálása az OCR számára nem sikerült.")
|
||||
|
||||
extracted_data = await AiOcrService.extract_registration_data(clean_bytes)
|
||||
|
||||
return OcrResponse(
|
||||
success=True,
|
||||
message=f"Sikeres adatkivonás ({current_user.subscription_plan} csomag).",
|
||||
data=extracted_data
|
||||
)
|
||||
|
||||
except HTTPException as he:
|
||||
# FastAPI hibák továbbdobása (pl. 403 Forbidden)
|
||||
raise he
|
||||
except Exception as e:
|
||||
# Általános hiba kezelése korrekt indentálással
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Robot 3 feldolgozási hiba: {str(e)}"
|
||||
)
|
||||
Binary file not shown.
Binary file not shown.
@@ -2,21 +2,33 @@
|
||||
from app.db.base_class import Base # noqa
|
||||
|
||||
# Közvetlen importok (HOZZÁADVA az audit és sales modellek)
|
||||
from app.models.address import Address, GeoPostalCode, GeoStreet, GeoStreetType, Branch # noqa
|
||||
from app.models.identity import User, Person, VerificationToken, Wallet # noqa
|
||||
from app.models.organization import Organization, OrganizationMember, OrganizationSalesAssignment # noqa
|
||||
from app.models.address import Address, GeoPostalCode, GeoStreet, GeoStreetType, Branch, Rating # noqa
|
||||
|
||||
from app.models.identity import Person, User, Wallet, VerificationToken, SocialAccount # noqa
|
||||
|
||||
from app.models.organization import Organization, OrganizationMember, OrganizationFinancials, OrganizationSalesAssignment # noqa
|
||||
|
||||
from app.models.service import ServiceProfile, ExpertiseTag, ServiceExpertise, ServiceStaging, DiscoveryParameter # noqa
|
||||
|
||||
from app.models.vehicle_definitions import VehicleType, VehicleModelDefinition, FeatureDefinition # noqa
|
||||
|
||||
from app.models.audit import SecurityAuditLog, OperationalLog, FinancialLedger # noqa <--- KRITIKUS!
|
||||
|
||||
from app.models.asset import ( # noqa
|
||||
Asset, AssetCatalog, AssetCost, AssetEvent,
|
||||
AssetFinancials, AssetTelemetry, AssetReview, ExchangeRate
|
||||
)
|
||||
from app.models.gamification import ( # noqa
|
||||
PointRule, LevelConfig, UserStats, Badge, UserBadge, Rating, PointsLedger
|
||||
)
|
||||
|
||||
from app.models.gamification import PointRule, LevelConfig, UserStats, Badge, UserBadge, PointsLedger # noqa
|
||||
|
||||
from app.models.system import SystemParameter # noqa (system.py használata)
|
||||
|
||||
from app.models.history import AuditLog, VehicleOwnership # noqa
|
||||
|
||||
from app.models.document import Document # noqa
|
||||
|
||||
from app.models.translation import Translation # noqa
|
||||
|
||||
from app.models.core_logic import ( # noqa
|
||||
SubscriptionTier, OrganizationSubscription, CreditTransaction, ServiceSpecialty
|
||||
)
|
||||
|
||||
@@ -3,11 +3,12 @@ from app.core.config import settings
|
||||
from typing import AsyncGenerator
|
||||
|
||||
engine = create_async_engine(
|
||||
settings.DATABASE_URL, # A te eredeti kulcsod
|
||||
echo=getattr(settings, "DEBUG", False),
|
||||
settings.DATABASE_URL,
|
||||
echo=False, # Termelésben ne legyen True a log-áradat miatt
|
||||
future=True,
|
||||
pool_size=20,
|
||||
max_overflow=10
|
||||
pool_size=30, # Megemelve a Researcher 15-20 szála miatt
|
||||
max_overflow=20, # Extra rugalmasság csúcsidőben
|
||||
pool_pre_ping=True # Megakadályozza a "Server closed connection" hibákat
|
||||
)
|
||||
|
||||
AsyncSessionLocal = async_sessionmaker(
|
||||
@@ -16,7 +17,7 @@ AsyncSessionLocal = async_sessionmaker(
|
||||
expire_on_commit=False,
|
||||
autoflush=False
|
||||
)
|
||||
# Ez a sor kell, mert a main.py és a többiek ezen a néven keresik
|
||||
|
||||
SessionLocal = AsyncSessionLocal
|
||||
|
||||
async def get_db() -> AsyncGenerator[AsyncSession, None]:
|
||||
|
||||
@@ -6,10 +6,12 @@ from starlette.middleware.sessions import SessionMiddleware # ÚJ
|
||||
from app.api.v1.api import api_router
|
||||
from app.core.config import settings
|
||||
|
||||
# Statikus mappák létrehozása induláskor
|
||||
os.makedirs("static/previews", exist_ok=True)
|
||||
|
||||
app = FastAPI(
|
||||
title="Service Finder API",
|
||||
description="Traffic Ecosystem, Asset Vault & AI Evidence Processing",
|
||||
version="2.0.0",
|
||||
openapi_url="/api/v1/openapi.json",
|
||||
docs_url="/docs"
|
||||
@@ -21,6 +23,7 @@ app.add_middleware(
|
||||
secret_key=settings.SECRET_KEY
|
||||
)
|
||||
|
||||
# --- CORS BEÁLLÍTÁSOK ---
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=[
|
||||
@@ -34,13 +37,30 @@ app.add_middleware(
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
# Statikus fájlok kiszolgálása (képek, letöltések)
|
||||
app.mount("/static", StaticFiles(directory="static"), name="static")
|
||||
|
||||
# A V1-es API router bekötése a /api/v1 prefix alá
|
||||
app.include_router(api_router, prefix="/api/v1")
|
||||
|
||||
@app.get("/")
|
||||
# --- ALAPVETŐ VÉGPONTOK ---
|
||||
@app.get("/", tags=["System"])
|
||||
async def root():
|
||||
return {
|
||||
"status": "online",
|
||||
"message": "Service Finder Master System v2.0",
|
||||
"features": ["Google Auth Enabled", "Asset Vault", "Org Onboarding"]
|
||||
}
|
||||
"features": [
|
||||
"Google Auth Enabled",
|
||||
"Asset Vault",
|
||||
"Org Onboarding",
|
||||
"AI Evidence OCR (Robot 3)",
|
||||
"Fleet Expenses (TCO)"
|
||||
]
|
||||
}
|
||||
|
||||
@app.get("/health", tags=["System"])
|
||||
async def health_check():
|
||||
"""
|
||||
Monitoring és Load Balancer egészségügyi ellenőrző végpont.
|
||||
"""
|
||||
return {"status": "ok", "message": "Service Finder API is running flawlessly."}
|
||||
@@ -3,10 +3,10 @@
|
||||
from app.db.base_class import Base
|
||||
|
||||
# Identitás és Jogosultság
|
||||
from .identity import User, Person, Wallet, UserRole, VerificationToken, SocialAccount
|
||||
from .identity import Person, User, Wallet, VerificationToken, SocialAccount
|
||||
|
||||
# Szervezeti struktúra (HOZZÁADVA: OrganizationSalesAssignment)
|
||||
from .organization import Organization, OrganizationMember, OrganizationSalesAssignment
|
||||
from .organization import Organization, OrganizationMember, OrganizationFinancials, OrganizationSalesAssignment
|
||||
|
||||
# Járművek és Eszközök (Digital Twin)
|
||||
from .asset import (
|
||||
@@ -15,13 +15,13 @@ from .asset import (
|
||||
)
|
||||
|
||||
# Szerviz és Szakértelem
|
||||
from .service import ServiceProfile, ExpertiseTag, ServiceExpertise, ServiceStaging
|
||||
from .service import ServiceProfile, ExpertiseTag, ServiceExpertise, ServiceStaging, DiscoveryParameter
|
||||
|
||||
# Földrajzi adatok és Címek
|
||||
from .address import Address, GeoPostalCode, GeoStreet, GeoStreetType, Branch
|
||||
from .address import Address, GeoPostalCode, GeoStreet, GeoStreetType, Branch, Rating
|
||||
|
||||
# Gamification és Economy
|
||||
from .gamification import PointRule, LevelConfig, UserStats, Badge, UserBadge, Rating, PointsLedger
|
||||
from .gamification import PointRule, LevelConfig, UserStats, Badge, UserBadge, PointsLedger
|
||||
|
||||
# Rendszerkonfiguráció (HASZNÁLJUK a frissített system.py-t!)
|
||||
from .system import SystemParameter
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,12 +1,11 @@
|
||||
import uuid
|
||||
# Hozzáadva: Boolean, text, func
|
||||
from sqlalchemy import Column, String, Integer, ForeignKey, Text, DateTime, Float, Boolean, text, func
|
||||
# PostgreSQL specifikus típusok
|
||||
from sqlalchemy import Column, String, Integer, ForeignKey, Text, DateTime, Float, Boolean, text, func, Numeric, Index
|
||||
from sqlalchemy.dialects.postgresql import UUID as PG_UUID, JSONB
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.orm import relationship, foreign
|
||||
from app.db.base_class import Base
|
||||
|
||||
class GeoPostalCode(Base):
|
||||
"""Irányítószám alapú földrajzi kereső tábla."""
|
||||
__tablename__ = "geo_postal_codes"
|
||||
__table_args__ = {"schema": "data"}
|
||||
id = Column(Integer, primary_key=True)
|
||||
@@ -15,6 +14,7 @@ class GeoPostalCode(Base):
|
||||
city = Column(String(100), nullable=False)
|
||||
|
||||
class GeoStreet(Base):
|
||||
"""Utcajegyzék tábla."""
|
||||
__tablename__ = "geo_streets"
|
||||
__table_args__ = {"schema": "data"}
|
||||
id = Column(Integer, primary_key=True)
|
||||
@@ -22,6 +22,7 @@ class GeoStreet(Base):
|
||||
name = Column(String(200), nullable=False)
|
||||
|
||||
class GeoStreetType(Base):
|
||||
"""Közterület jellege (utca, út, köz stb.)."""
|
||||
__tablename__ = "geo_street_types"
|
||||
__table_args__ = {"schema": "data"}
|
||||
id = Column(Integer, primary_key=True)
|
||||
@@ -49,7 +50,6 @@ class Address(Base):
|
||||
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
# Add to /app/models/address.py
|
||||
class Branch(Base):
|
||||
"""
|
||||
Telephely entitás. A fizikai helyszín, ahol a szolgáltatás vagy flotta-kezelés zajlik.
|
||||
@@ -62,7 +62,7 @@ class Branch(Base):
|
||||
organization_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=False)
|
||||
address_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.addresses.id"), nullable=True)
|
||||
|
||||
name = Column(String(100), nullable=False) # pl. "Központi iroda", "Dunakeszi Szerviz"
|
||||
name = Column(String(100), nullable=False)
|
||||
is_main = Column(Boolean, default=False)
|
||||
|
||||
# Részletes címadatok (Denormalizált a gyors kereséshez)
|
||||
@@ -74,9 +74,8 @@ class Branch(Base):
|
||||
stairwell = Column(String(20))
|
||||
floor = Column(String(20))
|
||||
door = Column(String(20))
|
||||
hrsz = Column(String(50)) # Helyrajzi szám
|
||||
hrsz = Column(String(50))
|
||||
|
||||
# Telephely specifikus adatok
|
||||
opening_hours = Column(JSONB, server_default=text("'{}'::jsonb"))
|
||||
branch_rating = Column(Float, default=0.0)
|
||||
|
||||
@@ -86,5 +85,34 @@ class Branch(Base):
|
||||
|
||||
organization = relationship("Organization", back_populates="branches")
|
||||
address = relationship("Address")
|
||||
# Kapcsolat a szerviz értékelésekkel
|
||||
reviews = relationship("Rating", primaryjoin="and_(Branch.id==foreign(Rating.target_id), Rating.target_type=='branch')")
|
||||
|
||||
# JAVÍTOTT KAPCSOLAT: target_branch_id használata target_id helyett
|
||||
reviews = relationship(
|
||||
"Rating",
|
||||
primaryjoin="and_(Branch.id==foreign(Rating.target_branch_id))"
|
||||
)
|
||||
|
||||
class Rating(Base):
|
||||
"""Univerzális értékelési rendszer - v1.3.1"""
|
||||
__tablename__ = "ratings"
|
||||
__table_args__ = (
|
||||
Index('idx_rating_org', 'target_organization_id'),
|
||||
Index('idx_rating_user', 'target_user_id'),
|
||||
Index('idx_rating_branch', 'target_branch_id'),
|
||||
{"schema": "data"}
|
||||
)
|
||||
# Az ID most már Integer, ahogy kérted a statisztikákhoz
|
||||
id = Column(Integer, primary_key=True)
|
||||
author_id = Column(Integer, ForeignKey("data.users.id"), nullable=False)
|
||||
|
||||
# Explicit célpontok a típusbiztonság és gyorsaság érdekében
|
||||
target_organization_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=True)
|
||||
target_user_id = Column(Integer, ForeignKey("data.users.id"), nullable=True)
|
||||
target_branch_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.branches.id"), nullable=True)
|
||||
|
||||
score = Column(Numeric(3, 2), nullable=False) # 1.00 - 5.00
|
||||
comment = Column(Text)
|
||||
images = Column(JSONB, server_default=text("'[]'::jsonb"))
|
||||
|
||||
is_verified = Column(Boolean, default=False)
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
@@ -1,5 +1,5 @@
|
||||
import uuid
|
||||
from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, Numeric, text, Text, UniqueConstraint
|
||||
from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, Numeric, text, Text, UniqueConstraint, BigInteger
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.dialects.postgresql import UUID as PG_UUID, JSONB
|
||||
from sqlalchemy.sql import func
|
||||
@@ -16,7 +16,6 @@ class AssetCatalog(Base):
|
||||
)
|
||||
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
# Kapcsolat az MDM-hez
|
||||
master_definition_id = Column(Integer, ForeignKey("data.vehicle_model_definitions.id"), nullable=True)
|
||||
|
||||
make = Column(String, index=True, nullable=False)
|
||||
@@ -27,18 +26,15 @@ class AssetCatalog(Base):
|
||||
year_to = Column(Integer)
|
||||
vehicle_class = Column(String)
|
||||
fuel_type = Column(String, index=True)
|
||||
# ÚJ MEZŐ: Kapcsolat az MDM-hez
|
||||
|
||||
|
||||
master_definition = relationship("VehicleModelDefinition", back_populates="variants")
|
||||
# --- ÚJ OSZLOPOK (Ezeket add hozzá!) ---
|
||||
|
||||
power_kw = Column(Integer, index=True)
|
||||
engine_capacity = Column(Integer, index=True)
|
||||
max_weight_kg = Column(Integer)
|
||||
axle_count = Column(Integer)
|
||||
euro_class = Column(String(20))
|
||||
body_type = Column(String(100))
|
||||
# ---------------------------------------
|
||||
|
||||
engine_code = Column(String)
|
||||
factory_data = Column(JSONB, server_default=text("'{}'::jsonb"))
|
||||
@@ -56,18 +52,25 @@ class Asset(Base):
|
||||
current_organization_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=True)
|
||||
catalog_id = Column(Integer, ForeignKey("data.vehicle_catalog.id"))
|
||||
|
||||
# Moderációs mezők a Robot 3 (OCR) számára
|
||||
is_verified = Column(Boolean, default=False)
|
||||
verification_method = Column(String(20)) # 'manual', 'ocr', 'vin_api'
|
||||
verification_notes = Column(Text, nullable=True) # Eltérések jegyzőkönyve
|
||||
catalog_match_score = Column(Numeric(5, 2), nullable=True) # 0-100% egyezési arány
|
||||
verification_method = Column(String(20))
|
||||
verification_notes = Column(Text, nullable=True)
|
||||
catalog_match_score = Column(Numeric(5, 2), nullable=True)
|
||||
|
||||
status = Column(String(20), default="active")
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
||||
|
||||
# --- KAPCSOLATOK (A kettőzött current_org törölve, pontosítva) ---
|
||||
catalog = relationship("AssetCatalog", back_populates="assets")
|
||||
current_org = relationship("Organization")
|
||||
|
||||
# 1. Jelenlegi szervezet (Üzemeltető telephely)
|
||||
current_org = relationship(
|
||||
"Organization",
|
||||
primaryjoin="Asset.current_organization_id == Organization.id",
|
||||
foreign_keys="[Asset.current_organization_id]"
|
||||
)
|
||||
|
||||
financials = relationship("AssetFinancials", back_populates="asset", uselist=False)
|
||||
telemetry = relationship("AssetTelemetry", back_populates="asset", uselist=False)
|
||||
assignments = relationship("AssetAssignment", back_populates="asset")
|
||||
@@ -76,6 +79,43 @@ class Asset(Base):
|
||||
reviews = relationship("AssetReview", back_populates="asset")
|
||||
ownership_history = relationship("VehicleOwnership", back_populates="vehicle")
|
||||
|
||||
registration_uuid = Column(PG_UUID(as_uuid=True), default=uuid.uuid4, index=True, nullable=False)
|
||||
is_corporate = Column(Boolean, default=False, server_default=text("false"))
|
||||
|
||||
# Tulajdonos és Üzembentartó oszlopok
|
||||
owner_person_id = Column(BigInteger, ForeignKey("data.persons.id"), nullable=True)
|
||||
owner_org_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=True)
|
||||
operator_person_id = Column(BigInteger, ForeignKey("data.persons.id"), nullable=True)
|
||||
operator_org_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=True)
|
||||
|
||||
# 2. Tulajdonos szervezet (Kapcsolat pótolva)
|
||||
owner_org = relationship(
|
||||
"Organization",
|
||||
primaryjoin="Asset.owner_org_id == Organization.id",
|
||||
foreign_keys="[Asset.owner_org_id]"
|
||||
)
|
||||
|
||||
# 3. Üzembentartó szervezet
|
||||
operator_org = relationship(
|
||||
"Organization",
|
||||
primaryjoin="Asset.operator_org_id == Organization.id",
|
||||
foreign_keys="[Asset.operator_org_id]"
|
||||
)
|
||||
|
||||
# 4. Tulajdonos magánszemély
|
||||
owner_person = relationship(
|
||||
"Person",
|
||||
primaryjoin="Asset.owner_person_id == Person.id",
|
||||
foreign_keys="[Asset.owner_person_id]"
|
||||
)
|
||||
|
||||
# 5. Üzembentartó magánszemély
|
||||
operator_person = relationship(
|
||||
"Person",
|
||||
primaryjoin="Asset.operator_person_id == Person.id",
|
||||
foreign_keys="[Asset.operator_person_id]"
|
||||
)
|
||||
|
||||
class AssetFinancials(Base):
|
||||
__tablename__ = "asset_financials"
|
||||
__table_args__ = {"schema": "data"}
|
||||
@@ -117,17 +157,14 @@ class AssetAssignment(Base):
|
||||
id = Column(PG_UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
asset_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.assets.id"), nullable=False)
|
||||
organization_id = Column(Integer, ForeignKey("data.organizations.id"), nullable=False)
|
||||
|
||||
# ÚJ: Telephelyi hozzárendelés
|
||||
branch_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.branches.id"), nullable=True)
|
||||
|
||||
assigned_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
released_at = Column(DateTime(timezone=True), nullable=True)
|
||||
status = Column(String(30), default="active")
|
||||
|
||||
asset = relationship("Asset", back_populates="assignments")
|
||||
organization = relationship("Organization")
|
||||
branch = relationship("Branch") # Új kapcsolat
|
||||
branch = relationship("Branch")
|
||||
|
||||
class AssetEvent(Base):
|
||||
__tablename__ = "asset_events"
|
||||
@@ -138,6 +175,7 @@ class AssetEvent(Base):
|
||||
recorded_mileage = Column(Integer)
|
||||
data = Column(JSONB, server_default=text("'{}'::jsonb"))
|
||||
asset = relationship("Asset", back_populates="events")
|
||||
registration_uuid = Column(PG_UUID(as_uuid=True), index=True, nullable=True)
|
||||
|
||||
class AssetCost(Base):
|
||||
__tablename__ = "asset_costs"
|
||||
@@ -159,6 +197,7 @@ class AssetCost(Base):
|
||||
asset = relationship("Asset", back_populates="costs")
|
||||
organization = relationship("Organization")
|
||||
driver = relationship("User")
|
||||
registration_uuid = Column(PG_UUID(as_uuid=True), index=True, nullable=True)
|
||||
|
||||
class ExchangeRate(Base):
|
||||
__tablename__ = "exchange_rates"
|
||||
@@ -169,23 +208,17 @@ class ExchangeRate(Base):
|
||||
rate = Column(Numeric(18, 6), nullable=False)
|
||||
|
||||
class CatalogDiscovery(Base):
|
||||
"""
|
||||
Discovery tábla: Ide gyűjtjük a piaci 'neveket' (pl. Citroen C3).
|
||||
A Robot innen indulva keresi meg az összes létező technikai variánst.
|
||||
"""
|
||||
__tablename__ = "catalog_discovery"
|
||||
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
make = Column(String(100), nullable=False, index=True)
|
||||
model = Column(String(100), nullable=False, index=True)
|
||||
vehicle_class = Column(String(50), index=True) # car, motorcycle, truck, stb.
|
||||
source = Column(String(50)) # 'hasznaltauto', 'mobile.de'
|
||||
vehicle_class = Column(String(50), index=True)
|
||||
source = Column(String(50))
|
||||
status = Column(String(20), server_default=text("'pending'"), index=True)
|
||||
attempts = Column(Integer, default=0)
|
||||
last_attempt = Column(DateTime(timezone=True))
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
# EGYESÍTETT __table_args__
|
||||
__table_args__ = (
|
||||
UniqueConstraint('make', 'model', 'vehicle_class', name='_make_model_class_uc'),
|
||||
{"schema": "data"}
|
||||
|
||||
@@ -81,12 +81,3 @@ class UserBadge(Base):
|
||||
|
||||
user: Mapped["User"] = relationship("User")
|
||||
|
||||
class Rating(Base):
|
||||
__tablename__ = "ratings"
|
||||
__table_args__ = SCHEMA_ARGS
|
||||
id: Mapped[uuid.UUID] = mapped_column(PG_UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
|
||||
author_id: Mapped[int] = mapped_column(Integer, ForeignKey("data.users.id"))
|
||||
target_type: Mapped[str] = mapped_column(String(20))
|
||||
target_id: Mapped[uuid.UUID] = mapped_column(PG_UUID(as_uuid=True))
|
||||
score: Mapped[int] = mapped_column(Integer)
|
||||
comment: Mapped[Optional[str]] = mapped_column(String)
|
||||
@@ -32,6 +32,9 @@ class Organization(Base):
|
||||
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
address_id = Column(PG_UUID(as_uuid=True), ForeignKey("data.addresses.id"), nullable=True)
|
||||
|
||||
is_anonymized = Column(Boolean, default=False, server_default=text("false"))
|
||||
anonymized_at = Column(DateTime(timezone=True), nullable=True)
|
||||
|
||||
full_name = Column(String, nullable=False) # Hivatalos név
|
||||
name = Column(String, nullable=False) # Rövid név
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import uuid
|
||||
from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, JSON, text, Text, Float
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy import Column, Integer, String, Boolean, DateTime, ForeignKey, JSON, text, Text, Float, Index, Numeric
|
||||
from sqlalchemy.orm import relationship, backref
|
||||
from sqlalchemy.dialects.postgresql import UUID as PG_UUID, JSONB
|
||||
from geoalchemy2 import Geometry # PostGIS támogatás
|
||||
from sqlalchemy.sql import func
|
||||
@@ -8,45 +8,78 @@ from app.db.base_class import Base
|
||||
|
||||
class ServiceProfile(Base):
|
||||
"""
|
||||
Szerviz szolgáltató kiterjesztett adatai.
|
||||
Szerviz szolgáltató kiterjesztett adatai (v1.3.1).
|
||||
Egy Organization-höz (org_type='service') kapcsolódik.
|
||||
Támogatja a hierarchiát (Franchise/Telephely) és az automatizált dúsítást.
|
||||
"""
|
||||
__tablename__ = "service_profiles"
|
||||
__table_args__ = {"schema": "data"}
|
||||
__table_args__ = (
|
||||
# Egyedi ujjlenyomat index a robot számára a duplikációk elkerülésére
|
||||
Index('idx_service_fingerprint', 'fingerprint', unique=True),
|
||||
{"schema": "data"}
|
||||
)
|
||||
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
|
||||
# --- KAPCSOLAT A CÉGES IKERHEZ (Twin) ---
|
||||
organization_id = Column(Integer, ForeignKey("data.organizations.id"), unique=True)
|
||||
|
||||
# --- HIERARCHIA (Fa struktúra) ---
|
||||
# Ez tárolja a szülő egység ID-ját (pl. hálózat központja)
|
||||
parent_id = Column(Integer, ForeignKey("data.service_profiles.id"), nullable=True)
|
||||
|
||||
# --- ROBOT IDENTITÁS ---
|
||||
# Normalize(Név + Város + Utca) hash, hogy ne legyen duplikáció
|
||||
fingerprint = Column(String(255), nullable=False, index=True)
|
||||
|
||||
# PostGIS GPS pont (SRID 4326 = WGS84 koordináták)
|
||||
location = Column(Geometry(geometry_type='POINT', srid=4326), index=True)
|
||||
|
||||
# Állapotkezelés: ghost, active, flagged, inactive
|
||||
# Állapotkezelés: ghost (robot találta), active, flagged, inactive
|
||||
status = Column(String(20), server_default=text("'ghost'"), index=True)
|
||||
last_audit_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
last_audit_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
|
||||
|
||||
# --- MAGÁNNYOMOZÓ (Deep Enrichment) ADATOK ---
|
||||
# --- GOOGLE ÉS KÜLSŐ ADATOK ---
|
||||
google_place_id = Column(String(100), unique=True)
|
||||
rating = Column(Float)
|
||||
user_ratings_total = Column(Integer)
|
||||
|
||||
# Bentley vs BMW logika: JSONB a gyors, márkaszintű szűréshez
|
||||
# Példa: {"brands": ["Bentley", "Audi"], "specialty": ["engine", "tuning"]}
|
||||
# --- MÉLYFÚRÁS (Deep Enrichment) ADATOK ---
|
||||
# AI elemzés: {"tone": "barátságos", "pricing": "közép", "reliability": "magas"}
|
||||
vibe_analysis = Column(JSONB, server_default=text("'{}'::jsonb"))
|
||||
|
||||
# Közösségi háló: {"facebook": "url", "tiktok": "url", "insta": "url"}
|
||||
social_links = Column(JSONB, server_default=text("'{}'::jsonb"))
|
||||
|
||||
# Speciális szűrő címkék: {"brands": ["Yamaha", "Suzuki"], "specialty": ["engine", "tuning"]}
|
||||
specialization_tags = Column(JSONB, server_default=text("'{}'::jsonb"))
|
||||
|
||||
# Trust Engine (Bot Discovery=30, User Entry=50, Admin/Partner=100)
|
||||
trust_score = Column(Integer, default=30)
|
||||
is_verified = Column(Boolean, default=False)
|
||||
verification_log = Column(JSON, server_default=text("'{}'::jsonb"))
|
||||
verification_log = Column(JSONB, server_default=text("'{}'::jsonb"))
|
||||
|
||||
opening_hours = Column(JSON, server_default=text("'{}'::jsonb"))
|
||||
# --- ELÉRHETŐSÉG ---
|
||||
opening_hours = Column(JSONB, server_default=text("'{}'::jsonb"))
|
||||
contact_phone = Column(String)
|
||||
contact_email = Column(String)
|
||||
website = Column(String)
|
||||
bio = Column(Text)
|
||||
|
||||
# Kapcsolatok
|
||||
organization = relationship("Organization")
|
||||
# --- KAPCSOLATOK ---
|
||||
organization = relationship("Organization", back_populates="service_profile")
|
||||
expertises = relationship("ServiceExpertise", back_populates="service")
|
||||
|
||||
# --- ÖNMAGÁRA HIVATKOZÓ KAPCSOLAT (Hierarchia) ---
|
||||
sub_services = relationship(
|
||||
"ServiceProfile",
|
||||
backref=backref("parent_service", remote_side=[id]),
|
||||
cascade="all, delete-orphan"
|
||||
)
|
||||
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
||||
|
||||
class ExpertiseTag(Base):
|
||||
"""Szakmai szempontok taxonómiája."""
|
||||
__tablename__ = "expertise_tags"
|
||||
@@ -74,56 +107,57 @@ class ServiceExpertise(Base):
|
||||
class ServiceStaging(Base):
|
||||
"""
|
||||
Átmeneti tábla a Hunter (n8n/scraping) adatoknak.
|
||||
A címek itt már darabolva (IRSZ, Város, Utca, Házszám) szerepelnek
|
||||
a jobb kereshetőség és validálás érdekében.
|
||||
"""
|
||||
__tablename__ = "service_staging"
|
||||
__table_args__ = {"schema": "data"}
|
||||
__table_args__ = (
|
||||
Index('idx_staging_fingerprint', 'fingerprint', unique=True),
|
||||
{"schema": "data"}
|
||||
)
|
||||
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
|
||||
# --- Alapadatok ---
|
||||
name = Column(String, nullable=False, index=True)
|
||||
|
||||
# --- Strukturált cím adatok (A kérésedre bontva) ---
|
||||
# --- Strukturált cím adatok ---
|
||||
postal_code = Column(String(10), index=True)
|
||||
city = Column(String(100), index=True)
|
||||
street_name = Column(String(150))
|
||||
street_type = Column(String(50)) # utca, út, tér...
|
||||
street_type = Column(String(50))
|
||||
house_number = Column(String(20))
|
||||
stairwell = Column(String(20)) # lépcsőház
|
||||
floor = Column(String(20)) # emelet
|
||||
door = Column(String(20)) # ajtó
|
||||
hrsz = Column(String(50)) # helyrajzi szám
|
||||
stairwell = Column(String(20))
|
||||
floor = Column(String(20))
|
||||
door = Column(String(20))
|
||||
hrsz = Column(String(50))
|
||||
|
||||
full_address = Column(String) # Eredeti string (audit célból)
|
||||
# --- Elérhetőségek ---
|
||||
full_address = Column(String)
|
||||
contact_phone = Column(String, nullable=True)
|
||||
email = Column(String, nullable=True)
|
||||
website = Column(String, nullable=True)
|
||||
|
||||
# --- Forrás és Azonosítás ---
|
||||
source = Column(String(50), nullable=True, index=True) # Forrás: 'OSM', 'Facebook', stb.
|
||||
external_id = Column(String(100), nullable=True, index=True) # Külső ID (pl. OSM node id)
|
||||
source = Column(String(50), nullable=True, index=True)
|
||||
external_id = Column(String(100), nullable=True, index=True)
|
||||
|
||||
# Robot ujjlenyomat a Staging szintű deduplikációhoz
|
||||
fingerprint = Column(String(255), nullable=False)
|
||||
|
||||
# --- Adatmentés ---
|
||||
# Itt landol a teljes robot-zsákmány minden apró részlettel
|
||||
raw_data = Column(JSONB, server_default=text("'{}'::jsonb"))
|
||||
|
||||
# --- Státusz és Bizalom ---
|
||||
# status lehet: pending (feldolgozás alatt), enriched (nyomozó által bővített),
|
||||
# duplicate (már megvan), verified (élesítésre kész)
|
||||
status = Column(String(20), server_default=text("'pending'"), index=True)
|
||||
trust_score = Column(Integer, default=0)
|
||||
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
class DiscoveryParameter(Base):
|
||||
"""Robot vezérlési paraméterek."""
|
||||
__tablename__ = "discovery_parameters"
|
||||
__table_args__ = {"schema": "data"}
|
||||
id = Column(Integer, primary_key=True)
|
||||
city = Column(String(100), nullable=False)
|
||||
keyword = Column(String(100), nullable=False) # pl. "autóvillamosság"
|
||||
keyword = Column(String(100), nullable=False)
|
||||
country_code = Column(String(2), default="HU")
|
||||
is_active = Column(Boolean, default=True)
|
||||
last_run_at = Column(DateTime(timezone=True))
|
||||
@@ -1,17 +1,35 @@
|
||||
from sqlalchemy import Column, String, JSON, DateTime, Boolean
|
||||
# backend/app/models/system.py
|
||||
import enum
|
||||
from sqlalchemy import Column, String, DateTime, Boolean, text, UniqueConstraint, Integer
|
||||
from sqlalchemy.dialects.postgresql import JSONB # <-- JSONB-t használunk a stabilitásért
|
||||
from sqlalchemy.sql import func
|
||||
from app.db.base_class import Base
|
||||
|
||||
class SystemParameter(Base):
|
||||
"""
|
||||
Központi, dinamikus konfigurációs tábla.
|
||||
Támogatja a többlépcsős felülbírálást (Global -> Country -> Region -> Individual).
|
||||
"""
|
||||
__tablename__ = "system_parameters"
|
||||
__table_args__ = {"schema": "data", "extend_existing": True}
|
||||
__table_args__ = (
|
||||
UniqueConstraint('key', 'scope_level', 'scope_id', name='uix_param_scope'),
|
||||
{"schema": "data", "extend_existing": True}
|
||||
)
|
||||
|
||||
key = Column(String, primary_key=True, index=True, nullable=False)
|
||||
# Csoportosítás az Admin felületnek (pl. 'xp', 'scout', 'routing')
|
||||
# Technikai ID, hogy a 'key' ne legyen Primary Key, így engedve a hierarchiát
|
||||
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||
|
||||
key = Column(String, index=True, nullable=False) # pl. 'VEHICLE_LIMIT'
|
||||
category = Column(String, index=True, server_default="general")
|
||||
value = Column(JSON, nullable=False)
|
||||
|
||||
# A tényleges érték (JSONB-ben tárolva)
|
||||
value = Column(JSONB, nullable=False) # pl. {"FREE": 1, "PREMIUM": 4}
|
||||
|
||||
# --- 🛡️ HIERARCHIKUS SZINTEK ---
|
||||
scope_level = Column(String(30), server_default=text("'global'"), index=True)
|
||||
scope_id = Column(String(50), nullable=True)
|
||||
|
||||
is_active = Column(Boolean, default=True)
|
||||
description = Column(String)
|
||||
# Kötelező audit mező: ki módosította utoljára?
|
||||
last_modified_by = Column(String, nullable=True)
|
||||
updated_at = Column(DateTime(timezone=True), onupdate=func.now(), server_default=func.now())
|
||||
@@ -1,15 +0,0 @@
|
||||
from sqlalchemy import Column, String, JSON
|
||||
from app.db.base import Base
|
||||
|
||||
class SystemSetting(Base):
|
||||
"""
|
||||
Globális rendszerbeállítások tárolása.
|
||||
Kulcs-Érték párok (JSON támogatással a komplex szabályokhoz).
|
||||
Példa: key='FREE_VEHICLE_LIMIT', value='2'
|
||||
"""
|
||||
__tablename__ = "system_settings"
|
||||
__table_args__ = {"schema": "data"}
|
||||
|
||||
key = Column(String, primary_key=True, index=True)
|
||||
value = Column(JSON, nullable=False)
|
||||
description = Column(String, nullable=True)
|
||||
18
backend/app/models/translation.py
Executable file → Normal file
18
backend/app/models/translation.py
Executable file → Normal file
@@ -1,16 +1,10 @@
|
||||
from sqlalchemy import Column, Integer, String, Text, Boolean, UniqueConstraint
|
||||
# JAVÍTÁS: Közvetlenül a base_class-ból importálunk, hogy elkerüljük a körkörös importot
|
||||
from app.db.base_class import Base
|
||||
from sqlalchemy import Column, Integer, String, Text
|
||||
from app.db.base_class import Base
|
||||
|
||||
class Translation(Base):
|
||||
__tablename__ = "translations"
|
||||
__table_args__ = (
|
||||
UniqueConstraint("key", "lang_code", name="uq_translation_key_lang"),
|
||||
{"schema": "data"}
|
||||
)
|
||||
|
||||
__table_args__ = {"schema": "data"}
|
||||
id = Column(Integer, primary_key=True, index=True)
|
||||
key = Column(String(100), nullable=False, index=True)
|
||||
lang_code = Column(String(5), nullable=False, index=True)
|
||||
value = Column(Text, nullable=False)
|
||||
is_published = Column(Boolean, default=False)
|
||||
key = Column(String(255), index=True)
|
||||
lang = Column(String(5), index=True) # pl: 'hu', 'en'
|
||||
value = Column(Text)
|
||||
@@ -1,6 +1,7 @@
|
||||
from sqlalchemy import Column, Integer, String, JSON, UniqueConstraint, text, Boolean, DateTime, ForeignKey, Numeric, Index
|
||||
from sqlalchemy import Column, Integer, String, JSON, UniqueConstraint, text, Boolean, DateTime, ForeignKey, Numeric, Index, Text
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
from sqlalchemy.dialects.postgresql import JSONB
|
||||
from app.db.base_class import Base
|
||||
|
||||
class VehicleType(Base):
|
||||
@@ -9,8 +10,8 @@ class VehicleType(Base):
|
||||
__table_args__ = {"schema": "data"}
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
code = Column(String(30), unique=True, index=True) # car, motorcycle, truck, bus, boat, etc.
|
||||
name = Column(String(50)) # Megjelenítendő név
|
||||
code = Column(String(30), unique=True, index=True)
|
||||
name = Column(String(50))
|
||||
icon = Column(String(50))
|
||||
units = Column(JSON, server_default=text("'{\"power\": \"kW\", \"weight\": \"kg\", \"cargo\": \"m3\"}'::jsonb"))
|
||||
|
||||
@@ -24,24 +25,24 @@ class FeatureDefinition(Base):
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
vehicle_type_id = Column(Integer, ForeignKey("data.vehicle_types.id"))
|
||||
category = Column(String(50)) # Műszaki, Beltér, Kültér, Multimédia
|
||||
category = Column(String(50))
|
||||
name = Column(String(100), nullable=False)
|
||||
data_type = Column(String(20), default="boolean")
|
||||
|
||||
vehicle_type = relationship("VehicleType", back_populates="features")
|
||||
|
||||
class ModelFeatureMap(Base):
|
||||
"""Modell-szintű felszereltségi sablon (Alap vs Extra)"""
|
||||
"""Modell-szintű felszereltségi sablon"""
|
||||
__tablename__ = "model_feature_maps"
|
||||
__table_args__ = {"schema": "data"}
|
||||
|
||||
model_id = Column(Integer, ForeignKey("data.vehicle_model_definitions.id"), primary_key=True)
|
||||
feature_id = Column(Integer, ForeignKey("data.feature_definitions.id"), primary_key=True)
|
||||
availability = Column(String(20), default="standard") # standard, optional, accessory
|
||||
availability = Column(String(20), default="standard")
|
||||
value = Column(String(100))
|
||||
|
||||
class VehicleModelDefinition(Base):
|
||||
"""MDM Master rekordok - Kibővítve Deduplikációs és Évjárat mezőkkel (v1.2.5)"""
|
||||
"""MDM Master rekordok - v1.3.0 Pipeline Edition (Researcher & Alchemist)"""
|
||||
__tablename__ = "vehicle_model_definitions"
|
||||
__table_args__ = (
|
||||
UniqueConstraint('make', 'technical_code', 'vehicle_type', name='uix_make_tech_type'),
|
||||
@@ -59,19 +60,24 @@ class VehicleModelDefinition(Base):
|
||||
vehicle_type_id = Column(Integer, ForeignKey("data.vehicle_types.id"))
|
||||
vehicle_class = Column(String(50))
|
||||
|
||||
# --- ÚJ MEZŐK AZ INTELLIGENS ÖSSZEFÉSÜLÉSHEZ ---
|
||||
# Ha ez a rekord egy duplikátum, itt tároljuk, melyik az eredeti (Master) rekord
|
||||
parent_id = Column(Integer, ForeignKey("data.vehicle_model_definitions.id"), nullable=True)
|
||||
|
||||
# Gyártási intervallum meghatározása
|
||||
year_from = Column(Integer, nullable=True, index=True)
|
||||
year_to = Column(Integer, nullable=True, index=True)
|
||||
|
||||
# Alternatív elnevezések kereshetőséghez (pl. ["Tracer 9", "MT-09 Tracer"])
|
||||
synonyms = Column(JSON, server_default=text("'[]'::jsonb"))
|
||||
# -----------------------------------------------
|
||||
|
||||
# --- LOGISZTIKAI ÉS TECHNIKAI FIX OSZLOPOK ---
|
||||
# --- ROBOT VÉDELMI ÉS PIPELINE MEZŐK (v1.3.0) ---
|
||||
is_manual = Column(Boolean, default=False, server_default=text("false"), index=True)
|
||||
attempts = Column(Integer, default=0, server_default=text("0"), index=True)
|
||||
last_error = Column(Text, nullable=True)
|
||||
|
||||
# Robot 2.1 "Researcher" porszívózott nyers adatai (A szemetesláda)
|
||||
raw_search_context = Column(Text, nullable=True)
|
||||
|
||||
# Telemetria és forrás adatok (JSONB a hatékonyabb kereséshez)
|
||||
research_metadata = Column(JSONB, server_default=text("'{}'::jsonb"), nullable=False)
|
||||
# --------------------------------------------------
|
||||
|
||||
# --- TECHNIKAI FIX OSZLOPOK ---
|
||||
engine_capacity = Column(Integer, index=True)
|
||||
power_kw = Column(Integer, index=True)
|
||||
max_weight_kg = Column(Integer, index=True)
|
||||
@@ -82,23 +88,19 @@ class VehicleModelDefinition(Base):
|
||||
cargo_length_mm = Column(Integer)
|
||||
cargo_width_mm = Column(Integer)
|
||||
cargo_height_mm = Column(Integer)
|
||||
# ----------------------------------------------
|
||||
|
||||
specifications = Column(JSON, server_default=text("'{}'::jsonb"))
|
||||
features_json = Column(JSON, server_default=text("'{}'::jsonb"))
|
||||
|
||||
status = Column(String(20), server_default="unverified") # unverified, ai_enriched, duplicate, manual_check
|
||||
# Státusz mező hossza 30-ra növelve az automatikus migrációhoz
|
||||
status = Column(String(30), server_default="unverified", index=True)
|
||||
is_master = Column(Boolean, default=False)
|
||||
source = Column(String(50))
|
||||
source = Column(String(50))
|
||||
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
||||
|
||||
# Kapcsolatok
|
||||
v_type_rel = relationship("VehicleType", back_populates="definitions")
|
||||
|
||||
# Önmagára hivatkozó kapcsolat a duplikációk kezeléséhez
|
||||
master_record = relationship("VehicleModelDefinition", remote_side=[id], backref="merged_variants")
|
||||
|
||||
# Meglévő kapcsolatok megtartása
|
||||
variants = relationship("AssetCatalog", back_populates="master_definition")
|
||||
variants = relationship("AssetCatalog", back_populates="master_definition", primaryjoin="VehicleModelDefinition.id == AssetCatalog.master_definition_id")
|
||||
109
backend/app/models/vehicle_definitions1.0.0.py
Normal file
109
backend/app/models/vehicle_definitions1.0.0.py
Normal file
@@ -0,0 +1,109 @@
|
||||
from sqlalchemy import Column, Integer, String, JSON, UniqueConstraint, text, Boolean, DateTime, ForeignKey, Numeric, Index, Text
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.sql import func
|
||||
from sqlalchemy.dialects.postgresql import JSONB # PostgreSQL specifikus JSONB a hatékony kereséshez
|
||||
from app.db.base_class import Base
|
||||
|
||||
class VehicleType(Base):
|
||||
"""Jármű főtípusok sémája (Séma-gazda)"""
|
||||
__tablename__ = "vehicle_types"
|
||||
__table_args__ = {"schema": "data"}
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
code = Column(String(30), unique=True, index=True) # car, motorcycle, truck, bus, boat, etc.
|
||||
name = Column(String(50)) # Megjelenítendő név
|
||||
icon = Column(String(50))
|
||||
units = Column(JSON, server_default=text("'{\"power\": \"kW\", \"weight\": \"kg\", \"cargo\": \"m3\"}'::jsonb"))
|
||||
|
||||
features = relationship("FeatureDefinition", back_populates="vehicle_type")
|
||||
definitions = relationship("VehicleModelDefinition", back_populates="v_type_rel")
|
||||
|
||||
class FeatureDefinition(Base):
|
||||
"""Globális felszereltség szótár"""
|
||||
__tablename__ = "feature_definitions"
|
||||
__table_args__ = {"schema": "data"}
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
vehicle_type_id = Column(Integer, ForeignKey("data.vehicle_types.id"))
|
||||
category = Column(String(50)) # Műszaki, Beltér, Kültér, Multimédia
|
||||
name = Column(String(100), nullable=False)
|
||||
data_type = Column(String(20), default="boolean")
|
||||
|
||||
vehicle_type = relationship("VehicleType", back_populates="features")
|
||||
|
||||
class ModelFeatureMap(Base):
|
||||
"""Modell-szintű felszereltségi sablon (Alap vs Extra)"""
|
||||
__tablename__ = "model_feature_maps"
|
||||
__table_args__ = {"schema": "data"}
|
||||
|
||||
model_id = Column(Integer, ForeignKey("data.vehicle_model_definitions.id"), primary_key=True)
|
||||
feature_id = Column(Integer, ForeignKey("data.feature_definitions.id"), primary_key=True)
|
||||
availability = Column(String(20), default="standard") # standard, optional, accessory
|
||||
value = Column(String(100))
|
||||
|
||||
class VehicleModelDefinition(Base):
|
||||
"""MDM Master rekordok - v1.3.0 Pipeline Edition (Researcher & Alchemist)"""
|
||||
__tablename__ = "vehicle_model_definitions"
|
||||
__table_args__ = (
|
||||
UniqueConstraint('make', 'technical_code', 'vehicle_type', name='uix_make_tech_type'),
|
||||
Index('idx_vmd_lookup', 'make', 'technical_code'),
|
||||
{"schema": "data"}
|
||||
)
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
make = Column(String(50), nullable=False, index=True)
|
||||
technical_code = Column(String(50), nullable=False, index=True)
|
||||
marketing_name = Column(String(100), index=True)
|
||||
family_name = Column(String(100))
|
||||
|
||||
vehicle_type = Column(String(30), index=True)
|
||||
vehicle_type_id = Column(Integer, ForeignKey("data.vehicle_types.id"))
|
||||
vehicle_class = Column(String(50))
|
||||
|
||||
parent_id = Column(Integer, ForeignKey("data.vehicle_model_definitions.id"), nullable=True)
|
||||
year_from = Column(Integer, nullable=True, index=True)
|
||||
year_to = Column(Integer, nullable=True, index=True)
|
||||
synonyms = Column(JSON, server_default=text("'[]'::jsonb"))
|
||||
|
||||
# --- ROBOT VÉDELMI ÉS PIPELINE MEZŐK (v1.3.0) ---
|
||||
is_manual = Column(Boolean, default=False, server_default=text("false"), index=True)
|
||||
attempts = Column(Integer, default=0, server_default=text("0"), index=True)
|
||||
last_error = Column(Text, nullable=True)
|
||||
|
||||
# Robot 2.1 "Researcher" porszívózott nyers adatai (A szemetesláda)
|
||||
raw_search_context = Column(Text, nullable=True)
|
||||
|
||||
# Telemetria és forrás adatok (melyik API/URL hozta az adatot)
|
||||
research_metadata = Column(JSONB, server_default=text("'{}'::jsonb"), nullable=False)
|
||||
# --------------------------------------------------
|
||||
|
||||
# --- TECHNIKAI FIX OSZLOPOK ---
|
||||
engine_capacity = Column(Integer, index=True)
|
||||
power_kw = Column(Integer, index=True)
|
||||
max_weight_kg = Column(Integer, index=True)
|
||||
|
||||
axle_count = Column(Integer)
|
||||
payload_capacity_kg = Column(Integer)
|
||||
cargo_volume_m3 = Column(Numeric(10, 2))
|
||||
cargo_length_mm = Column(Integer)
|
||||
cargo_width_mm = Column(Integer)
|
||||
cargo_height_mm = Column(Integer)
|
||||
|
||||
specifications = Column(JSON, server_default=text("'{}'::jsonb"))
|
||||
features_json = Column(JSON, server_default=text("'{}'::jsonb"))
|
||||
|
||||
# Státusz mező hossza növelve a pipeline flagekhez
|
||||
status = Column(String(30), server_default="unverified", index=True)
|
||||
is_master = Column(Boolean, default=False)
|
||||
source = Column(String(50)) # 'ROBOT-v1.3.0-Pipeline'
|
||||
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
|
||||
|
||||
# Kapcsolatok
|
||||
v_type_rel = relationship("VehicleType", back_populates="definitions")
|
||||
master_record = relationship("VehicleModelDefinition", remote_side=[id], backref="merged_variants")
|
||||
|
||||
# AssetCatalog kapcsolat
|
||||
# Megjegyzés: Ellenőrizd, hogy az AssetCatalog modell be van-e importálva a Base-be!
|
||||
variants = relationship("AssetCatalog", back_populates="master_definition", primaryjoin="VehicleModelDefinition.id == AssetCatalog.master_definition_id")
|
||||
@@ -1,47 +0,0 @@
|
||||
from fastapi import FastAPI, Request, HTTPException
|
||||
from fastapi.security.utils import get_authorization_scheme_param
|
||||
from app.core.config import settings
|
||||
from app.core.security import decode_token
|
||||
|
||||
from app.api.auth import router as auth_router
|
||||
from app.api.recommend import router as recommend_router
|
||||
|
||||
app = FastAPI(title="Service Finder API")
|
||||
|
||||
@app.middleware("http")
|
||||
async def jwt_claims_middleware(request: Request, call_next):
|
||||
"""
|
||||
Ha van Authorization: Bearer <token>, akkor claims bekerül request.state.claims-be.
|
||||
Auth endpointoknál nem kötelező.
|
||||
"""
|
||||
auth = request.headers.get("Authorization")
|
||||
if auth:
|
||||
scheme, token = get_authorization_scheme_param(auth)
|
||||
if scheme.lower() == "bearer" and token:
|
||||
try:
|
||||
claims = decode_token(token, settings.JWT_SECRET)
|
||||
if claims.get("type") != "access":
|
||||
raise HTTPException(status_code=401, detail="Invalid access token type")
|
||||
request.state.claims = claims
|
||||
except Exception:
|
||||
# nem dobunk mindig 401-et, csak a védett endpointoknál; itt “néma” marad
|
||||
request.state.claims = None
|
||||
else:
|
||||
request.state.claims = None
|
||||
|
||||
return await call_next(request)
|
||||
|
||||
@app.get("/health")
|
||||
def health():
|
||||
return {"status": "ok"}
|
||||
|
||||
@app.get("/health/db")
|
||||
def health_db():
|
||||
from app.db.session import get_conn
|
||||
with get_conn() as conn:
|
||||
with conn.cursor() as cur:
|
||||
cur.execute("SELECT 1;")
|
||||
return {"db": "ok", "result": cur.fetchone()[0]}
|
||||
|
||||
app.include_router(auth_router)
|
||||
app.include_router(recommend_router)
|
||||
47
backend/app/schemas/evidence.py
Normal file
47
backend/app/schemas/evidence.py
Normal file
@@ -0,0 +1,47 @@
|
||||
# app/schemas/evidence.py
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import Optional
|
||||
|
||||
class RegistrationDocumentExtracted(BaseModel):
|
||||
"""A magyar forgalmi engedély teljes adattartalma."""
|
||||
# A - Okmány adatok
|
||||
license_plate: Optional[str] = Field(None, alias="A", description="Rendszám")
|
||||
first_registration_date: Optional[str] = Field(None, alias="B", description="Első nyilvántartásba vétel")
|
||||
doc_serial_number: Optional[str] = Field(None, description="Okmány sorszáma (jobb felső sarok)")
|
||||
|
||||
# C - Tulajdonos/Üzembentartó adatok
|
||||
owner_last_name: Optional[str] = Field(None, alias="C.1.1", description="Családi név vagy cégnév")
|
||||
owner_first_name: Optional[str] = Field(None, alias="C.1.2", description="Utónév")
|
||||
owner_address: Optional[str] = Field(None, alias="C.1.3", description="Lakcím/Székhely")
|
||||
owner_status: Optional[str] = Field(None, alias="C.4", description="Jogosultság státusza (a=tulaj, b=nem tulaj)")
|
||||
|
||||
# D - Jármű technikai adatai
|
||||
make: Optional[str] = Field(None, alias="D.1", description="Gyártmány")
|
||||
vehicle_type: Optional[str] = Field(None, alias="D.2", description="Típus")
|
||||
commercial_description: Optional[str] = Field(None, alias="D.3", description="Kereskedelmi leírás")
|
||||
vin: Optional[str] = Field(None, alias="E", description="Alvázszám (17 karakter)")
|
||||
|
||||
# G, F - Tömeg adatok
|
||||
weight_kg: Optional[int] = Field(None, alias="G", description="Saját tömeg")
|
||||
max_weight_kg: Optional[int] = Field(None, alias="F.1", description="Együttes tömeg")
|
||||
|
||||
# P, V - Motor és Környezetvédelem
|
||||
engine_capacity: Optional[int] = Field(None, alias="P.1", description="Hengerűrtartalom (cm3)")
|
||||
engine_power: Optional[float] = Field(None, alias="P.2", description="Teljesítmény (kW)")
|
||||
fuel_type: Optional[str] = Field(None, alias="P.3", description="Hajtóanyag")
|
||||
engine_code: Optional[str] = Field(None, alias="P.5", description="Motorkód")
|
||||
env_category: Optional[str] = Field(None, alias="V.9", description="Környezetvédelmi osztály")
|
||||
|
||||
# R, S, H - Egyéb
|
||||
color: Optional[str] = Field(None, alias="R", description="Szín")
|
||||
seats: Optional[int] = Field(None, alias="S.1", description="Ülések száma")
|
||||
expiry_date: Optional[str] = Field(None, alias="H", description="Műszaki érvényesség")
|
||||
transmission_type: Optional[str] = Field(None, description="Sebességváltó fajtája")
|
||||
|
||||
class Config:
|
||||
populate_by_name = True
|
||||
|
||||
class OcrResponse(BaseModel):
|
||||
success: bool
|
||||
message: str
|
||||
data: Optional[RegistrationDocumentExtracted] = None
|
||||
@@ -1,20 +1,45 @@
|
||||
from pydantic import BaseModel, Field
from typing import Optional, Dict, Any


class ServiceCreateInternal(BaseModel):
    """Internal payload used by the crawler robots to register a service provider."""

    name: str = Field(..., description="A szolgáltató neve")

    # --- HIERARCHY ---
    # When the robot detects that the unit belongs to a chain, the parent
    # unit's ID (e.g. the franchise HQ) is stored here.
    parent_id: Optional[int] = Field(None, description="Szülő egység ID-ja (pl. Franchise központ)")

    # --- ADDRESS DATA ---
    postal_code: Optional[str] = None
    city: str
    street_name: Optional[str] = None
    street_type: Optional[str] = "utca"
    house_number: Optional[str] = None
    stairwell: Optional[str] = None
    floor: Optional[str] = None
    door: Optional[str] = None
    hrsz: Optional[str] = None

    full_address: Optional[str] = Field(None, description="Eredeti, nyers cím szövege")

    # --- CONTACT ---
    contact_phone: Optional[str] = None
    email: Optional[str] = None
    website: Optional[str] = None

    # --- SOCIAL & AI ---
    # Prepared for the Deep Dive phase.
    social_links: Optional[Dict[str, str]] = Field(default_factory=dict)
    vibe_analysis: Optional[Dict[str, Any]] = Field(default_factory=dict)

    # --- IDENTITY AND SOURCE ---
    source: str  # 'google', 'osm', 'manual', 'fb_scraper'
    external_id: Optional[str] = None

    # The robot's "anchor" against duplicate records.
    fingerprint: str = Field(..., description="Egyedi ujjlenyomat: Hash(Name+City+Street)")

    trust_score: int = Field(30, ge=0, le=100)
    # FIX: use default_factory instead of a mutable `{}` literal, consistent
    # with social_links / vibe_analysis above.
    raw_data: Optional[Dict[str, Any]] = Field(default_factory=dict)

    class Config:
        from_attributes = True
|
||||
64
backend/app/services/ai_ocr_service.py
Normal file
64
backend/app/services/ai_ocr_service.py
Normal file
@@ -0,0 +1,64 @@
|
||||
# app/services/ai_ocr_service.py
import base64
import json
import logging

import httpx

from app.schemas.evidence import RegistrationDocumentExtracted

logger = logging.getLogger("AI-OCR")


class AiOcrService:
    """Robot 3: extracts Hungarian vehicle-registration data with a local vision LLM."""

    OLLAMA_URL = "http://service_finder_ollama:11434/api/generate"
    MODEL_NAME = "llama3.2-vision"

    @classmethod
    async def extract_registration_data(cls, clean_image_bytes: bytes) -> RegistrationDocumentExtracted:
        """
        Send a pre-cleaned document image to Ollama and parse the JSON reply.

        Args:
            clean_image_bytes: PNG/JPEG bytes already run through the OCR cleanup pipeline.

        Returns:
            The extracted registration fields as a RegistrationDocumentExtracted.

        Raises:
            ValueError: when the model call, the JSON parsing, or validation fails.
        """
        base64_image = base64.b64encode(clean_image_bytes).decode('utf-8')

        prompt = """
        Te egy magyar hatósági okmány-szakértő AI vagy. A feladatod a mellékelt magyar forgalmi engedély (kép) összes adatának kinyerése.

        Keresd meg és olvasd le az adatokat az alábbi hatósági kódok alapján:
        - A: Rendszám (kötőjellel, pl: ABC-123 vagy AA-BB-123)
        - B: Első nyilvántartásba vétel dátuma (YYYY.MM.DD)
        - C.1.1: Családi név vagy cégnév
        - C.1.2: Utónév
        - C.1.3: Teljes lakcím (Irsz, Város, Utca, Házszám)
        - C.4: Jogosultság (a = tulajdonos, b = üzembentartó)
        - D.1: Gyártmány (pl. TOYOTA, VOLKSWAGEN)
        - D.2: Jármű típusa
        - D.3: Kereskedelmi leírás (pl. COROLLA, GOLF)
        - E: Alvázszám (pontosan 17 karakter)
        - G: Saját tömeg (kg)
        - F.1: Együttes tömeg (kg)
        - P.1: Hengerűrtartalom (cm3)
        - P.2: Teljesítmény (kW)
        - P.3: Hajtóanyag (pl. Benzin, Gázolaj, Elektromos)
        - P.5: Motorkód
        - V.9: Környezetvédelmi osztály kódja
        - R: Szín
        - S.1: Ülések száma
        - H: Műszaki érvényesség vége (YYYY.MM.DD)
        - Sebességváltó: Keresd a 0, 1, 2, 3 kódokat (0=mechanikus, 2=automata).

        VÁLASZ FORMÁTUMA: Kizárólag érvényes JSON. Ha egy adat nem olvasható, az értéke null legyen.
        """

        payload = {
            "model": cls.MODEL_NAME,
            "prompt": prompt,
            "images": [base64_image],
            "stream": False,
            "format": "json"
        }

        async with httpx.AsyncClient(timeout=90.0) as client:
            try:
                response = await client.post(cls.OLLAMA_URL, json=payload)
                response.raise_for_status()

                ai_response_text = response.json().get("response", "{}")
                data_dict = json.loads(ai_response_text)

                return RegistrationDocumentExtracted(**data_dict)

            except Exception as e:
                # FIX: go through the logging framework instead of print(),
                # and chain the original exception so the root cause survives.
                logger.error(f"Robot 3 AI Hiba: {e}")
                raise ValueError(f"AI hiba az adatkivonás során: {str(e)}") from e
|
||||
@@ -3,24 +3,20 @@ import json
|
||||
import logging
|
||||
import asyncio
|
||||
import re
|
||||
from typing import Dict, Any, Optional
|
||||
from google import genai
|
||||
from google.genai import types
|
||||
import base64
|
||||
import httpx
|
||||
from typing import Dict, Any, Optional, List
|
||||
from sqlalchemy import select
|
||||
from app.db.session import SessionLocal
|
||||
from app.models import SystemParameter
|
||||
from app.models.system import SystemParameter
|
||||
|
||||
logger = logging.getLogger("AI-Service")
|
||||
|
||||
class AIService:
|
||||
"""
|
||||
AI Service v1.2.5 - Final Integrated Edition
|
||||
- Robot 2: Technikai dúsítás (Search + Regex JSON parsing)
|
||||
- Robot 3: OCR (Controlled JSON generation)
|
||||
"""
|
||||
api_key = os.getenv("GEMINI_API_KEY")
|
||||
client = genai.Client(api_key=api_key) if api_key else None
|
||||
PRIMARY_MODEL = "gemini-2.0-flash"
|
||||
OLLAMA_BASE_URL = "http://ollama:11434/api/generate"
|
||||
TEXT_MODEL = "qwen2.5-coder:32b"
|
||||
VISION_MODEL = "llava:7b"
|
||||
DVLA_API_KEY = os.getenv("DVLA_API_KEY")
|
||||
|
||||
@classmethod
|
||||
async def get_config_delay(cls) -> float:
|
||||
@@ -29,83 +25,71 @@ class AIService:
|
||||
stmt = select(SystemParameter).where(SystemParameter.key == "AI_REQUEST_DELAY")
|
||||
res = await db.execute(stmt)
|
||||
param = res.scalar_one_or_none()
|
||||
return float(param.value) if param else 1.0
|
||||
except Exception: return 1.0
|
||||
return float(param.value) if param else 0.1
|
||||
except Exception:
|
||||
return 0.1
|
||||
|
||||
@classmethod
|
||||
async def get_clean_vehicle_data(cls, make: str, raw_model: str, v_type: str) -> Optional[Dict[str, Any]]:
|
||||
"""Robot 2: Adatbányászat Google Search segítségével."""
|
||||
if not cls.client: return None
|
||||
async def get_gold_data_from_research(cls, make: str, model: str, raw_context: str) -> Optional[Dict[str, Any]]:
|
||||
await asyncio.sleep(await cls.get_config_delay())
|
||||
|
||||
search_tool = types.Tool(google_search=types.GoogleSearch())
|
||||
|
||||
prompt = f"""
|
||||
KERESS RÁ az interneten: {make} {raw_model} ({v_type}) pontos gyári modellkódja és technikai adatai.
|
||||
Adj választ szigorúan csak egy JSON blokkban:
|
||||
FELADAT: A mellékelt kutatási adatokból állíts össze egy hiteles technikai adatlapot.
|
||||
JÁRMŰ: {make} {model}
|
||||
KUTATÁSI ADATOK (Szemetesláda tartalom):
|
||||
{raw_context}
|
||||
|
||||
SZIGORÚ SZABÁLYOK:
|
||||
1. Csak a megerősített adatokat töltsd ki.
|
||||
2. Ha lóerőt (hp/bhp) találsz, váltsd át kW-ra (hp * 0.745).
|
||||
3. A 'marketing_name' maradjon 50 karakter alatt.
|
||||
|
||||
VÁLASZ FORMÁTUM (Tiszta JSON):
|
||||
{{
|
||||
"marketing_name": "tiszta név",
|
||||
"synonyms": ["név1", "név2"],
|
||||
"technical_code": "gyári kód",
|
||||
"year_from": int,
|
||||
"year_to": int_vagy_null,
|
||||
"marketing_name": "string",
|
||||
"technical_code": "string",
|
||||
"ccm": int,
|
||||
"kw": int,
|
||||
"maintenance": {{ "oil_type": "string", "oil_qty": float, "spark_plug": "string", "coolant": "string" }}
|
||||
"maintenance": {{
|
||||
"oil_type": "string",
|
||||
"oil_qty_liters": float,
|
||||
"spark_plug": "string",
|
||||
"final_drive": "string"
|
||||
}},
|
||||
"tires": {{
|
||||
"front": "string",
|
||||
"rear": "string"
|
||||
}},
|
||||
"is_duplicate_potential": bool
|
||||
}}
|
||||
FONTOS: A 'technical_code' NEM lehet üres. Ha nem találod, adj 'N/A' értéket!
|
||||
"""
|
||||
|
||||
# Search tool használata esetén a response_mime_type tilos!
|
||||
config = types.GenerateContentConfig(
|
||||
system_instruction="Profi járműtechnikai adatbányász vagy. Csak tiszta JSON-t válaszolsz markdown kódblokk nélkül.",
|
||||
tools=[search_tool],
|
||||
temperature=0.1
|
||||
)
|
||||
return await cls._execute_ai_call(prompt, make, model)
|
||||
|
||||
@classmethod
|
||||
async def _execute_ai_call(cls, prompt: str, make: str, model: str) -> Optional[Dict[str, Any]]:
|
||||
payload = {
|
||||
"model": cls.TEXT_MODEL,
|
||||
"prompt": prompt,
|
||||
"stream": False,
|
||||
"format": "json",
|
||||
"options": {"temperature": 0.1}
|
||||
}
|
||||
try:
|
||||
response = cls.client.models.generate_content(model=cls.PRIMARY_MODEL, contents=prompt, config=config)
|
||||
text = response.text
|
||||
# Tisztítás: ha az AI mégis tenne bele markdown jeleket
|
||||
clean_json = re.sub(r'```json\s*|```', '', text).strip()
|
||||
res_json = json.loads(clean_json)
|
||||
if isinstance(res_json, list) and len(res_json) > 0: res_json = res_json[0]
|
||||
return res_json if isinstance(res_json, dict) else None
|
||||
async with httpx.AsyncClient(timeout=120.0) as client:
|
||||
response = await client.post(cls.OLLAMA_BASE_URL, json=payload)
|
||||
response.raise_for_status()
|
||||
res_json = response.json()
|
||||
return json.loads(res_json.get("response", "{}"))
|
||||
except Exception as e:
|
||||
logger.error(f"❌ AI hiba ({make} {raw_model}): {e}")
|
||||
logger.error(f"❌ AI hiba ({make} {model}): {e}")
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
async def analyze_document_image(cls, image_data: bytes, doc_type: str) -> Optional[Dict[str, Any]]:
|
||||
"""Robot 3: OCR funkció - Forgalmi, Személyi, Számla, Odometer."""
|
||||
if not cls.client: return None
|
||||
async def get_clean_vehicle_data(cls, make: str, raw_model: str, v_type: str, sources: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
await asyncio.sleep(await cls.get_config_delay())
|
||||
|
||||
prompts = {
|
||||
"identity": "Személyes okmány adatok (név, szám, lejárat).",
|
||||
"vehicle_reg": "Forgalmi adatok (rendszám, alvázszám, kW, ccm).",
|
||||
"invoice": "Számla adatok (partner, végösszeg, dátum).",
|
||||
"odometer": "Csak a kilométeróra állása számként."
|
||||
}
|
||||
|
||||
# Itt maradhat a response_mime_type, mert nem használunk Search-öt
|
||||
config = types.GenerateContentConfig(
|
||||
system_instruction="Profi OCR dokumentum-elemző vagy. Csak tiszta JSON-t válaszolsz.",
|
||||
response_mime_type="application/json"
|
||||
)
|
||||
|
||||
try:
|
||||
response = cls.client.models.generate_content(
|
||||
model=cls.PRIMARY_MODEL,
|
||||
contents=[
|
||||
f"Elemezd ezt a képet ({doc_type}): {prompts.get(doc_type, 'OCR')}",
|
||||
types.Part.from_bytes(data=image_data, mime_type="image/jpeg")
|
||||
],
|
||||
config=config
|
||||
)
|
||||
res_json = json.loads(response.text)
|
||||
if isinstance(res_json, list) and len(res_json) > 0: res_json = res_json[0]
|
||||
return res_json if isinstance(res_json, dict) else None
|
||||
except Exception as e:
|
||||
logger.error(f"❌ OCR hiba: {e}")
|
||||
return None
|
||||
prompt = f"""
|
||||
FELADAT: Normalizáld a jármű adatait.
|
||||
GYÁRTÓ: {make} | MODELL: {raw_model}
|
||||
ADATOK: {json.dumps(sources)}
|
||||
(JSON válasz marketing_name, synonyms, technical_code, ccm, kw, year_from, year_to)
|
||||
"""
|
||||
return await cls._execute_ai_call(prompt, make, raw_model)
|
||||
141
backend/app/services/ai_service1.1.0.py
Normal file
141
backend/app/services/ai_service1.1.0.py
Normal file
@@ -0,0 +1,141 @@
|
||||
import os
import json
import logging
import asyncio
import re
import base64
import httpx
from typing import Dict, Any, Optional, List
from sqlalchemy import select
from app.db.session import SessionLocal
from app.models import SystemParameter

logger = logging.getLogger("AI-Service")


class AIService:
    """
    AI Service v1.3.5 - Private High-Performance Edition
    - Engine: Local Ollama (GPU Accelerated)
    - Features: DVLA Integration, 50-char Normalization, Private OCR
    """

    # On the Docker internal network the service name is 'ollama'.
    OLLAMA_BASE_URL = "http://ollama:11434/api/generate"
    TEXT_MODEL = "vehicle-pro"
    VISION_MODEL = "llava:7b"
    DVLA_API_KEY = os.getenv("DVLA_API_KEY")

    @classmethod
    async def get_config_delay(cls) -> float:
        """Fetch the inter-request delay from the database; fall back to 0.1s."""
        try:
            async with SessionLocal() as db:
                stmt = select(SystemParameter).where(SystemParameter.key == "AI_REQUEST_DELAY")
                res = await db.execute(stmt)
                param = res.scalar_one_or_none()
                return float(param.value) if param else 0.1
        except Exception:
            # Best-effort config read: any DB problem means "use the default".
            return 0.1

    @classmethod
    async def get_clean_vehicle_data(cls, make: str, raw_model: str, v_type: str, sources: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Robot 2: merge and normalize vehicle data from multiple raw sources."""
        # Short pause to spare the GPU between requests.
        await asyncio.sleep(await cls.get_config_delay())

        prompt = f"""
        FELADAT: Normalizáld a jármű adatait több forrás alapján.
        GYÁRTÓ: {make}
        NYERS MODELLNÉV: {raw_model}
        FORRÁSOK NYERS ADATAI: {json.dumps(sources, ensure_ascii=False)}

        SZIGORÚ SZABÁLYOK:
        1. 'marketing_name': MAXIMUM 50 KARAKTER!
        2. 'synonyms': Gyűjtsd ide az összes többi névváltozatot.
        3. 'technical_code': Keresd meg a gyári kódokat.

        VÁLASZ FORMÁTUM (Csak tiszta JSON):
        {{
            "marketing_name": "string (max 50)",
            "synonyms": ["string"],
            "technical_code": "string",
            "ccm": int,
            "kw": int,
            "euro_class": int,
            "year_from": int,
            "year_to": int vagy null,
            "maintenance": {{
                "oil_type": "string",
                "oil_qty": float,
                "spark_plug": "string"
            }},
            "is_duplicate_potential": bool
        }}
        """

        payload = {
            "model": cls.TEXT_MODEL,
            "prompt": prompt,
            "stream": False,
            "format": "json",
            "options": {"temperature": 0.1}
        }

        try:
            async with httpx.AsyncClient(timeout=90.0) as client:
                logger.info(f"📡 AI kérés küldése: {make} {raw_model}...")
                response = await client.post(cls.OLLAMA_BASE_URL, json=payload)
                response.raise_for_status()
                res_json = response.json()
                clean_data = json.loads(res_json.get("response", "{}"))

                # Hard cap: the DB column only holds 50 characters.
                if clean_data.get("marketing_name"):
                    clean_data["marketing_name"] = clean_data["marketing_name"][:50].strip()

                return clean_data
        except Exception as e:
            logger.error(f"❌ AI hiba ({make} {raw_model}): {e}")
            return None

    @classmethod
    async def get_dvla_data(cls, vrm: str) -> Optional[Dict[str, Any]]:
        """Look up UK vehicle data by registration number (VRM)."""
        if not cls.DVLA_API_KEY: return None
        url = "https://driver-vehicle-licensing.api.gov.uk/vehicle-enquiry/v1/vehicles"
        headers = {"x-api-key": cls.DVLA_API_KEY, "Content-Type": "application/json"}
        try:
            async with httpx.AsyncClient() as client:
                resp = await client.post(url, json={"registrationNumber": vrm}, headers=headers)
                return resp.json() if resp.status_code == 200 else None
        except Exception as e:
            logger.error(f"❌ DVLA API hiba: {e}")
            return None

    @classmethod
    async def analyze_document_image(cls, image_data: bytes, doc_type: str) -> Optional[Dict[str, Any]]:
        """Robot 3: local OCR and document analysis (Llava:7b)."""
        await asyncio.sleep(await cls.get_config_delay())
        prompts = {
            "identity": "Extract ID card data (name, id_number, expiry) as JSON.",
            "vehicle_reg": "Extract vehicle registration (plate, VIN, power_kw, engine_ccm) as JSON.",
            "invoice": "Extract invoice details (vendor, total_amount, date) as JSON.",
            "odometer": "Identify the number on the odometer and return as JSON: {'value': int}."
        }
        img_b64 = base64.b64encode(image_data).decode('utf-8')
        payload = {
            "model": cls.VISION_MODEL,
            "prompt": prompts.get(doc_type, "Perform OCR and return JSON"),
            "images": [img_b64],
            "stream": False,
            "format": "json"
        }
        try:
            async with httpx.AsyncClient(timeout=120.0) as client:
                response = await client.post(cls.OLLAMA_BASE_URL, json=payload)
                # FIX: the status code was never checked (unlike the other
                # methods), so a 4xx/5xx from Ollama fell straight through
                # to .json() and surfaced as a confusing parse error.
                response.raise_for_status()
                res_data = response.json()
                clean_json = res_data.get("response", "{}")
                # Llava sometimes wraps the JSON in prose; grab the outermost
                # {...} span before parsing.
                match = re.search(r'\{.*\}', clean_json, re.DOTALL)
                return json.loads(match.group()) if match else json.loads(clean_json)
        except Exception as e:
            logger.error(f"❌ Helyi OCR hiba: {e}")
            return None
|
||||
111
backend/app/services/ai_service_googleApi_old.py
Normal file
111
backend/app/services/ai_service_googleApi_old.py
Normal file
@@ -0,0 +1,111 @@
|
||||
import os
import json
import logging
import asyncio
import re
from typing import Dict, Any, Optional
from google import genai
from google.genai import types
from sqlalchemy import select
from app.db.session import SessionLocal
from app.models import SystemParameter

logger = logging.getLogger("AI-Service")


class AIService:
    """
    AI Service v1.2.5 - Final Integrated Edition
    - Robot 2: Technical enrichment (Search + regex JSON parsing)
    - Robot 3: OCR (controlled JSON generation)
    """
    api_key = os.getenv("GEMINI_API_KEY")
    client = genai.Client(api_key=api_key) if api_key else None
    PRIMARY_MODEL = "gemini-2.0-flash"

    @classmethod
    async def get_config_delay(cls) -> float:
        """Read the per-request delay from the DB; default to 1.0s on any failure."""
        try:
            async with SessionLocal() as db:
                row = (
                    await db.execute(
                        select(SystemParameter).where(SystemParameter.key == "AI_REQUEST_DELAY")
                    )
                ).scalar_one_or_none()
                if row is None:
                    return 1.0
                return float(row.value)
        except Exception:
            return 1.0

    @classmethod
    async def get_clean_vehicle_data(cls, make: str, raw_model: str, v_type: str) -> Optional[Dict[str, Any]]:
        """Robot 2: data mining backed by Google Search grounding."""
        if cls.client is None:
            return None
        await asyncio.sleep(await cls.get_config_delay())

        search_tool = types.Tool(google_search=types.GoogleSearch())

        prompt = f"""
        KERESS RÁ az interneten: {make} {raw_model} ({v_type}) pontos gyári modellkódja és technikai adatai.
        Adj választ szigorúan csak egy JSON blokkban:
        {{
            "marketing_name": "tiszta név",
            "synonyms": ["név1", "név2"],
            "technical_code": "gyári kód",
            "year_from": int,
            "year_to": int_vagy_null,
            "ccm": int,
            "kw": int,
            "maintenance": {{ "oil_type": "string", "oil_qty": float, "spark_plug": "string", "coolant": "string" }}
        }}
        FONTOS: A 'technical_code' NEM lehet üres. Ha nem találod, adj 'N/A' értéket!
        """

        # When the Search tool is enabled, response_mime_type must NOT be set.
        config = types.GenerateContentConfig(
            system_instruction="Profi járműtechnikai adatbányász vagy. Csak tiszta JSON-t válaszolsz markdown kódblokk nélkül.",
            tools=[search_tool],
            temperature=0.1
        )

        try:
            reply = cls.client.models.generate_content(model=cls.PRIMARY_MODEL, contents=prompt, config=config)
            # Strip any markdown fencing the model may still have emitted.
            stripped = re.sub(r'```json\s*|```', '', reply.text).strip()
            parsed = json.loads(stripped)
            if isinstance(parsed, list) and parsed:
                parsed = parsed[0]
            if isinstance(parsed, dict):
                return parsed
            return None
        except Exception as e:
            logger.error(f"❌ AI hiba ({make} {raw_model}): {e}")
            return None

    @classmethod
    async def analyze_document_image(cls, image_data: bytes, doc_type: str) -> Optional[Dict[str, Any]]:
        """Robot 3: OCR for registration docs, ID cards, invoices and odometers."""
        if cls.client is None:
            return None
        await asyncio.sleep(await cls.get_config_delay())

        prompts = {
            "identity": "Személyes okmány adatok (név, szám, lejárat).",
            "vehicle_reg": "Forgalmi adatok (rendszám, alvázszám, kW, ccm).",
            "invoice": "Számla adatok (partner, végösszeg, dátum).",
            "odometer": "Csak a kilométeróra állása számként."
        }

        # response_mime_type is allowed here, because no Search tool is used.
        config = types.GenerateContentConfig(
            system_instruction="Profi OCR dokumentum-elemző vagy. Csak tiszta JSON-t válaszolsz.",
            response_mime_type="application/json"
        )

        try:
            reply = cls.client.models.generate_content(
                model=cls.PRIMARY_MODEL,
                contents=[
                    f"Elemezd ezt a képet ({doc_type}): {prompts.get(doc_type, 'OCR')}",
                    types.Part.from_bytes(data=image_data, mime_type="image/jpeg")
                ],
                config=config
            )
            parsed = json.loads(reply.text)
            if isinstance(parsed, list) and parsed:
                parsed = parsed[0]
            if isinstance(parsed, dict):
                return parsed
            return None
        except Exception as e:
            logger.error(f"❌ OCR hiba: {e}")
            return None
|
||||
27
backend/app/services/dvla_service.py
Normal file
27
backend/app/services/dvla_service.py
Normal file
@@ -0,0 +1,27 @@
|
||||
import logging
import os

import httpx

logger = logging.getLogger("DVLA-Service")


class DVLAService:
    """Client for the UK DVLA Vehicle Enquiry Service (plate-based lookup)."""

    API_URL = "https://driver-vehicle-licensing.api.gov.uk/vehicle-enquiry/v1/vehicles"
    # FIX: the key was a hard-coded placeholder committed to source
    # ("IDE_MÁSOLD_BE_AZ_API_KULCSOT" = "paste the API key here").
    # Read it from the environment instead, like the other services do.
    API_KEY = os.getenv("DVLA_API_KEY", "")

    @classmethod
    async def get_vehicle_details(cls, vrm: str):
        """VRM is the UK registration mark (e.g. AB12 CDE); returns JSON or None."""
        if not cls.API_KEY:
            # Without a key the API would only return 401s — skip the call.
            logger.warning("DVLA_API_KEY is not configured; skipping lookup.")
            return None
        headers = {
            "x-api-key": cls.API_KEY,
            "Content-Type": "application/json"
        }
        payload = {"registrationNumber": vrm}

        async with httpx.AsyncClient() as client:
            try:
                response = await client.post(cls.API_URL, json=payload, headers=headers)
                if response.status_code == 200:
                    return response.json()
                return None
            except Exception as e:
                logger.error(f"❌ DVLA hiba: {e}")
                return None
|
||||
62
backend/app/services/image_processor.py
Normal file
62
backend/app/services/image_processor.py
Normal file
@@ -0,0 +1,62 @@
|
||||
import logging

import cv2
import numpy as np
from typing import Optional

logger = logging.getLogger("Image-Processor")


class DocumentImageProcessor:
    """
    In-house image-cleanup pipeline for OCR.

    Converts raw (phone-shot) photos into high-contrast, black-and-white,
    noise-free images that the AI can read with near-100% accuracy.
    """

    @staticmethod
    def process_for_ocr(image_bytes: bytes) -> Optional[bytes]:
        """Run the full cleanup pipeline; returns PNG bytes, or None on failure."""
        try:
            # 1. Decode the image from memory (bytes from a FastAPI UploadFile).
            #    Nothing touches the disk; everything happens in RAM.
            nparr = np.frombuffer(image_bytes, np.uint8)
            img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)

            if img is None:
                raise ValueError("A képet nem sikerült dekódolni.")

            # 2. Grayscale conversion (colour only confuses text recognition).
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

            # 3. Upscaling: OCR engines and the AI prefer ~300 DPI inputs.
            height, width = gray.shape
            if width < 1000 or height < 1000:
                gray = cv2.resize(gray, None, fx=2.0, fy=2.0, interpolation=cv2.INTER_CUBIC)

            # 4. Contrast boost (CLAHE - Contrast Limited Adaptive Histogram
            #    Equalization): removes flash glare and lifts faint glyphs.
            clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
            contrast = clahe.apply(gray)

            # 5. Light denoising: suppresses paper texture (e.g. the watermark
            #    of a registration card or the creases of a receipt).
            blur = cv2.GaussianBlur(contrast, (5, 5), 0)

            # 6. Adaptive thresholding (binarization): each pixel is judged
            #    against its own neighbourhood, which handles unevenly lit
            #    photos (one corner shadowed by the phone, another bright).
            thresh = cv2.adaptiveThreshold(
                blur,
                255,
                cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
                cv2.THRESH_BINARY,
                11,  # block size (must be odd)
                2    # constant subtracted from the local mean
            )

            # 7. Re-encode to PNG bytes so the result can be handed to the AI.
            success, encoded_image = cv2.imencode('.png', thresh)
            if not success:
                raise ValueError("Nem sikerült a feldolgozott képet PNG-be kódolni.")

            return encoded_image.tobytes()

        except Exception as e:
            # FIX: report through the logging framework instead of print()
            # so failures actually show up in the service logs. Returning
            # None (best-effort) is the documented contract — keep it.
            logger.error(f"Hiba a képfeldolgozás során: {str(e)}")
            return None
|
||||
29
backend/app/services/storage_service.py
Normal file
29
backend/app/services/storage_service.py
Normal file
@@ -0,0 +1,29 @@
|
||||
import uuid
from io import BytesIO

from minio import Minio
from app.core.config import settings


class StorageService:
    """Thin MinIO wrapper for storing uploaded vehicle documents."""

    client = Minio(
        settings.MINIO_ENDPOINT,
        access_key=settings.MINIO_ROOT_USER,
        secret_key=settings.MINIO_ROOT_PASSWORD,
        secure=settings.MINIO_SECURE
    )
    BUCKET_NAME = "vehicle-documents"

    @classmethod
    async def upload_document(cls, file_bytes: bytes, file_name: str, folder: str) -> str:
        """
        Store *file_bytes* under ``folder/<uuid>_<file_name>`` and return the
        object path as ``bucket/key``.

        NOTE(review): the MinIO client is synchronous, so bucket_exists /
        make_bucket / put_object block the event loop here — consider
        offloading to a thread executor if uploads grow large.
        """
        # Lazily create the bucket on first use.
        if not cls.client.bucket_exists(cls.BUCKET_NAME):
            cls.client.make_bucket(cls.BUCKET_NAME)

        # Unique object name to avoid collisions between uploads.
        unique_name = f"{folder}/{uuid.uuid4()}_{file_name}"

        # FIX: `from io import BytesIO` was buried mid-method; hoisted to the
        # module imports per convention.
        cls.client.put_object(
            cls.BUCKET_NAME,
            unique_name,
            BytesIO(file_bytes),
            len(file_bytes)
        )
        return f"{cls.BUCKET_NAME}/{unique_name}"
|
||||
16
backend/app/services/translation.py
Executable file
16
backend/app/services/translation.py
Executable file
@@ -0,0 +1,16 @@
|
||||
from sqlalchemy import Column, Integer, String, Text, Boolean, UniqueConstraint

# Import Base straight from base_class to avoid a circular import.
from app.db.base_class import Base


class Translation(Base):
    """One translated UI string: a single (key, lang_code) pair per row."""

    __tablename__ = "translations"
    __table_args__ = (
        UniqueConstraint("key", "lang_code", name="uq_translation_key_lang"),
        {"schema": "data"},
    )

    id = Column(Integer, primary_key=True, index=True)
    # Logical identifier of the text; unique together with lang_code.
    key = Column(String(100), nullable=False, index=True)
    # Language code, up to 5 chars (e.g. "hu").
    lang_code = Column(String(5), nullable=False, index=True)
    # The translated text itself.
    value = Column(Text, nullable=False)
    # Drafts stay hidden until published.
    is_published = Column(Boolean, default=False)
||||
Binary file not shown.
Binary file not shown.
111
backend/app/workers/alchemist_v2_2.py
Normal file
111
backend/app/workers/alchemist_v2_2.py
Normal file
@@ -0,0 +1,111 @@
|
||||
import asyncio
import logging
from sqlalchemy import select, update, func, and_, case
from app.db.session import SessionLocal
from app.models.vehicle_definitions import VehicleModelDefinition
from app.services.ai_service import AIService

# Logging fine-tuning
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(name)s: %(message)s')
logger = logging.getLogger("Robot-Alchemist-v2.2")


class AlchemistBot:
    """Robot 2.2: turns the harvested raw research context into 'gold' vehicle data via AI."""

    def __init__(self):
        self.batch_size = 5               # spare GPU VRAM (Ollama runs concurrently)
        self.delay_between_records = 12   # Quadro P4000 cool-down between records

    async def synthesize_vehicle(self, vehicle_id: int):
        """Run AI enrichment on the collected research context of one record."""
        async with SessionLocal() as db:
            res = await db.execute(select(VehicleModelDefinition).where(VehicleModelDefinition.id == vehicle_id))
            v = res.scalar_one_or_none()

            if not v or not v.raw_search_context:
                logger.warning(f"⚠️ Nincs kontextus az ID:{vehicle_id} rekordhoz, átugrás.")
                return

            make, model = v.make, v.marketing_name
            # FIX: capture every attribute we need while the owning session is
            # still open; `v` becomes detached once this block exits, and
            # reading expired attributes later could raise DetachedInstanceError.
            raw_context = v.raw_search_context
            prev_attempts = v.attempts
            prev_tcode = v.technical_code
            logger.info(f"🧪 Arany dúsítás indul (AI Synthesis): {make} {model}")

            # Lock the record's status for the duration of processing.
            await db.execute(
                update(VehicleModelDefinition)
                .where(VehicleModelDefinition.id == vehicle_id)
                .values(status='ai_synthesis_in_progress')
            )
            await db.commit()

        # AI call outside any DB session: extracting gold data can take a
        # long time and must not hold a connection open.
        gold_data = await AIService.get_gold_data_from_research(make, model, raw_context)

        async with SessionLocal() as db:
            if gold_data:
                ccm = gold_data.get("ccm")
                kw = gold_data.get("kw")
                # FIX: .get("marketing_name", model) still yields None when the
                # key is present with a null value, crashing the [:50] slice.
                m_name = (gold_data.get("marketing_name") or model)[:50]
                t_code = gold_data.get("technical_code")

                await db.execute(
                    update(VehicleModelDefinition)
                    .where(VehicleModelDefinition.id == vehicle_id)
                    .values(
                        marketing_name=m_name,
                        technical_code=t_code or prev_tcode,
                        engine_capacity=ccm,
                        power_kw=kw,
                        features_json=gold_data,  # full technical JSON (oil, tires, etc.)
                        status='gold_enriched',
                        updated_at=func.now()
                    )
                )
                logger.info(f"✨ GOLD ENRICHED: {make} {m_name} ({ccm} ccm, {kw} kW)")
            else:
                # On failure, requeue the record and bump the attempt counter.
                await db.execute(
                    update(VehicleModelDefinition)
                    .where(VehicleModelDefinition.id == vehicle_id)
                    .values(
                        status='awaiting_ai_synthesis',
                        attempts=prev_attempts + 1,
                        last_error="AI extraction failed or returned empty"
                    )
                )
                logger.warning(f"⚠️ Sikertelen dúsítás: {make} {model}")

            await db.commit()

    async def run(self):
        """Main loop: pull prioritized batches and enrich them forever."""
        logger.info("🚀 Robot 2.2 (Alchemist) ONLINE - Prioritásos feldolgozás")
        while True:
            async with SessionLocal() as db:
                # --- PRIORITY LOGIC (matches the Researcher bot) ---
                priorities = case(
                    (and_(VehicleModelDefinition.vehicle_type == 'car',
                          VehicleModelDefinition.make.in_(['SUZUKI', 'TOYOTA', 'SKODA', 'VOLKSWAGEN', 'OPEL'])), 1),
                    (VehicleModelDefinition.vehicle_type == 'car', 2),
                    (and_(VehicleModelDefinition.vehicle_type == 'motorcycle',
                          VehicleModelDefinition.make.in_(['HONDA', 'YAMAHA', 'SUZUKI', 'KAWASAKI'])), 3),
                    else_=4
                )

                # Highest priority first, then least-recently-updated records.
                stmt = select(VehicleModelDefinition.id).where(
                    VehicleModelDefinition.status == 'awaiting_ai_synthesis'
                ).order_by(priorities, VehicleModelDefinition.updated_at.asc()).limit(self.batch_size)

                res = await db.execute(stmt)
                ids = [r[0] for r in res.fetchall()]

            # FIX: sleep and per-record processing moved outside the session
            # block so a DB connection is not held open while idling.
            if not ids:
                # Queue is empty: rest and wait for the harvester to refill it.
                await asyncio.sleep(20)
                continue

            for vid in ids:
                await self.synthesize_vehicle(vid)
                # Quadro P4000 cool-down and Ollama API back-pressure.
                await asyncio.sleep(self.delay_between_records)


if __name__ == "__main__":
    asyncio.run(AlchemistBot().run())
|
||||
@@ -1,208 +1,136 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import datetime
|
||||
from sqlalchemy import text
|
||||
import sys
|
||||
from sqlalchemy import text, select
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.asset import AssetCatalog
|
||||
from app.models.vehicle_definitions import VehicleModelDefinition
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger("Robot-v1.0.13-Global-Hunter")
|
||||
# Logolás beállítása
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s [%(levelname)s] %(name)s: %(message)s'
|
||||
)
|
||||
logger = logging.getLogger("Hunter-v2.4-Paginator")
|
||||
|
||||
class CatalogMaster:
|
||||
"""
|
||||
Master Hunter Robot v1.0.13 - Global Hunter Edition
|
||||
- Holland (RDW), Brit (DVLA) és Amerikai (NHTSA) adatbázis integráció.
|
||||
- Ratio-Filter: Kiszűri a 0.19-es kW/kg arányszámokat.
|
||||
- Multi-field Power Discovery: Minden lehetséges mezőből kinyeri a kW-ot.
|
||||
- Dinamikus évjárat kezelés a duplikációk ellen.
|
||||
"""
|
||||
|
||||
# API Végpontok
|
||||
class CatalogHunter:
|
||||
RDW_MAIN = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"
|
||||
RDW_FUEL = "https://opendata.rdw.nl/resource/8ys7-d773.json"
|
||||
RDW_AXLE = "https://opendata.rdw.nl/resource/3huj-srit.json"
|
||||
RDW_BODY = "https://opendata.rdw.nl/resource/vezc-m2t6.json"
|
||||
|
||||
UK_DVLA = "https://driver-vehicle-licensing.api.gov.uk/vehicle-enquiry/v1/vehicles"
|
||||
US_NHTSA = "https://vpic.nhtsa.dot.gov/api/vehicles/DecodeVinValuesBatch/"
|
||||
|
||||
RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
|
||||
UK_API_KEY = os.getenv("UK_DVLA_API_KEY")
|
||||
|
||||
HEADERS_RDW = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}
|
||||
HEADERS_UK = {"x-api-key": UK_API_KEY, "Content-Type": "application/json"} if UK_API_KEY else {}
|
||||
|
||||
CATEGORY_MAP = {
|
||||
"Personenauto": "car",
|
||||
"Motorfiets": "motorcycle",
|
||||
"Bedrijfsauto": "truck",
|
||||
"Vrachtwagen": "truck",
|
||||
"Opleggertrekker": "truck",
|
||||
"Bus": "bus",
|
||||
"Aanhangwagen": "trailer",
|
||||
"Oplegger": "trailer",
|
||||
"Landbouw- of bosbouwtrekker": "agricultural",
|
||||
"camper": "camper"
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def clean_kw(cls, val):
|
||||
"""Speciális kW tisztító: ignorálja az 1.0 alatti arányszámokat."""
|
||||
async def get_total_count(cls, client, make_name):
|
||||
"""Lekéri, összesen hány rekord létezik az adott márkához."""
|
||||
query_filter = f"upper(merk) like '%{make_name.upper()}%'"
|
||||
params = {
|
||||
"$where": query_filter,
|
||||
"$select": "count(*)"
|
||||
}
|
||||
try:
|
||||
if val is None: return None
|
||||
f_val = float(str(val).replace(',', '.'))
|
||||
if 0 < f_val < 1.0: return None # Ez csak arányszám (kW/kg)
|
||||
v = int(f_val)
|
||||
return v if v > 0 else None
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def clean_int(cls, val):
|
||||
"""Általános egész szám tisztító."""
|
||||
try:
|
||||
if val is None: return None
|
||||
return int(float(str(val).replace(',', '.')))
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
async def fetch_api(cls, url, params=None, headers=None, method="GET", json_data=None):
|
||||
"""Univerzális API hívó sebességkorlátozással."""
|
||||
async with httpx.AsyncClient(headers=headers, follow_redirects=True) as client:
|
||||
try:
|
||||
await asyncio.sleep(1.2) # Biztonsági késleltetés
|
||||
if method == "POST":
|
||||
resp = await client.post(url, json=json_data, timeout=30)
|
||||
else:
|
||||
resp = await client.get(url, params=params, timeout=30)
|
||||
return resp.json() if resp.status_code in [200, 201] else []
|
||||
except Exception as e:
|
||||
logger.error(f"❌ API Hiba ({url}): {e}")
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
async def get_deep_tech(cls, plate, main_kw=None, vin=None):
|
||||
"""Nemzetközi dúsítás: Holland -> Brit -> Amerikai sorrendben."""
|
||||
res = {"kw": cls.clean_kw(main_kw), "fuel": "Unknown", "axles": None, "body": "Standard", "euro": None}
|
||||
|
||||
# 1. HOLLAND (RDW) DÚSÍTÁS
|
||||
fuel_data = await cls.fetch_api(cls.RDW_FUEL, {"kenteken": plate}, headers=cls.HEADERS_RDW)
|
||||
if fuel_data:
|
||||
f0 = fuel_data[0]
|
||||
if not res["kw"]:
|
||||
res["kw"] = cls.clean_kw(f0.get("nettomaximumvermogen") or f0.get("netto_maximum_vermogen"))
|
||||
res["fuel"] = f0.get("brandstof_omschrijving", "Unknown")
|
||||
res["euro"] = f0.get("uitlaatemissieniveau")
|
||||
|
||||
# 2. BRIT (DVLA) ELLENŐRZÉS (Ha van UK kulcs és még hiányzik adat)
|
||||
if cls.UK_API_KEY and (not res["kw"] or not res["euro"]):
|
||||
uk_data = await cls.fetch_api(cls.UK_DVLA, method="POST", json_data={"registrationNumber": plate}, headers=cls.HEADERS_UK)
|
||||
if uk_data:
|
||||
res["kw"] = res["kw"] or cls.clean_kw(uk_data.get("engineCapacity")) # Brit adatok finomítása
|
||||
res["euro"] = res["euro"] or uk_data.get("euroStatus")
|
||||
|
||||
# 3. AMERIKAI (NHTSA) KUTATÁS (Ha van alvázszám)
|
||||
if vin and len(vin) == 17:
|
||||
us_data = await cls.fetch_api(cls.US_NHTSA, params={"format": "json", "data": vin})
|
||||
if us_data and "Results" in us_data:
|
||||
# Az amerikai adatbázisból kinyerjük a lóerőt (HP), ha a kW még mindig nincs meg
|
||||
hp = us_data["Results"][0].get("EngineHP")
|
||||
if hp and not res["kw"]:
|
||||
res["kw"] = int(float(hp) * 0.7457) # HP -> kW konverzió
|
||||
|
||||
# RDW Extra adatok (Tengely, Karosszéria)
|
||||
axle = await cls.fetch_api(cls.RDW_AXLE, {"kenteken": plate}, headers=cls.HEADERS_RDW)
|
||||
if axle: res["axles"] = cls.clean_int(axle[0].get("aantal_assen"))
|
||||
|
||||
body = await cls.fetch_api(cls.RDW_BODY, {"kenteken": plate}, headers=cls.HEADERS_RDW)
|
||||
if body: res["body"] = body[0].get("carrosserie_omschrijving", "Standard")
|
||||
|
||||
return res
|
||||
resp = await client.get(cls.RDW_MAIN, params=params, headers=cls.HEADERS_RDW)
|
||||
if resp.status_code == 200:
|
||||
data = resp.json()
|
||||
return int(data[0]['count'])
|
||||
return 0
|
||||
except Exception as e:
|
||||
logger.error(f"⚠️ Nem sikerült a számlálás: {e}")
|
||||
return 0
|
||||
|
||||
@classmethod
|
||||
async def process_make(cls, db, task_id, make_name):
|
||||
logger.info(f"🚀 >>> {make_name} GlobalHunter v1.0.13 INDUL...")
|
||||
offset, limit, total_saved = 0, 1000, 0
|
||||
unique_variants = {}
|
||||
|
||||
while True:
|
||||
params = {"merk": make_name.upper(), "$limit": limit, "$offset": offset}
|
||||
main_data = await cls.fetch_api(cls.RDW_MAIN, params, headers=cls.HEADERS_RDW)
|
||||
if not main_data: break
|
||||
|
||||
for item in main_data:
|
||||
plate = item.get("kenteken")
|
||||
if not plate: continue
|
||||
|
||||
model = str(item.get("handelsbenaming", "Unknown")).upper()
|
||||
ccm = cls.clean_int(item.get("cilinderinhoud"))
|
||||
weight = cls.clean_int(item.get("massa_ledig_voertuig") or item.get("massa_rijklaar"))
|
||||
kw_candidate = item.get("netto_maximum_vermogen") or item.get("vermogen_massarijklaar")
|
||||
|
||||
raw_date = item.get("datum_eerste_toelating")
|
||||
prod_year = int(str(raw_date)[:4]) if raw_date else 2024
|
||||
|
||||
v_class = cls.CATEGORY_MAP.get(item.get("voertuigsoort"), "other")
|
||||
if "kampeerwagen" in str(item.get("inrichting", "")).lower(): v_class = "camper"
|
||||
|
||||
# Variáns kulcs: Modell + CCM + Súly + kW + Év = Egyedi technikai ujjlenyomat
|
||||
variant_key = f"{model}-{ccm}-{weight}-{v_class}-{kw_candidate}-{prod_year}"
|
||||
|
||||
if variant_key not in unique_variants:
|
||||
unique_variants[variant_key] = {
|
||||
"model": model, "ccm": ccm, "weight": weight, "v_class": v_class,
|
||||
"plate": plate, "main_kw": kw_candidate, "prod_year": prod_year,
|
||||
"vin": item.get("vin") # Ha az RDW-ben benne van a VIN
|
||||
}
|
||||
|
||||
if len(main_data) < limit or offset > 90000: break
|
||||
offset += limit
|
||||
|
||||
logger.info(f"📊 {len(unique_variants)} egyedi variáns kutatása indul...")
|
||||
|
||||
for key, v in unique_variants.items():
|
||||
deep = await cls.get_deep_tech(v["plate"], main_kw=v["main_kw"], vin=v["vin"])
|
||||
try:
|
||||
db_item = AssetCatalog(
|
||||
make=make_name.upper(), model=v["model"], vehicle_class=v["v_class"],
|
||||
fuel_type=deep["fuel"], power_kw=deep["kw"], engine_capacity=v["ccm"],
|
||||
max_weight_kg=v["weight"], axle_count=deep["axles"], body_type=deep["body"],
|
||||
year_from=v["prod_year"], euro_class=deep["euro"],
|
||||
factory_data={
|
||||
"source": "GlobalHunter-v1.0.13",
|
||||
"sample_plate": v["plate"],
|
||||
"enriched_at": str(datetime.datetime.now())
|
||||
}
|
||||
)
|
||||
db.add(db_item)
|
||||
await db.commit()
|
||||
total_saved += 1
|
||||
if total_saved % 50 == 0: logger.info(f"✅ {total_saved} variáns elmentve.")
|
||||
except Exception:
|
||||
await db.rollback()
|
||||
continue
|
||||
clean_make = make_name.strip().upper()
|
||||
|
||||
await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
|
||||
await db.commit()
|
||||
logger.info(f"🏁 {make_name} KÉSZ. {total_saved} rekord rögzítve.")
|
||||
async with httpx.AsyncClient(timeout=60) as client:
|
||||
# 1. LÉPÉS: Megszámoljuk az összes rekordot
|
||||
total_available = await cls.get_total_count(client, clean_make)
|
||||
logger.info(f"🚀 >>> {clean_make} feltérképezése: {total_available} variáns található az RDW-ben.")
|
||||
|
||||
if total_available == 0:
|
||||
logger.warning(f"⚠️ {clean_make} márkához nem érkezett adat az API-tól.")
|
||||
await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
|
||||
await db.commit()
|
||||
return
|
||||
|
||||
# 2. LÉPÉS: Lapozás (Pagination)
|
||||
limit = 1000
|
||||
offset = 0
|
||||
total_added = 0
|
||||
|
||||
while offset < total_available:
|
||||
logger.info(f"📑 Lapozás: {clean_make} | {offset} -> {offset + limit} (Összesen: {total_available})")
|
||||
|
||||
query_filter = f"upper(merk) like '%{clean_make}%'"
|
||||
params = {
|
||||
"$where": query_filter,
|
||||
"$limit": limit,
|
||||
"$offset": offset,
|
||||
"$order": ":id" # Socrata stabil lapozáshoz javasolt
|
||||
}
|
||||
|
||||
resp = await client.get(cls.RDW_MAIN, params=params, headers=cls.HEADERS_RDW)
|
||||
if resp.status_code != 200:
|
||||
logger.error(f"❌ Hiba a lapozásnál ({offset}): {resp.status_code}")
|
||||
break
|
||||
|
||||
batch = resp.json()
|
||||
if not batch: break
|
||||
|
||||
# Feldolgozás
|
||||
for item in batch:
|
||||
res_make = str(item.get("merk", clean_make)).upper()
|
||||
model = str(item.get("handelsbenaming", "Unknown")).upper()
|
||||
ccm = int(float(item.get("cilinderinhoud") or 0))
|
||||
kw = int(float(item.get("netto_maximum_vermogen") or 0))
|
||||
|
||||
# Deduplikáció check
|
||||
stmt = select(VehicleModelDefinition.id).where(
|
||||
VehicleModelDefinition.make == res_make,
|
||||
VehicleModelDefinition.marketing_name == model,
|
||||
VehicleModelDefinition.engine_capacity == ccm,
|
||||
VehicleModelDefinition.power_kw == kw
|
||||
).limit(1)
|
||||
|
||||
exists = (await db.execute(stmt)).scalar_one_or_none()
|
||||
if not exists:
|
||||
db.add(VehicleModelDefinition(
|
||||
make=res_make,
|
||||
technical_code=item.get("kenteken"),
|
||||
marketing_name=model,
|
||||
engine_capacity=ccm,
|
||||
power_kw=kw if kw > 0 else None,
|
||||
status="unverified",
|
||||
source="HUNTER-v2.4-PAGINATED"
|
||||
))
|
||||
total_added += 1
|
||||
|
||||
await db.commit() # Lapvégi mentés
|
||||
offset += limit
|
||||
|
||||
# 3. LÉPÉS: Befejezés
|
||||
await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
|
||||
await db.commit()
|
||||
logger.info(f"✅ {clean_make} KÉSZ. {total_available} rekord átnézve, {total_added} új variáns stagingbe mentve.")
|
||||
|
||||
@classmethod
|
||||
async def run(cls):
|
||||
logger.info("🤖 Robot 1.0.13 (Global Hunter) ONLINE")
|
||||
logger.info("🤖 Robot 1 (Hunter) ONLINE - Paginator v2.4")
|
||||
while True:
|
||||
async with SessionLocal() as db:
|
||||
res = await db.execute(text("SELECT id, make FROM data.catalog_discovery WHERE status = 'pending' LIMIT 1"))
|
||||
query = text("""
|
||||
SELECT id, make FROM data.catalog_discovery
|
||||
WHERE status = 'pending'
|
||||
ORDER BY
|
||||
CASE WHEN make IN ('SUZUKI', 'TOYOTA', 'SKODA', 'VOLKSWAGEN', 'OPEL') THEN 1 ELSE 2 END,
|
||||
id ASC
|
||||
LIMIT 1 FOR UPDATE SKIP LOCKED
|
||||
""")
|
||||
res = await db.execute(query)
|
||||
task = res.fetchone()
|
||||
if task:
|
||||
await cls.process_make(db, task[0], task[1])
|
||||
else:
|
||||
logger.info("😴 Várólista üres. Alvás 60 mp...")
|
||||
await asyncio.sleep(60)
|
||||
await asyncio.sleep(1)
|
||||
await asyncio.sleep(20)
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(CatalogMaster.run())
|
||||
asyncio.run(CatalogHunter.run())
|
||||
208
backend/app/workers/catalog_robot1.3_old.py
Normal file
208
backend/app/workers/catalog_robot1.3_old.py
Normal file
@@ -0,0 +1,208 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import datetime
|
||||
from sqlalchemy import text
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.asset import AssetCatalog
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger("Robot-v1.0.13-Global-Hunter")
|
||||
|
||||
class CatalogMaster:
|
||||
"""
|
||||
Master Hunter Robot v1.0.13 - Global Hunter Edition
|
||||
- Holland (RDW), Brit (DVLA) és Amerikai (NHTSA) adatbázis integráció.
|
||||
- Ratio-Filter: Kiszűri a 0.19-es kW/kg arányszámokat.
|
||||
- Multi-field Power Discovery: Minden lehetséges mezőből kinyeri a kW-ot.
|
||||
- Dinamikus évjárat kezelés a duplikációk ellen.
|
||||
"""
|
||||
|
||||
# API Végpontok
|
||||
RDW_MAIN = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"
|
||||
RDW_FUEL = "https://opendata.rdw.nl/resource/8ys7-d773.json"
|
||||
RDW_AXLE = "https://opendata.rdw.nl/resource/3huj-srit.json"
|
||||
RDW_BODY = "https://opendata.rdw.nl/resource/vezc-m2t6.json"
|
||||
|
||||
UK_DVLA = "https://driver-vehicle-licensing.api.gov.uk/vehicle-enquiry/v1/vehicles"
|
||||
US_NHTSA = "https://vpic.nhtsa.dot.gov/api/vehicles/DecodeVinValuesBatch/"
|
||||
|
||||
RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
|
||||
UK_API_KEY = os.getenv("UK_DVLA_API_KEY")
|
||||
|
||||
HEADERS_RDW = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}
|
||||
HEADERS_UK = {"x-api-key": UK_API_KEY, "Content-Type": "application/json"} if UK_API_KEY else {}
|
||||
|
||||
CATEGORY_MAP = {
|
||||
"Personenauto": "car",
|
||||
"Motorfiets": "motorcycle",
|
||||
"Bedrijfsauto": "truck",
|
||||
"Vrachtwagen": "truck",
|
||||
"Opleggertrekker": "truck",
|
||||
"Bus": "bus",
|
||||
"Aanhangwagen": "trailer",
|
||||
"Oplegger": "trailer",
|
||||
"Landbouw- of bosbouwtrekker": "agricultural",
|
||||
"camper": "camper"
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def clean_kw(cls, val):
|
||||
"""Speciális kW tisztító: ignorálja az 1.0 alatti arányszámokat."""
|
||||
try:
|
||||
if val is None: return None
|
||||
f_val = float(str(val).replace(',', '.'))
|
||||
if 0 < f_val < 1.0: return None # Ez csak arányszám (kW/kg)
|
||||
v = int(f_val)
|
||||
return v if v > 0 else None
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def clean_int(cls, val):
|
||||
"""Általános egész szám tisztító."""
|
||||
try:
|
||||
if val is None: return None
|
||||
return int(float(str(val).replace(',', '.')))
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
async def fetch_api(cls, url, params=None, headers=None, method="GET", json_data=None):
|
||||
"""Univerzális API hívó sebességkorlátozással."""
|
||||
async with httpx.AsyncClient(headers=headers, follow_redirects=True) as client:
|
||||
try:
|
||||
await asyncio.sleep(1.2) # Biztonsági késleltetés
|
||||
if method == "POST":
|
||||
resp = await client.post(url, json=json_data, timeout=30)
|
||||
else:
|
||||
resp = await client.get(url, params=params, timeout=30)
|
||||
return resp.json() if resp.status_code in [200, 201] else []
|
||||
except Exception as e:
|
||||
logger.error(f"❌ API Hiba ({url}): {e}")
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
async def get_deep_tech(cls, plate, main_kw=None, vin=None):
|
||||
"""Nemzetközi dúsítás: Holland -> Brit -> Amerikai sorrendben."""
|
||||
res = {"kw": cls.clean_kw(main_kw), "fuel": "Unknown", "axles": None, "body": "Standard", "euro": None}
|
||||
|
||||
# 1. HOLLAND (RDW) DÚSÍTÁS
|
||||
fuel_data = await cls.fetch_api(cls.RDW_FUEL, {"kenteken": plate}, headers=cls.HEADERS_RDW)
|
||||
if fuel_data:
|
||||
f0 = fuel_data[0]
|
||||
if not res["kw"]:
|
||||
res["kw"] = cls.clean_kw(f0.get("nettomaximumvermogen") or f0.get("netto_maximum_vermogen"))
|
||||
res["fuel"] = f0.get("brandstof_omschrijving", "Unknown")
|
||||
res["euro"] = f0.get("uitlaatemissieniveau")
|
||||
|
||||
# 2. BRIT (DVLA) ELLENŐRZÉS (Ha van UK kulcs és még hiányzik adat)
|
||||
if cls.UK_API_KEY and (not res["kw"] or not res["euro"]):
|
||||
uk_data = await cls.fetch_api(cls.UK_DVLA, method="POST", json_data={"registrationNumber": plate}, headers=cls.HEADERS_UK)
|
||||
if uk_data:
|
||||
res["kw"] = res["kw"] or cls.clean_kw(uk_data.get("engineCapacity")) # Brit adatok finomítása
|
||||
res["euro"] = res["euro"] or uk_data.get("euroStatus")
|
||||
|
||||
# 3. AMERIKAI (NHTSA) KUTATÁS (Ha van alvázszám)
|
||||
if vin and len(vin) == 17:
|
||||
us_data = await cls.fetch_api(cls.US_NHTSA, params={"format": "json", "data": vin})
|
||||
if us_data and "Results" in us_data:
|
||||
# Az amerikai adatbázisból kinyerjük a lóerőt (HP), ha a kW még mindig nincs meg
|
||||
hp = us_data["Results"][0].get("EngineHP")
|
||||
if hp and not res["kw"]:
|
||||
res["kw"] = int(float(hp) * 0.7457) # HP -> kW konverzió
|
||||
|
||||
# RDW Extra adatok (Tengely, Karosszéria)
|
||||
axle = await cls.fetch_api(cls.RDW_AXLE, {"kenteken": plate}, headers=cls.HEADERS_RDW)
|
||||
if axle: res["axles"] = cls.clean_int(axle[0].get("aantal_assen"))
|
||||
|
||||
body = await cls.fetch_api(cls.RDW_BODY, {"kenteken": plate}, headers=cls.HEADERS_RDW)
|
||||
if body: res["body"] = body[0].get("carrosserie_omschrijving", "Standard")
|
||||
|
||||
return res
|
||||
|
||||
@classmethod
|
||||
async def process_make(cls, db, task_id, make_name):
|
||||
logger.info(f"🚀 >>> {make_name} GlobalHunter v1.0.13 INDUL...")
|
||||
offset, limit, total_saved = 0, 1000, 0
|
||||
unique_variants = {}
|
||||
|
||||
while True:
|
||||
params = {"merk": make_name.upper(), "$limit": limit, "$offset": offset}
|
||||
main_data = await cls.fetch_api(cls.RDW_MAIN, params, headers=cls.HEADERS_RDW)
|
||||
if not main_data: break
|
||||
|
||||
for item in main_data:
|
||||
plate = item.get("kenteken")
|
||||
if not plate: continue
|
||||
|
||||
model = str(item.get("handelsbenaming", "Unknown")).upper()
|
||||
ccm = cls.clean_int(item.get("cilinderinhoud"))
|
||||
weight = cls.clean_int(item.get("massa_ledig_voertuig") or item.get("massa_rijklaar"))
|
||||
kw_candidate = item.get("netto_maximum_vermogen") or item.get("vermogen_massarijklaar")
|
||||
|
||||
raw_date = item.get("datum_eerste_toelating")
|
||||
prod_year = int(str(raw_date)[:4]) if raw_date else 2024
|
||||
|
||||
v_class = cls.CATEGORY_MAP.get(item.get("voertuigsoort"), "other")
|
||||
if "kampeerwagen" in str(item.get("inrichting", "")).lower(): v_class = "camper"
|
||||
|
||||
# Variáns kulcs: Modell + CCM + Súly + kW + Év = Egyedi technikai ujjlenyomat
|
||||
variant_key = f"{model}-{ccm}-{weight}-{v_class}-{kw_candidate}-{prod_year}"
|
||||
|
||||
if variant_key not in unique_variants:
|
||||
unique_variants[variant_key] = {
|
||||
"model": model, "ccm": ccm, "weight": weight, "v_class": v_class,
|
||||
"plate": plate, "main_kw": kw_candidate, "prod_year": prod_year,
|
||||
"vin": item.get("vin") # Ha az RDW-ben benne van a VIN
|
||||
}
|
||||
|
||||
if len(main_data) < limit or offset > 90000: break
|
||||
offset += limit
|
||||
|
||||
logger.info(f"📊 {len(unique_variants)} egyedi variáns kutatása indul...")
|
||||
|
||||
for key, v in unique_variants.items():
|
||||
deep = await cls.get_deep_tech(v["plate"], main_kw=v["main_kw"], vin=v["vin"])
|
||||
try:
|
||||
db_item = AssetCatalog(
|
||||
make=make_name.upper(), model=v["model"], vehicle_class=v["v_class"],
|
||||
fuel_type=deep["fuel"], power_kw=deep["kw"], engine_capacity=v["ccm"],
|
||||
max_weight_kg=v["weight"], axle_count=deep["axles"], body_type=deep["body"],
|
||||
year_from=v["prod_year"], euro_class=deep["euro"],
|
||||
factory_data={
|
||||
"source": "GlobalHunter-v1.0.13",
|
||||
"sample_plate": v["plate"],
|
||||
"enriched_at": str(datetime.datetime.now())
|
||||
}
|
||||
)
|
||||
db.add(db_item)
|
||||
await db.commit()
|
||||
total_saved += 1
|
||||
if total_saved % 50 == 0: logger.info(f"✅ {total_saved} variáns elmentve.")
|
||||
except Exception:
|
||||
await db.rollback()
|
||||
continue
|
||||
|
||||
await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
|
||||
await db.commit()
|
||||
logger.info(f"🏁 {make_name} KÉSZ. {total_saved} rekord rögzítve.")
|
||||
|
||||
@classmethod
|
||||
async def run(cls):
|
||||
logger.info("🤖 Robot 1.0.13 (Global Hunter) ONLINE")
|
||||
while True:
|
||||
async with SessionLocal() as db:
|
||||
res = await db.execute(text("SELECT id, make FROM data.catalog_discovery WHERE status = 'pending' LIMIT 1"))
|
||||
task = res.fetchone()
|
||||
if task:
|
||||
await cls.process_make(db, task[0], task[1])
|
||||
else:
|
||||
logger.info("😴 Várólista üres. Alvás 60 mp...")
|
||||
await asyncio.sleep(60)
|
||||
await asyncio.sleep(1)
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(CatalogMaster.run())
|
||||
270
backend/app/workers/catalog_robot1.4.1.py
Normal file
270
backend/app/workers/catalog_robot1.4.1.py
Normal file
@@ -0,0 +1,270 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import datetime
|
||||
import sys
|
||||
from sqlalchemy import text
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.asset import AssetCatalog
|
||||
|
||||
# --- KÉNYSZERÍTETT IDŐBÉLYEGES LOGOLÁS ---
|
||||
# Töröljük az esetleges korábbi konfigurációkat, hogy az időbélyeg garantált legyen
|
||||
for handler in logging.root.handlers[:]:
|
||||
logging.root.removeHandler(handler)
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s.%(msecs)03d [%(levelname)s] %(name)s: %(message)s',
|
||||
datefmt='%Y-%m-%d %H:%M:%S',
|
||||
stream=sys.stdout
|
||||
)
|
||||
logger = logging.getLogger("Robot-v1.4.1-Powerhouse")
|
||||
|
||||
class CatalogMaster:
|
||||
"""
|
||||
Master Hunter Robot v1.4.1 - Powerhouse Edition
|
||||
- Párhuzamos Holland (RDW) és Amerikai (NHTSA Batch) Discovery.
|
||||
- Garantált időbélyeges naplózás.
|
||||
- Multi-Worker Safe (FOR UPDATE SKIP LOCKED).
|
||||
- Rate Limit (429) védelem.
|
||||
"""
|
||||
|
||||
# API Végpontok
|
||||
RDW_MAIN = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"
|
||||
RDW_FUEL = "https://opendata.rdw.nl/resource/8ys7-d773.json"
|
||||
RDW_AXLE = "https://opendata.rdw.nl/resource/3huj-srit.json"
|
||||
RDW_BODY = "https://opendata.rdw.nl/resource/vezc-m2t6.json"
|
||||
US_BATCH = "https://vpic.nhtsa.dot.gov/api/vehicles/GetModelsForMakeYear/make/{make}/modelyear/{year}?format=json"
|
||||
|
||||
# BRIT API (Token után aktiválható)
|
||||
UK_DVLA = "https://driver-vehicle-licensing.api.gov.uk/vehicle-enquiry/v1/vehicles"
|
||||
|
||||
RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
|
||||
UK_API_KEY = os.getenv("UK_DVLA_API_KEY")
|
||||
|
||||
HEADERS_RDW = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}
|
||||
HEADERS_UK = {"x-api-key": UK_API_KEY, "Content-Type": "application/json"} if UK_API_KEY else {}
|
||||
|
||||
CATEGORY_MAP = {
|
||||
"Personenauto": "car",
|
||||
"Motorfiets": "motorcycle",
|
||||
"Bedrijfsauto": "truck",
|
||||
"Vrachtwagen": "truck",
|
||||
"Opleggertrekker": "truck",
|
||||
"Bus": "bus",
|
||||
"Aanhangwagen": "trailer",
|
||||
"Oplegger": "trailer",
|
||||
"Landbouw- of bosbouwtrekker": "agricultural",
|
||||
"camper": "camper"
|
||||
}
|
||||
|
||||
# Szabályozzuk a párhuzamos dúsítást (egyszerre max 5 kérés robotpéldányonként)
|
||||
semaphore = asyncio.Semaphore(5)
|
||||
|
||||
@classmethod
|
||||
def clean_kw(cls, val):
|
||||
try:
|
||||
if val is None: return None
|
||||
f_val = float(str(val).replace(',', '.'))
|
||||
if 0 < f_val < 1.0: return None
|
||||
v = int(f_val)
|
||||
return v if v > 0 else None
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def clean_int(cls, val):
|
||||
try:
|
||||
if val is None: return None
|
||||
return int(float(str(val).replace(',', '.')))
|
||||
except (ValueError, TypeError):
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
async def fetch_api(cls, url, params=None, headers=None, method="GET", json_data=None):
|
||||
"""Intelligens API hívó 429-es védelemmel és időzített logolással."""
|
||||
async with httpx.AsyncClient(headers=headers, follow_redirects=True) as client:
|
||||
for attempt in range(3):
|
||||
try:
|
||||
if method == "POST":
|
||||
resp = await client.post(url, json=json_data, timeout=30)
|
||||
else:
|
||||
resp = await client.get(url, params=params, timeout=30)
|
||||
|
||||
if resp.status_code == 429:
|
||||
wait_time = (attempt + 1) * 5
|
||||
logger.warning(f"⚠️ RATE LIMIT! Várakozás {wait_time}mp: {url}")
|
||||
await asyncio.sleep(wait_time)
|
||||
continue
|
||||
|
||||
return resp.json() if resp.status_code in [200, 201] else []
|
||||
except Exception as e:
|
||||
logger.error(f"❌ API Hiba ({url}): {e}")
|
||||
await asyncio.sleep(2)
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
async def get_deep_tech(cls, plate, main_kw=None, vin=None):
|
||||
"""Mély dúsítás több forrásból párhuzamosan."""
|
||||
async with cls.semaphore:
|
||||
res = {"kw": cls.clean_kw(main_kw), "fuel": "Unknown", "axles": None, "body": "Standard", "euro": None}
|
||||
|
||||
# --- 1. HOLLAND (RDW) DÚSÍTÁS ---
|
||||
fuel_task = cls.fetch_api(cls.RDW_FUEL, {"kenteken": plate}, headers=cls.HEADERS_RDW)
|
||||
axle_task = cls.fetch_api(cls.RDW_AXLE, {"kenteken": plate}, headers=cls.HEADERS_RDW)
|
||||
|
||||
fuel_data, axle_data = await asyncio.gather(fuel_task, axle_task)
|
||||
|
||||
if fuel_data:
|
||||
f0 = fuel_data[0]
|
||||
if not res["kw"]:
|
||||
res["kw"] = cls.clean_kw(f0.get("nettomaximumvermogen") or f0.get("netto_maximum_vermogen"))
|
||||
res["fuel"] = f0.get("brandstof_omschrijving", "Unknown")
|
||||
res["euro"] = f0.get("uitlaatemissieniveau")
|
||||
|
||||
if axle_data:
|
||||
res["axles"] = cls.clean_int(axle_data[0].get("aantal_assen"))
|
||||
|
||||
# --- 2. BRIT (DVLA) ELLENŐRZÉS (AKTIVÁLHATÓ KULCCSAL) ---
|
||||
"""
|
||||
if cls.UK_API_KEY and (not res["kw"] or not res["euro"]):
|
||||
uk_data = await cls.fetch_api(cls.UK_DVLA, method="POST",
|
||||
json_data={"registrationNumber": plate},
|
||||
headers=cls.HEADERS_UK)
|
||||
if uk_data and not isinstance(uk_data, list):
|
||||
res["kw"] = res["kw"] or cls.clean_kw(uk_data.get("engineCapacity"))
|
||||
res["euro"] = res["euro"] or uk_data.get("euroStatus")
|
||||
"""
|
||||
return res
|
||||
|
||||
@classmethod
|
||||
async def discover_holland(cls, make_name, limit=1000):
|
||||
"""Holland Discovery ág: rendszámok gyűjtése."""
|
||||
offset, variants = 0, {}
|
||||
while True:
|
||||
params = {"merk": make_name.upper(), "$limit": limit, "$offset": offset}
|
||||
data = await cls.fetch_api(cls.RDW_MAIN, params, headers=cls.HEADERS_RDW)
|
||||
if not data: break
|
||||
|
||||
for item in data:
|
||||
plate = item.get("kenteken")
|
||||
if not plate: continue
|
||||
model = str(item.get("handelsbenaming", "Unknown")).upper()
|
||||
ccm = cls.clean_int(item.get("cilinderinhoud"))
|
||||
weight = cls.clean_int(item.get("massa_ledig_voertuig") or item.get("massa_rijklaar"))
|
||||
kw = item.get("netto_maximum_vermogen") or item.get("vermogen_massarijklaar")
|
||||
raw_date = item.get("datum_eerste_toelating")
|
||||
year = int(str(raw_date)[:4]) if raw_date else 2024
|
||||
|
||||
v_class = cls.CATEGORY_MAP.get(item.get("voertuigsoort"), "other")
|
||||
key = f"{model}-{ccm}-{weight}-{v_class}-{kw}-{year}"
|
||||
|
||||
if key not in variants:
|
||||
variants[key] = {
|
||||
"model": model, "ccm": ccm, "weight": weight, "v_class": v_class,
|
||||
"plate": plate, "main_kw": kw, "prod_year": year, "vin": item.get("vin")
|
||||
}
|
||||
if len(data) < limit: break
|
||||
offset += limit
|
||||
return variants
|
||||
|
||||
@classmethod
|
||||
async def discover_usa_batch(cls, make_name):
|
||||
"""Amerikai NHTSA Batch Discovery: Típusok gyűjtése."""
|
||||
variants = {}
|
||||
years = range(datetime.datetime.now().year - 5, datetime.datetime.now().year + 1)
|
||||
|
||||
async def fetch_year(year):
|
||||
url = cls.US_BATCH.format(make=make_name.upper(), year=year)
|
||||
logger.info(f"🇺🇸 USA Batch Discovery indítása: {make_name} ({year})")
|
||||
data = await cls.fetch_api(url)
|
||||
if data and "Results" in data:
|
||||
for m in data["Results"]:
|
||||
m_name = m.get("Model_Name", "Unknown").upper()
|
||||
key = f"US-{m_name}-{year}"
|
||||
if key not in variants:
|
||||
variants[key] = {
|
||||
"model": m_name, "ccm": None, "weight": None, "v_class": "car",
|
||||
"plate": "US-DISCOVERY", "main_kw": None, "prod_year": year, "vin": None
|
||||
}
|
||||
|
||||
await asyncio.gather(*(fetch_year(y) for y in years))
|
||||
return variants
|
||||
|
||||
@classmethod
|
||||
async def process_make(cls, db, task_id, make_name):
|
||||
logger.info(f"🚀 >>> {make_name} Powerhouse v1.4.1 INDUL...")
|
||||
|
||||
# Párhuzamos Discovery
|
||||
holland_task = cls.discover_holland(make_name)
|
||||
usa_task = cls.discover_usa_batch(make_name)
|
||||
|
||||
holland_variants, usa_variants = await asyncio.gather(holland_task, usa_task)
|
||||
all_variants = {**usa_variants, **holland_variants}
|
||||
|
||||
logger.info(f"📊 Összefésült variánsok száma: {len(all_variants)}")
|
||||
|
||||
async def enrich_and_save(v):
|
||||
deep = await cls.get_deep_tech(v["plate"], main_kw=v["main_kw"], vin=v["vin"])
|
||||
try:
|
||||
db_item = AssetCatalog(
|
||||
make=make_name.upper(), model=v["model"], vehicle_class=v["v_class"],
|
||||
fuel_type=deep["fuel"], power_kw=deep["kw"], engine_capacity=v["ccm"],
|
||||
max_weight_kg=v["weight"], axle_count=deep["axles"], body_type=deep["body"],
|
||||
year_from=v["prod_year"], euro_class=deep["euro"],
|
||||
factory_data={
|
||||
"source": "Powerhouse-v1.4.1",
|
||||
"discovery_nl": v["plate"] != "US-DISCOVERY",
|
||||
"enriched_at": str(datetime.datetime.now())
|
||||
}
|
||||
)
|
||||
return db_item
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
# Párhuzamos dúsítás (Semaphore korláttal)
|
||||
results = await asyncio.gather(*(enrich_and_save(v) for v in all_variants.values()))
|
||||
|
||||
total_saved = 0
|
||||
for item in results:
|
||||
if item:
|
||||
db.add(item)
|
||||
total_saved += 1
|
||||
|
||||
await db.commit()
|
||||
await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
|
||||
await db.commit()
|
||||
logger.info(f"🏁 {make_name} KÉSZ. {total_saved} egyedi rekord rögzítve.")
|
||||
|
||||
@classmethod
|
||||
async def run(cls):
|
||||
logger.info("🤖 Robot 1.4.1 (Powerhouse) ONLINE - Multi-Worker Safe Mode")
|
||||
while True:
|
||||
async with SessionLocal() as db:
|
||||
# SKIP LOCKED védelem a párhuzamos futtatáshoz
|
||||
query = text("""
|
||||
SELECT id, make FROM data.catalog_discovery
|
||||
WHERE status = 'pending'
|
||||
LIMIT 1
|
||||
FOR UPDATE SKIP LOCKED
|
||||
""")
|
||||
res = await db.execute(query)
|
||||
task = res.fetchone()
|
||||
|
||||
if task:
|
||||
task_id, make_name = task
|
||||
await db.execute(
|
||||
text("UPDATE data.catalog_discovery SET status = 'running' WHERE id = :id"),
|
||||
{"id": task_id}
|
||||
)
|
||||
await db.commit()
|
||||
await cls.process_make(db, task_id, make_name)
|
||||
else:
|
||||
logger.info("😴 Várólista üres vagy minden feladat foglalt. Alvás 60mp...")
|
||||
await asyncio.sleep(60)
|
||||
await asyncio.sleep(1)
|
||||
|
||||
if __name__ == "__main__":
|
||||
asyncio.run(CatalogMaster.run())
|
||||
272
backend/app/workers/catalog_robot1.4.py
Normal file
272
backend/app/workers/catalog_robot1.4.py
Normal file
@@ -0,0 +1,272 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import datetime
|
||||
from sqlalchemy import text
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.asset import AssetCatalog
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger("Robot-v1.4-Powerhouse")
|
||||
|
||||
class CatalogMaster:
    """Master Hunter Robot v1.4 - Powerhouse Edition.

    Discovery worker that harvests vehicle-catalog variants from public
    registries and enriches them with technical data:

    - Parallel Dutch (RDW) and US (NHTSA batch) discovery.
    - Prepared, commented-out British (DVLA) integration.
    - Async semaphore throttles concurrent technical enrichment calls.
    - Intelligent merge of variants across the global sources
      (Dutch entries win on key clashes because they carry a licence plate).
    """

    # API endpoints (RDW open-data Socrata resources)
    RDW_MAIN = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"
    RDW_FUEL = "https://opendata.rdw.nl/resource/8ys7-d773.json"
    RDW_AXLE = "https://opendata.rdw.nl/resource/3huj-srit.json"
    RDW_BODY = "https://opendata.rdw.nl/resource/vezc-m2t6.json"

    # US BATCH API: a single call returns every model of a make for a model year
    US_BATCH = "https://vpic.nhtsa.dot.gov/api/vehicles/GetModelsForMakeYear/make/{make}/modelyear/{year}?format=json"

    # UK API (disabled until an API token is available)
    # UK_DVLA = "https://driver-vehicle-licensing.api.gov.uk/vehicle-enquiry/v1/vehicles"

    RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
    UK_API_KEY = os.getenv("UK_DVLA_API_KEY")  # placeholder for the future token

    HEADERS_RDW = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}
    # HEADERS_UK = {"x-api-key": UK_API_KEY, "Content-Type": "application/json"} if UK_API_KEY else {}

    # Dutch RDW 'voertuigsoort' values mapped onto internal vehicle classes;
    # anything unmapped falls back to "other" at the lookup site.
    CATEGORY_MAP = {
        "Personenauto": "car",
        "Motorfiets": "motorcycle",
        "Bedrijfsauto": "truck",
        "Vrachtwagen": "truck",
        "Opleggertrekker": "truck",
        "Bus": "bus",
        "Aanhangwagen": "trailer",
        "Oplegger": "trailer",
        "Landbouw- of bosbouwtrekker": "agricultural",
        "camper": "camper"
    }

    # Cap parallel enrichment so the APIs don't ban us.
    # NOTE(review): the class docstring mentioned 10 parallel threads, but the
    # actual limit here is 5 — confirm which one is intended.
    semaphore = asyncio.Semaphore(5)

    @classmethod
    def clean_kw(cls, val):
        """Normalize a raw power value into a positive int (kW), or None.

        Accepts comma-decimal strings; values in (0, 1) are treated as
        bogus ratios and dropped.
        """
        try:
            if val is None: return None
            f_val = float(str(val).replace(',', '.'))
            if 0 < f_val < 1.0: return None
            v = int(f_val)
            return v if v > 0 else None
        except (ValueError, TypeError):
            return None

    @classmethod
    def clean_int(cls, val):
        """Parse a (possibly comma-decimal) value into an int, or None on failure."""
        try:
            if val is None: return None
            return int(float(str(val).replace(',', '.')))
        except (ValueError, TypeError):
            return None

    @classmethod
    async def fetch_api(cls, url, params=None, headers=None, method="GET", json_data=None):
        """GET/POST a JSON API with up to 3 attempts and HTTP 429 back-off.

        Returns the decoded JSON body on HTTP 200/201, otherwise an empty list.
        Exceptions are logged and retried after a short pause.
        """
        async with httpx.AsyncClient(headers=headers, follow_redirects=True) as client:
            for attempt in range(3):  # retry up to 3 times if needed
                try:
                    if method == "POST":
                        resp = await client.post(url, json=json_data, timeout=30)
                    else:
                        resp = await client.get(url, params=params, timeout=30)

                    if resp.status_code == 429:  # oops, we are too fast — rate limited
                        wait_time = (attempt + 1) * 5  # growing pause: 5s, 10s...
                        logger.warning(f"⚠️ RDW limit elérve! Pihenő {wait_time} mp...")
                        await asyncio.sleep(wait_time)
                        continue

                    return resp.json() if resp.status_code in [200, 201] else []
                except Exception as e:
                    logger.error(f"❌ API Hiba ({url}): {e}")
                    await asyncio.sleep(2)
            return []

    @classmethod
    async def get_deep_tech(cls, plate, main_kw=None, vin=None):
        """Deep enrichment from parallel sources, keyed by the NL licence plate.

        Returns a dict with keys: kw, fuel, axles, body, euro.
        """
        async with cls.semaphore:
            res = {"kw": cls.clean_kw(main_kw), "fuel": "Unknown", "axles": None, "body": "Standard", "euro": None}

            # --- 1. DUTCH (RDW) ENRICHMENT ---
            fuel_task = cls.fetch_api(cls.RDW_FUEL, {"kenteken": plate}, headers=cls.HEADERS_RDW)
            axle_task = cls.fetch_api(cls.RDW_AXLE, {"kenteken": plate}, headers=cls.HEADERS_RDW)

            # Fetch the Dutch datasets concurrently
            fuel_data, axle_data = await asyncio.gather(fuel_task, axle_task)

            if fuel_data:
                f0 = fuel_data[0]
                if not res["kw"]:
                    res["kw"] = cls.clean_kw(f0.get("nettomaximumvermogen") or f0.get("netto_maximum_vermogen"))
                res["fuel"] = f0.get("brandstof_omschrijving", "Unknown")
                res["euro"] = f0.get("uitlaatemissieniveau")

            if axle_data:
                res["axles"] = cls.clean_int(axle_data[0].get("aantal_assen"))

            # --- 2. BRITISH (DVLA) CHECK (DISABLED UNTIL TOKEN) ---
            """
            if cls.UK_API_KEY and (not res["kw"] or not res["euro"]):
                uk_data = await cls.fetch_api(cls.UK_DVLA, method="POST",
                                              json_data={"registrationNumber": plate},
                                              headers=cls.HEADERS_UK)
                if uk_data and not isinstance(uk_data, list):
                    res["kw"] = res["kw"] or cls.clean_kw(uk_data.get("engineCapacity"))
                    res["euro"] = res["euro"] or uk_data.get("euroStatus")
            """

            return res

    @classmethod
    async def discover_holland(cls, make_name, limit=1000):
        """Dutch discovery branch: page through RDW and dedupe into variants.

        Variants are keyed by model/ccm/weight/class/kw/year so each unique
        technical configuration is kept once, with a sample licence plate.
        """
        offset, variants = 0, {}
        while True:
            params = {"merk": make_name.upper(), "$limit": limit, "$offset": offset}
            data = await cls.fetch_api(cls.RDW_MAIN, params, headers=cls.HEADERS_RDW)
            if not data: break

            for item in data:
                plate = item.get("kenteken")
                if not plate: continue
                model = str(item.get("handelsbenaming", "Unknown")).upper()
                ccm = cls.clean_int(item.get("cilinderinhoud"))
                weight = cls.clean_int(item.get("massa_ledig_voertuig") or item.get("massa_rijklaar"))
                kw = item.get("netto_maximum_vermogen") or item.get("vermogen_massarijklaar")
                raw_date = item.get("datum_eerste_toelating")
                # First-admission date is YYYYMMDD-ish; default year when missing
                year = int(str(raw_date)[:4]) if raw_date else 2024

                v_class = cls.CATEGORY_MAP.get(item.get("voertuigsoort"), "other")
                key = f"{model}-{ccm}-{weight}-{v_class}-{kw}-{year}"

                if key not in variants:
                    variants[key] = {
                        "model": model, "ccm": ccm, "weight": weight, "v_class": v_class,
                        "plate": plate, "main_kw": kw, "prod_year": year, "vin": item.get("vin")
                    }
            if len(data) < limit: break
            offset += limit
        return variants

    @classmethod
    async def discover_usa_batch(cls, make_name):
        """US NHTSA batch discovery branch (covering roughly the last 5 model years)."""
        variants = {}
        # Look at the last 5 model years for the freshest models
        years = range(datetime.datetime.now().year - 5, datetime.datetime.now().year + 1)

        async def fetch_year(year):
            url = cls.US_BATCH.format(make=make_name.upper(), year=year)
            data = await cls.fetch_api(url)
            if data and "Results" in data:
                for m in data["Results"]:
                    m_name = m.get("Model_Name", "Unknown").upper()
                    # US data has no licence plate; Robot 2 can enrich it later if needed
                    key = f"US-{m_name}-{year}"
                    variants[key] = {
                        "model": m_name, "ccm": None, "weight": None, "v_class": "car",
                        "plate": "US-DISCOVERY", "main_kw": None, "prod_year": year, "vin": None
                    }

        await asyncio.gather(*(fetch_year(y) for y in years))
        return variants

    @classmethod
    async def process_make(cls, db, task_id, make_name):
        """Discover, enrich and persist all variants of a make, then mark the task processed."""
        logger.info(f"🚀 >>> {make_name} Powerhouse v1.4 INDUL...")

        # PARALLEL DISCOVERY: Netherlands and USA at the same time
        holland_task = cls.discover_holland(make_name)
        usa_task = cls.discover_usa_batch(make_name)

        holland_variants, usa_variants = await asyncio.gather(holland_task, usa_task)

        # Merge (Dutch entries take precedence because of the licence plate)
        all_variants = {**usa_variants, **holland_variants}
        logger.info(f"📊 Összesen {len(all_variants)} egyedi variáns (NL: {len(holland_variants)}, US: {len(usa_variants)})")

        # PARALLEL ENRICHMENT
        async def enrich_and_save(v):
            # Build an AssetCatalog row from discovery + deep-tech data;
            # any construction error yields None and the variant is skipped.
            deep = await cls.get_deep_tech(v["plate"], main_kw=v["main_kw"], vin=v["vin"])
            try:
                db_item = AssetCatalog(
                    make=make_name.upper(), model=v["model"], vehicle_class=v["v_class"],
                    fuel_type=deep["fuel"], power_kw=deep["kw"], engine_capacity=v["ccm"],
                    max_weight_kg=v["weight"], axle_count=deep["axles"], body_type=deep["body"],
                    year_from=v["prod_year"], euro_class=deep["euro"],
                    factory_data={
                        "source": "Powerhouse-v1.4",
                        "discovery_nl": v["plate"] != "US-DISCOVERY",
                        "enriched_at": str(datetime.datetime.now())
                    }
                )
                return db_item
            except Exception:
                return None

        # Kick off every enrichment at once (the semaphore caps concurrency)
        results = await asyncio.gather(*(enrich_and_save(v) for v in all_variants.values()))

        # Persist
        total_saved = 0
        for item in results:
            if item:
                db.add(item)
                total_saved += 1

        await db.commit()
        await db.execute(text("UPDATE data.catalog_discovery SET status = 'processed' WHERE id = :id"), {"id": task_id})
        await db.commit()
        logger.info(f"🏁 {make_name} KÉSZ. {total_saved} rekord rögzítve.")

    @classmethod
    async def run(cls):
        """Worker loop: claim one pending discovery row at a time (multi-worker safe)."""
        logger.info("🤖 Robot 1.4 (Powerhouse) ONLINE - Multi-Worker Safe")
        while True:
            async with SessionLocal() as db:
                # 1. 'FOR UPDATE SKIP LOCKED': lock the row we grab, while the
                #    other robots skip whatever we already hold.
                query = text("""
                    SELECT id, make FROM data.catalog_discovery
                    WHERE status = 'pending'
                    LIMIT 1
                    FOR UPDATE SKIP LOCKED
                """)

                res = await db.execute(query)
                task = res.fetchone()

                if task:
                    task_id, make_name = task
                    # 2. Flip it to 'running' inside the transaction, so no
                    #    other worker can touch it.
                    await db.execute(
                        text("UPDATE data.catalog_discovery SET status = 'running' WHERE id = :id"),
                        {"id": task_id}
                    )
                    await db.commit()  # this commit finalizes the claim

                    # 3. Now the actual work can start
                    await cls.process_make(db, task_id, make_name)
                else:
                    logger.info("😴 Várólista üres (vagy minden sor foglalt). Alvás 60 mp...")
                    await asyncio.sleep(60)

            await asyncio.sleep(1)
|
||||
|
||||
# Entry point: run the hunter's infinite worker loop when executed as a script.
if __name__ == "__main__":
    asyncio.run(CatalogMaster.run())
|
||||
117
backend/app/workers/researcher_v2_1.py
Normal file
117
backend/app/workers/researcher_v2_1.py
Normal file
@@ -0,0 +1,117 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import warnings
|
||||
import os
|
||||
from sqlalchemy import select, update, and_, func, or_, case # Explicit case import
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.vehicle_definitions import VehicleModelDefinition
|
||||
import httpx
|
||||
|
||||
# 1. KRITIKUS JAVÍTÁS: A figyelmeztetések globális elnyomása az import előtt
|
||||
warnings.filterwarnings("ignore", category=RuntimeWarning, module='duckduckgo_search')
|
||||
from duckduckgo_search import DDGS
|
||||
|
||||
# Logolás beállítása, hogy lássuk a haladást
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(name)s: %(message)s')
|
||||
logger = logging.getLogger("Robot-Researcher-v2.1")
|
||||
|
||||
class ResearcherBot:
    """Robot 2.1 (Researcher): gathers raw web context for vehicle definitions.

    Selects batches of unresearched VehicleModelDefinition rows by priority,
    runs a fixed set of DuckDuckGo text searches per vehicle, and stores the
    concatenated snippets for the downstream AI-synthesis robot ("Alchemist").
    """

    def __init__(self):
        # Rows claimed per polling cycle.
        self.batch_size = 15
        # NOTE(review): declared but not referenced anywhere below — the whole
        # batch is gathered at once; confirm whether a cap was intended.
        self.max_parallel_queries = 5

    async def fetch_source(self, label, query):
        """Fetch one labelled source from DuckDuckGo.

        Runs the blocking DDGS client on a worker thread and returns a
        delimited text section. Errors are folded into the returned section
        instead of raised, so one failed query cannot kill the whole batch.
        """
        try:
            def search():
                # In newer library versions instantiating DDGS() like this is the most stable
                with DDGS() as ddgs:
                    results = ddgs.text(query, max_results=3)
                    return [r['body'] for r in results] if results else []

            results = await asyncio.to_thread(search)

            if not results:
                return f"=== SOURCE: {label} | NO DATA FOUND ===\n\n"

            content = f"=== SOURCE: {label} | QUERY: {query} ===\n"
            content += "\n---\n".join(results)
            content += "\n=== END SOURCE ===\n\n"
            return content
        except Exception as e:
            logger.error(f"❌ Keresési hiba ({label}): {e}")
            return f"=== SOURCE: {label} ERROR: {str(e)} ===\n\n"

    async def research_vehicle(self, vehicle_id):
        """Research a single vehicle and park the raw context for AI synthesis.

        Uses two short-lived sessions: one to claim the row (status flip),
        one to store the result — no session is held across the web searches.
        """
        async with SessionLocal() as db:
            res = await db.execute(select(VehicleModelDefinition).where(VehicleModelDefinition.id == vehicle_id))
            v = res.scalar_one_or_none()
            if not v: return

            make, model = v.make, v.marketing_name
            # Mark the row so everyone can see research is in progress
            await db.execute(update(VehicleModelDefinition).where(VehicleModelDefinition.id == vehicle_id).values(status='research_in_progress'))
            await db.commit()

        logger.info(f"🔎 Kutatás indul: {make} {model}")

        queries = [
            ("TECH_SPECS", f"{make} {model} technical specifications engine power"),
            ("MAINTENANCE", f"{make} {model} service manual oil capacity spark plug"),
            ("TIRES_BRAKES", f"{make} {model} tire size brake pad type"),
            ("FLUIDS", f"{make} {model} coolant quantity transmission oil")
        ]

        tasks = [self.fetch_source(label, q) for label, q in queries]
        search_results = await asyncio.gather(*tasks)

        full_context = "".join(search_results)

        async with SessionLocal() as db:
            await db.execute(
                update(VehicleModelDefinition)
                .where(VehicleModelDefinition.id == vehicle_id)
                .values(
                    raw_search_context=full_context,
                    status='awaiting_ai_synthesis',  # hand-off point to Robot 2.2 (Alchemist)
                    updated_at=func.now()
                )
            )
            await db.commit()
        logger.info(f"✅ Kutatás kész, adat a tartályban: {make} {model}")

    async def run(self):
        """Main polling loop: prioritized batch selection + parallel research."""
        logger.info("🚀 Robot 2.1 (Researcher) ONLINE")
        while True:
            async with SessionLocal() as db:
                # Explicit case() instead of func.case — this fixes the
                # "TypeError: got an unexpected keyword argument 'else_'" error.
                # Priority: popular cars > any car > popular motorcycles > rest.
                priorities = case(
                    (and_(VehicleModelDefinition.vehicle_type == 'car',
                          VehicleModelDefinition.make.in_(['SUZUKI', 'TOYOTA', 'SKODA', 'VOLKSWAGEN', 'OPEL'])), 1),
                    (VehicleModelDefinition.vehicle_type == 'car', 2),
                    (and_(VehicleModelDefinition.vehicle_type == 'motorcycle',
                          VehicleModelDefinition.make.in_(['HONDA', 'YAMAHA', 'SUZUKI', 'KAWASAKI'])), 3),
                    else_=4
                )

                stmt = select(VehicleModelDefinition.id).where(
                    or_(VehicleModelDefinition.status == 'unverified', VehicleModelDefinition.status == 'awaiting_research')
                ).order_by(priorities).limit(self.batch_size)

                res = await db.execute(stmt)
                ids = [r[0] for r in res.fetchall()]

                if not ids:
                    logger.info("💤 Nincs több feldolgozandó feladat, pihenés...")
                    await asyncio.sleep(60)
                    continue

                # Process the batch in parallel
                await asyncio.gather(*[self.research_vehicle(rid) for rid in ids])

                # Short pause to be gentle on the search engines
                await asyncio.sleep(2)
|
||||
|
||||
# Entry point: run the researcher's infinite polling loop when executed as a script.
if __name__ == "__main__":
    asyncio.run(ResearcherBot().run())
|
||||
83
backend/app/workers/robot0_priority_setter.py
Normal file
83
backend/app/workers/robot0_priority_setter.py
Normal file
@@ -0,0 +1,83 @@
|
||||
import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import os
|
||||
from sqlalchemy import text
|
||||
from app.db.session import SessionLocal
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s]: %(message)s')
|
||||
logger = logging.getLogger("Robot-0-Strategist")
|
||||
|
||||
class Robot0Strategist:
    """Robot 0 (Strategist): seeds the discovery queue ordered by market popularity.

    Queries RDW open data for the most common makes per vehicle category and
    inserts them into data.catalog_discovery, where Robot 1 (Hunter) consumes
    them in insertion (id) order.
    """

    RDW_API = "https://opendata.rdw.nl/resource/m9d7-ebf2.json"
    RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
    HEADERS = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}

    # Dutch types mapped onto our categories, in the requested priority order.
    # NOTE(review): 'Bedrijfswagen' does not match the 'Bedrijfsauto' spelling
    # used by the Hunter robot's CATEGORY_MAP — verify against the actual RDW
    # 'voertuigsoort' values, otherwise the truck filter may match nothing.
    CATEGORIES = [
        {"name": "car", "rdw_types": ["'Personenauto'"]},
        {"name": "motorcycle", "rdw_types": ["'Motorfiets'"]},
        {"name": "truck", "rdw_types": ["'Bedrijfswagen'", "'Vrachtwagen'", "'Opleggertrekker'"]},
        {"name": "other", "rdw_types": ["NOT IN ('Personenauto', 'Motorfiets', 'Bedrijfswagen', 'Vrachtwagen', 'Opleggertrekker')"]}
    ]

    async def get_popular_makes(self, vehicle_class, rdw_types):
        """Fetch the most popular makes of a category from RDW (SoQL), most common first.

        Returns the decoded JSON list of {merk, count} rows, or [] on any error.
        """
        # SQL-like filtering through the Socrata SoQL API
        type_filter = " OR ".join([f"voertuigsoort = {t}" for t in rdw_types])
        if "NOT IN" in rdw_types[0]:  # special handling for the 'other' bucket
            type_filter = f"voertuigsoort {rdw_types[0]}"

        params = {
            "$select": "merk, count(*)",
            "$where": type_filter,
            "$group": "merk",
            "$order": "count DESC",
            "$limit": 500  # the top 500 makes per category is plenty
        }

        async with httpx.AsyncClient(timeout=30) as client:
            try:
                resp = await client.get(self.RDW_API, params=params, headers=self.HEADERS)
                if resp.status_code == 200:
                    return resp.json()
                return []
            except Exception as e:
                logger.error(f"❌ Hiba a {vehicle_class} lekérdezésekor: {e}")
                return []

    async def run(self):
        """One-shot run: fill data.catalog_discovery ordered by popularity, then exit."""
        logger.info("🚀 Robot 0 (Strategist) INDUL - Piaci alapú sorrend felállítása...")

        async with SessionLocal() as db:
            # 1. Optionally wipe the current queue for a clean start (disabled)
            # await db.execute(text("DELETE FROM data.catalog_discovery WHERE status = 'pending'"))

            for category in self.CATEGORIES:
                v_class = category["name"]
                logger.info(f"📊 {v_class.upper()} kategória elemzése...")

                makes = await self.get_popular_makes(v_class, category["rdw_types"])

                added_count = 0
                for item in makes:
                    make_name = item.get("merk")
                    if not make_name: continue

                    # Insert into the Discovery table.
                    # Priority is implied by id order, which Robot 1 follows;
                    # already-processed rows are not re-queued by the upsert.
                    await db.execute(text("""
                        INSERT INTO data.catalog_discovery (make, model, vehicle_class, status, source)
                        VALUES (:make, 'ALL_MODELS', :class, 'pending', 'ROBOT-0-POPULARITY')
                        ON CONFLICT (make, model, vehicle_class) DO UPDATE
                        SET status = 'pending' WHERE catalog_discovery.status != 'processed'
                    """), {"make": make_name.upper(), "class": v_class})
                    added_count += 1

                await db.commit()
                logger.info(f"✅ {v_class.upper()}: {added_count} márka sorba állítva a népszerűség alapján.")

        logger.info("🏁 Robot 0 végzett. A Discovery tábla készen áll a Robot 1 (Hunter) számára!")
|
||||
|
||||
# Entry point: run the strategist once when executed as a script.
if __name__ == "__main__":
    asyncio.run(Robot0Strategist().run())
|
||||
@@ -2,159 +2,159 @@ import asyncio
|
||||
import httpx
|
||||
import logging
|
||||
import os
|
||||
import hashlib
|
||||
from datetime import datetime, timezone
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, text
|
||||
from sqlalchemy import select, text, update
|
||||
from app.db.session import SessionLocal
|
||||
|
||||
# Modellek - Az új v1.3 struktúra
|
||||
from app.models.service import ServiceStaging, DiscoveryParameter
|
||||
|
||||
# Naplózás
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
logger = logging.getLogger("Robot-v1.3-ContinentalScout")
|
||||
logger = logging.getLogger("Robot-v1.3.1-ContinentalScout")
|
||||
|
||||
class ServiceHunter:
|
||||
"""
|
||||
Robot v1.3.0: Continental Scout.
|
||||
EU-szintű felderítő motor, Discovery tábla alapú vezérléssel.
|
||||
Robot v1.3.1: Continental Scout (Grid Search Edition)
|
||||
- Dinamikus rácsbejárás a sűrű területek lefedésére.
|
||||
- Ujjlenyomat-alapú deduplikáció.
|
||||
- Bővített kulcsszókezelés.
|
||||
"""
|
||||
OVERPASS_URL = "http://overpass-api.de/api/interpreter"
|
||||
PLACES_NEW_URL = "https://places.googleapis.com/v1/places:searchNearby"
|
||||
GEOCODE_URL = "https://maps.googleapis.com/maps/api/geocode/json"
|
||||
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
|
||||
|
||||
@classmethod
|
||||
async def get_coordinates(cls, city, country_code):
|
||||
"""Város központjának lekérése a keresés indításához."""
|
||||
params = {"address": f"{city}, {country_code}", "key": cls.GOOGLE_API_KEY}
|
||||
async with httpx.AsyncClient() as client:
|
||||
resp = await client.get(cls.GEOCODE_URL, params=params)
|
||||
if resp.status_code == 200:
|
||||
results = resp.json().get("results")
|
||||
if results:
|
||||
loc = results[0]["geometry"]["location"]
|
||||
return loc["lat"], loc["lng"]
|
||||
return None, None
|
||||
def generate_fingerprint(cls, name: str, city: str, street: str) -> str:
|
||||
"""Egyedi ujjlenyomat készítése a duplikációk kiszűrésére."""
|
||||
raw_string = f"{str(name).lower()}|{str(city).lower()}|{str(street).lower()[:5]}"
|
||||
return hashlib.md5(raw_string.encode()).hexdigest()
|
||||
|
||||
@classmethod
|
||||
async def get_city_bounds(cls, city, country_code):
|
||||
"""Város befoglaló téglalapjának (Bounding Box) lekérése Nominatim-al."""
|
||||
url = "https://nominatim.openstreetmap.org/search"
|
||||
params = {"city": city, "country": country_code, "format": "json"}
|
||||
async with httpx.AsyncClient(headers={"User-Agent": "ServiceFinder-Scout/1.0"}) as client:
|
||||
resp = await client.get(url, params=params)
|
||||
if resp.status_code == 200 and resp.json():
|
||||
bbox = resp.json()[0].get("boundingbox") # [min_lat, max_lat, min_lon, max_lon]
|
||||
return [float(x) for x in bbox]
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
async def run_grid_search(cls, db, task):
|
||||
"""Rács-alapú bejárás a városon belül."""
|
||||
bbox = await cls.get_city_bounds(task.city, task.country_code)
|
||||
if not bbox: return
|
||||
|
||||
# 1km-es lépések generálása (kb. 0.01 fok)
|
||||
lat_step = 0.015
|
||||
lon_step = 0.02
|
||||
|
||||
curr_lat = bbox[0]
|
||||
while curr_lat < bbox[1]:
|
||||
curr_lon = bbox[2]
|
||||
while curr_lon < bbox[3]:
|
||||
logger.info(f"🛰️ Rács-cella pásztázása: {curr_lat}, {curr_lon} - Kulcsszó: {task.keyword}")
|
||||
places = await cls.get_google_places(curr_lat, curr_lon, task.keyword)
|
||||
|
||||
for p in places:
|
||||
# Adatok kinyerése és tisztítása
|
||||
name = p.get('displayName', {}).get('text')
|
||||
full_addr = p.get('formattedAddress', '')
|
||||
|
||||
# Ujjlenyomat generálás
|
||||
f_print = cls.generate_fingerprint(name, task.city, full_addr)
|
||||
|
||||
await cls.save_to_staging(db, {
|
||||
"external_id": p.get('id'),
|
||||
"name": name,
|
||||
"full_address": full_addr,
|
||||
"phone": p.get('internationalPhoneNumber'),
|
||||
"website": p.get('websiteUri'),
|
||||
"fingerprint": f_print,
|
||||
"city": task.city,
|
||||
"source": "google",
|
||||
"raw": p,
|
||||
"trust": 30
|
||||
})
|
||||
curr_lon += lon_step
|
||||
await asyncio.sleep(0.5) # API védelem
|
||||
curr_lat += lat_step
|
||||
|
||||
@classmethod
|
||||
async def get_google_places(cls, lat, lon, keyword):
|
||||
"""Google Places New API - Javított, 400-as hiba elleni védelemmel."""
|
||||
"""Google Places New API hívás rács-pontra."""
|
||||
if not cls.GOOGLE_API_KEY: return []
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"X-Goog-Api-Key": cls.GOOGLE_API_KEY,
|
||||
"X-Goog-FieldMask": "places.displayName,places.id,places.types,places.internationalPhoneNumber,places.websiteUri,places.formattedAddress"
|
||||
"X-Goog-FieldMask": "places.displayName,places.id,places.internationalPhoneNumber,places.websiteUri,places.formattedAddress"
|
||||
}
|
||||
|
||||
# A 'keyword' a TextQuery-hez kellene, a SearchNearby-nél típusokat (includedTypes) használunk.
|
||||
# EU szintű trükk: Ha nincs pontos típus, a 'car_repair' az alapértelmezett.
|
||||
payload = {
|
||||
"includedTypes": ["car_repair", "gas_station", "car_wash", "motorcycle_repair"],
|
||||
"includedTypes": ["car_repair", "motorcycle_repair"],
|
||||
"maxResultCount": 20,
|
||||
"locationRestriction": {
|
||||
"circle": {
|
||||
"center": {"latitude": lat, "longitude": lon},
|
||||
"radius": 5000.0 # 5km körzet
|
||||
"radius": 1500.0 # 1.5km sugarú kör a fedés érdekében
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async with httpx.AsyncClient() as client:
|
||||
resp = await client.post(cls.PLACES_NEW_URL, json=payload, headers=headers)
|
||||
if resp.status_code == 200:
|
||||
return resp.json().get("places", [])
|
||||
else:
|
||||
logger.error(f"❌ Google API hiba ({resp.status_code}): {resp.text}")
|
||||
return []
|
||||
return resp.json().get("places", []) if resp.status_code == 200 else []
|
||||
|
||||
@classmethod
|
||||
async def save_to_staging(cls, db: AsyncSession, data: dict):
|
||||
"""Mentés a Staging táblába 9-mezős bontással."""
|
||||
stmt = select(ServiceStaging).where(ServiceStaging.external_id == str(data['external_id']))
|
||||
if (await db.execute(stmt)).scalar_one_or_none(): return
|
||||
"""Mentés ujjlenyomat ellenőrzéssel."""
|
||||
# 1. Megnézzük, létezik-e már ez az ujjlenyomat
|
||||
stmt = select(ServiceStaging).where(ServiceStaging.fingerprint == data['fingerprint'])
|
||||
existing = (await db.execute(stmt)).scalar_one_or_none()
|
||||
|
||||
if existing:
|
||||
# Csak a bizalmi pontot növeljük és az utolsó észlelést frissítjük
|
||||
existing.trust_score += 5
|
||||
return
|
||||
|
||||
new_entry = ServiceStaging(
|
||||
name=data['name'],
|
||||
source=data['source'],
|
||||
external_id=str(data['external_id']),
|
||||
# Itt történik a 9-mezős bontás (ha érkezik adat)
|
||||
postal_code=data.get('zip'),
|
||||
city=data.get('city'),
|
||||
street_name=data.get('street'),
|
||||
street_type=data.get('street_type', 'utca'),
|
||||
house_number=data.get('number'),
|
||||
full_address=data.get('full_address'),
|
||||
contact_phone=data.get('phone'),
|
||||
website=data.get('website'),
|
||||
fingerprint=data['fingerprint'],
|
||||
city=data['city'],
|
||||
full_address=data['full_address'],
|
||||
contact_phone=data['phone'],
|
||||
website=data['website'],
|
||||
raw_data=data.get('raw', {}),
|
||||
status="pending",
|
||||
trust_score=data.get('trust', 10)
|
||||
trust_score=data.get('trust', 30)
|
||||
)
|
||||
db.add(new_entry)
|
||||
await db.flush()
|
||||
|
||||
@classmethod
|
||||
async def run(cls):
|
||||
logger.info("🤖 Robot v1.3.0: Continental Scout elindult...")
|
||||
|
||||
logger.info("🤖 Continental Scout v1.3.1 - Grid Engine INDUL...")
|
||||
while True:
|
||||
async with SessionLocal() as db:
|
||||
try:
|
||||
await db.execute(text("SET search_path TO data, public"))
|
||||
# 1. Paraméterek lekérése a táblából
|
||||
stmt = select(DiscoveryParameter).where(DiscoveryParameter.is_active == True)
|
||||
tasks = (await db.execute(stmt)).scalars().all()
|
||||
|
||||
for task in tasks:
|
||||
logger.info(f"🔎 Felderítés: {task.city} ({task.country_code}) -> {task.keyword}")
|
||||
logger.info(f"🔎 Mélyfúrás indítása: {task.city} -> {task.keyword}")
|
||||
await cls.run_grid_search(db, task)
|
||||
|
||||
# Koordináták beszerzése a kereséshez
|
||||
lat, lon = await cls.get_coordinates(task.city, task.country_code)
|
||||
if not lat: continue
|
||||
|
||||
# --- GOOGLE FÁZIS ---
|
||||
google_places = await cls.get_google_places(lat, lon, task.keyword)
|
||||
for p in google_places:
|
||||
await cls.save_to_staging(db, {
|
||||
"external_id": p.get('id'),
|
||||
"name": p.get('displayName', {}).get('text'),
|
||||
"full_address": p.get('formattedAddress'),
|
||||
"phone": p.get('internationalPhoneNumber'),
|
||||
"website": p.get('websiteUri'),
|
||||
"source": "google",
|
||||
"raw": p,
|
||||
"trust": 30
|
||||
})
|
||||
|
||||
# --- OSM FÁZIS (EU kompatibilis lekérdezés) ---
|
||||
osm_query = f"""[out:json][timeout:60];
|
||||
(nwr["amenity"~"car_repair|fuel"](around:5000, {lat}, {lon}););
|
||||
out center;"""
|
||||
async with httpx.AsyncClient() as client:
|
||||
resp = await client.post(cls.OVERPASS_URL, data={"data": osm_query})
|
||||
if resp.status_code == 200:
|
||||
for el in resp.json().get("elements", []):
|
||||
t = el.get("tags", {})
|
||||
await cls.save_to_staging(db, {
|
||||
"external_id": f"osm_{el['id']}",
|
||||
"name": t.get('name', 'Ismeretlen szerviz'),
|
||||
"city": t.get('addr:city', task.city),
|
||||
"zip": t.get('addr:postcode'),
|
||||
"street": t.get('addr:street'),
|
||||
"number": t.get('addr:housenumber'),
|
||||
"source": "osm",
|
||||
"raw": el,
|
||||
"trust": 15
|
||||
})
|
||||
|
||||
task.last_run_at = datetime.now(timezone.utc)
|
||||
await db.commit()
|
||||
logger.info(f"✅ {task.city} felderítve.")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"💥 Kritikus hiba a ciklusban: {e}")
|
||||
logger.error(f"💥 Hiba: {e}")
|
||||
await db.rollback()
|
||||
|
||||
logger.info("😴 Minden aktív feladat kész. Alvás 1 órán át...")
|
||||
await asyncio.sleep(3600)
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -3,113 +3,169 @@ import httpx
|
||||
import logging
|
||||
import os
|
||||
import datetime
|
||||
from sqlalchemy import select, and_
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
import random
|
||||
import sys
|
||||
from sqlalchemy import select, and_, update, text, func
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.vehicle_definitions import VehicleModelDefinition
|
||||
from app.models.asset import AssetCatalog
|
||||
from app.services.ai_service import AIService
|
||||
from duckduckgo_search import DDGS
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger("Robot-Bulk-Master")
|
||||
# --- SZIGORÚ NAPLÓZÁS KONFIGURÁCIÓ ---
|
||||
for handler in logging.root.handlers[:]:
|
||||
logging.root.removeHandler(handler)
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s.%(msecs)03d [%(levelname)s] Alchemist: %(message)s',
|
||||
datefmt='%Y-%m-%d %H:%M:%S',
|
||||
stream=sys.stdout
|
||||
)
|
||||
logger = logging.getLogger("Robot-Enricher-v1.3.0")
|
||||
|
||||
class TechEnricher:
|
||||
API_URL = "https://opendata.rdw.nl/resource/kyri-nuah.json"
|
||||
RDW_TOKEN = os.getenv("RDW_APP_TOKEN")
|
||||
HEADERS = {"X-App-Token": RDW_TOKEN} if RDW_TOKEN else {}
|
||||
"""
|
||||
Industrial TechEnricher v1.3.0
|
||||
- Fix: Deadlock elkerülése izolált session-kezeléssel.
|
||||
- Logika: Napi 500 AI hívás, Smart Merge, Web Fallback.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.max_attempts = 5
|
||||
self.batch_size = 15
|
||||
self.daily_ai_limit = 500
|
||||
self.ai_calls_today = 0
|
||||
self.last_reset_date = datetime.date.today()
|
||||
|
||||
def check_budget(self) -> bool:
|
||||
if datetime.date.today() > self.last_reset_date:
|
||||
self.ai_calls_today = 0
|
||||
self.last_reset_date = datetime.date.today()
|
||||
return self.ai_calls_today < self.daily_ai_limit
|
||||
|
||||
def is_data_sane(self, data: dict) -> bool:
|
||||
try:
|
||||
if not data: return False
|
||||
ccm = int(data.get("ccm", 0) or 0)
|
||||
kw = int(data.get("kw", 0) or 0)
|
||||
if ccm > 15000 or kw > 2000: return False
|
||||
return True
|
||||
except: return False
|
||||
|
||||
async def get_web_wisdom(self, make: str, model: str) -> str:
|
||||
"""Keresés a neten izolált szálon (nem blokkolja az aszinkron loopot)."""
|
||||
query = f"{make} {model} technical specs maintenance oil qty tire size"
|
||||
try:
|
||||
def sync_search():
|
||||
with DDGS() as ddgs:
|
||||
return "\n".join([r['body'] for r in ddgs.text(query, max_results=3)])
|
||||
return await asyncio.to_thread(sync_search)
|
||||
except Exception as e:
|
||||
logger.warning(f"🌐 Web hiba ({make}): {e}")
|
||||
return ""
|
||||
|
||||
async def process_single_record(self, record_id: int):
    """
    Enrich one staging record in three strictly separated steps to avoid
    stalling the event loop on a long-held DB session:

    1. Fetch the data and close the DB session.
    2. Do the AI work (no DB session open).
    3. Persist the result in a fresh session.

    Args:
        record_id: Primary key of the VehicleModelDefinition row to enrich.
    """
    # --- STEP 1: FETCH DATA (session closed immediately afterwards) ---
    async with SessionLocal() as db:
        stmt = select(VehicleModelDefinition).where(VehicleModelDefinition.id == record_id)
        res = await db.execute(stmt)
        rec = res.scalar_one_or_none()
        if not rec: return
        # Copy plain values out so no ORM object is used after the session closes.
        make, m_name, v_type = rec.make, rec.marketing_name, (rec.vehicle_type or "car")
    logger.info(f"🧪 >>> Dúsítás indítása: {make} {m_name}")

    # --- STEP 2: AI WORK (no DB session is open here!) ---
    try:
        # AIService call with the mandatory 4th 'sources' parameter
        ai_data = await AIService.get_clean_vehicle_data(make, m_name, v_type, {})

        if not ai_data or not ai_data.get("kw"):
            # AI is unsure -> retry with web-search context mixed in.
            logger.info(f"🔍 AI bizonytalan, webes dúsítás indul: {make} {m_name}")
            web_info = await self.get_web_wisdom(make, m_name)
            ai_data = await AIService.get_clean_vehicle_data(make, m_name, v_type, {"web_context": web_info})

        if not ai_data: raise ValueError("Az AI nem adott értékelhető választ.")

        # --- STEP 3: PERSIST (open a fresh session) ---
        async with SessionLocal() as db:
            # MDM (AssetCatalog) smart merge: only insert when no identical
            # make/model/kW variant already exists.
            cat_stmt = select(AssetCatalog).where(and_(
                AssetCatalog.make == make.upper(),
                AssetCatalog.model == ai_data.get("marketing_name", m_name)[:50],
                AssetCatalog.power_kw == ai_data.get("kw")
            )).limit(1)

            if not (await db.execute(cat_stmt)).scalar_one_or_none():
                db.add(AssetCatalog(
                    make=make.upper(),
                    model=ai_data.get("marketing_name", m_name)[:50],
                    power_kw=ai_data.get("kw"),
                    engine_capacity=ai_data.get("ccm"),
                    factory_data=ai_data
                ))
                logger.info(f"✅ Mentve az MDM-be: {make} {m_name}")

            # Update the staging row with the enriched values.
            await db.execute(
                update(VehicleModelDefinition)
                .where(VehicleModelDefinition.id == record_id)
                .values(
                    status="ai_enriched",
                    technical_code=ai_data.get("technical_code") or f"GEN-{record_id}",
                    engine_capacity=ai_data.get("ccm"),
                    power_kw=ai_data.get("kw"),
                    updated_at=func.now()
                )
            )
            await db.commit()
            self.ai_calls_today += 1

    except Exception as e:
        logger.error(f"🚨 Hiba a(z) {record_id} rekordnál: {e}")
        # Failure bookkeeping in its own session: bump the attempt counter,
        # store a truncated error, and suspend the row after repeated failures.
        # NOTE(review): the CASE reads `attempts` server-side in the same UPDATE
        # that increments it — confirm the intended threshold (>= 4 before or
        # after the bump) matches the retry policy.
        async with SessionLocal() as db:
            await db.execute(update(VehicleModelDefinition).where(VehicleModelDefinition.id == record_id).values(
                attempts=VehicleModelDefinition.attempts + 1,
                last_error=str(e)[:200],
                status=text("CASE WHEN attempts >= 4 THEN 'suspended' ELSE 'unverified' END"),
                updated_at=func.now()
            ))
            await db.commit()
|
||||
|
||||
async def run(self):
    """
    Main worker loop (Robot 2 v1.3.0): processes records while the daily
    AI budget allows.

    NOTE(review): this hunk appears truncated by the surrounding diff — only
    the budget check of the loop body is visible here; the batch-processing
    part is missing from this view. Confirm against the full file.
    """
    logger.info(f"🚀 Robot 2 v1.3.0 ONLINE (Limit: {self.daily_ai_limit})")
    while True:
        # Out of budget: back off for an hour before re-checking.
        if not self.check_budget():
            await asyncio.sleep(3600); continue
|
||||
|
||||
@classmethod
async def fetch_rdw_tech_data(cls, make, model):
    """Fetch one technical record for a make/model from the RDW open-data API.

    Args:
        make: Vehicle manufacturer name (upper-cased for the query).
        model: Marketing/trade name; coerced to a stripped upper-case string.

    Returns:
        The first matching row as a dict, or None when nothing matched or the
        request failed.
    """
    params = {"merk": make.upper(), "handelsbenaming": str(model).strip().upper(), "$limit": 1}
    async with httpx.AsyncClient(headers=cls.HEADERS) as client:
        try:
            resp = await client.get(cls.API_URL, params=params, timeout=15)
            if resp.status_code != 200:
                return None
            # Fix: parse the body once (the original called resp.json() twice,
            # decoding the payload a second time for no benefit).
            rows = resp.json()
            return rows[0] if rows else None
        except Exception as e:
            # Fix: the original bare `except:` also swallowed
            # asyncio.CancelledError and hid every failure silently.
            logger.warning(f"RDW lookup failed for {make} {model}: {e}")
            return None
|
||||
|
||||
@classmethod
async def run(cls):
    """
    Continuous master-merge loop: claims batches of 'unverified' staging rows,
    enriches them via RDW + AI, and merges duplicates onto a master record.

    NOTE(review): this body looks like an unresolved merge of two worker
    versions — it references `self` inside a @classmethod (NameError at
    runtime) and re-binds the outer `ids` batch inside the per-record loop.
    Confirm against version history before relying on this code path.
    """
    logger.info("🚀 Master-Merge Robot FOLYAMATOS ÜZEMMÓD INDUL...")

    while True:  # continuous loop until the data runs out
        async with SessionLocal() as main_db:
            stmt = select(VehicleModelDefinition.id).where(
                VehicleModelDefinition.status == "unverified"
            ).limit(50)  # claim 50 IDs at a time
            res = await main_db.execute(stmt)
            ids = res.scalars().all()

        if not ids:
            logger.info("🏁 Minden rekord feldolgozva. A robot megáll.")
            break

        logger.info(f"📦 Új csomag indítása: {len(ids)} rekord.")

        for m_id in ids:
            async with SessionLocal() as db:
                try:
                    current = await db.get(VehicleModelDefinition, m_id)
                    if not current: continue
                    # Only fetch IDs so the session is not kept open during enrichment.
                    # NOTE(review): `self.max_attempts` / `self.batch_size` are unbound
                    # in a classmethod — leftover from the instance-based variant.
                    # Also clobbers the outer `ids` the enclosing loop iterates.
                    stmt = select(VehicleModelDefinition.id).where(and_(
                        VehicleModelDefinition.status == "unverified",
                        VehicleModelDefinition.attempts < self.max_attempts
                    )).limit(self.batch_size)
                    ids = [r[0] for r in (await db.execute(stmt)).fetchall()]

                    logger.info(f"🧪 Feldolgozás: {current.make} {current.marketing_name} (ID: {m_id})")

                    rdw_data = await cls.fetch_rdw_tech_data(current.make, current.marketing_name)
                    if rdw_data:
                        # RDW values win when present; a 0 falls back to the current value.
                        current.engine_capacity = int(float(rdw_data.get("cilinderinhoud", 0))) or current.engine_capacity
                        current.power_kw = int(float(rdw_data.get("netto_maximum_vermogen_kw", 0))) or current.power_kw
                    if not ids:
                        await asyncio.sleep(60); continue

                    ai_data = await AIService.get_clean_vehicle_data(current.make, current.marketing_name, current.vehicle_type)

                    if ai_data:
                        tech_code = ai_data.get("technical_code") or "N/A"
                        new_ccm = ai_data.get("ccm") or current.engine_capacity
                        logger.info(f"📦 Batch indul: {len(ids)} rekord.")
                        for rid in ids:
                            # NOTE(review): `self.process_single_record` is unbound in a
                            # classmethod — another remnant of the instance-based variant.
                            await self.process_single_record(rid)
                            await asyncio.sleep(random.uniform(10.0, 30.0))  # go easy on the GPU

                        master_record = None
                        if tech_code and tech_code != "N/A":
                            # Look for an already-enriched master with the same identity.
                            stmt_master = select(VehicleModelDefinition).where(and_(
                                VehicleModelDefinition.make == current.make,
                                VehicleModelDefinition.technical_code == tech_code,
                                VehicleModelDefinition.engine_capacity == new_ccm,
                                VehicleModelDefinition.status == 'ai_enriched',
                                VehicleModelDefinition.id != m_id
                            ))
                            master_record = (await db.execute(stmt_master)).scalar_one_or_none()

                        if master_record:
                            # Duplicate of an existing master: merge synonyms, link parent.
                            logger.info(f"🔗 Merge: ID:{m_id} -> Master ID:{master_record.id}")
                            syns = set(master_record.synonyms or [])
                            syns.update(ai_data.get("synonyms", []))
                            syns.add(current.marketing_name)
                            master_record.synonyms = list(syns)
                            current.status = "duplicate"
                            current.parent_id = master_record.id
                        else:
                            # No master found: promote this row with the AI values.
                            current.technical_code = tech_code if tech_code != "N/A" else f"N/A-{m_id}"
                            current.marketing_name = ai_data.get("marketing_name", current.marketing_name)
                            current.engine_capacity = new_ccm
                            current.power_kw = ai_data.get("kw") or current.power_kw
                            current.year_from = ai_data.get("year_from")
                            current.year_to = ai_data.get("year_to")
                            current.synonyms = ai_data.get("synonyms", [])

                            if ai_data.get("maintenance"):
                                # Merge maintenance data into the existing spec JSON.
                                old_spec = current.specifications or {}
                                old_spec.update(ai_data.get("maintenance"))
                                current.specifications = old_spec

                            current.status = "ai_enriched"
                    else:
                        if not current.technical_code:
                            current.technical_code = f"UNKNOWN-{m_id}"

                    current.updated_at = datetime.datetime.now()
                    await db.commit()
                    logger.info(f"✅ Mentve (ID: {m_id})")

                except Exception as e:
                    await db.rollback()
                    logger.error(f"❌ Hiba ID:{m_id}: {e}")
                finally:
                    await db.close()
|
||||
except Exception as e:
|
||||
logger.error(f"🚨 Főciklus hiba: {e}")
|
||||
await asyncio.sleep(30)
|
||||
|
||||
if __name__ == "__main__":
    # Fix: merge residue started the worker twice back-to-back (a classmethod
    # call followed by an instance call). Run the enricher exactly once.
    enricher = TechEnricher()
    asyncio.run(enricher.run())
|
||||
64
backend/app/workers/vin_auditor.py
Normal file
64
backend/app/workers/vin_auditor.py
Normal file
@@ -0,0 +1,64 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import sys
|
||||
import datetime
|
||||
from sqlalchemy import select, and_, text, update
|
||||
from sqlalchemy.orm import joinedload
|
||||
from app.db.session import SessionLocal
|
||||
from app.models.asset import Asset, AssetCatalog
|
||||
from app.services.ai_service import AIService
|
||||
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] Auditor: %(message)s', stream=sys.stdout)
|
||||
logger = logging.getLogger("VIN-Auditor-v1.3.0")
|
||||
|
||||
class VINAuditor:
    """
    VIN Auditor v1.3.0

    VIN-based verification and MDM sync: checks an asset's catalog data
    against the AI's VIN-derived "truth" and, on a significant power
    mismatch, re-points the asset at a newly created catalog variant.
    """

    @classmethod
    async def audit_asset(cls, asset_id: int):
        """Audit a single asset by VIN and mark it verified.

        A power deviation of >= 5 kW creates a new AssetCatalog variant and
        re-links the asset to it; otherwise the asset is simply verified.
        """
        # 1. DATA COLLECTION — close the session before the slow AI call.
        async with SessionLocal() as db:
            stmt = select(Asset).options(joinedload(Asset.catalog)).where(Asset.id == asset_id)
            asset = (await db.execute(stmt)).scalar_one_or_none()
            if not asset or not asset.vin:
                return
            if asset.catalog is None:
                # Fix: the original dereferenced asset.catalog unconditionally and
                # crashed with AttributeError for assets without a catalog row.
                logger.warning(f"⚠️ Asset {asset_id} has no catalog; audit skipped.")
                return
            make, vin, current_kw = asset.catalog.make, asset.vin, asset.catalog.power_kw

        # 2. AI PHASE (isolated call, no DB session held open)
        try:
            logger.info(f"🛡️ VIN Audit indul: {vin}")
            truth = await AIService.get_clean_vehicle_data(make, vin, "vin_audit", {"vin": vin})

            if truth and truth.get("kw"):
                # 3. PERSIST PHASE (fresh session)
                async with SessionLocal() as db:
                    real_kw = int(truth["kw"])
                    if abs(real_kw - (current_kw or 0)) >= 5:
                        # Significant deviation: store a new catalog variant
                        # and re-point the asset at it.
                        new_v = AssetCatalog(make=make.upper(), model=truth.get("marketing_name", "Unknown"), power_kw=real_kw)
                        db.add(new_v)
                        await db.flush()
                        await db.execute(update(Asset).where(Asset.id == asset_id).values(catalog_id=new_v.id, is_verified=True))
                    else:
                        await db.execute(update(Asset).where(Asset.id == asset_id).values(is_verified=True))

                    await db.commit()
                    logger.info(f"✅ Audit sikeres: {vin}")
        except Exception as e:
            logger.error(f"🚨 Auditor hiba: {e}")

    async def run(self):
        """Poll loop: pick one unverified asset that has a VIN and audit it."""
        logger.info("🛡️ Auditor v1.3.0 ONLINE")
        while True:
            try:
                async with SessionLocal() as db:
                    stmt = select(Asset.id).where(and_(Asset.is_verified == False, Asset.vin.isnot(None))).limit(1)
                    aid = (await db.execute(stmt)).scalar_one_or_none()
                # Run the audit outside the lookup session; audit_asset opens its own.
                if aid:
                    await self.audit_asset(aid)
                else:
                    await asyncio.sleep(60)
            except Exception as e:
                # Fix: the original bare `except:` also swallowed
                # asyncio.CancelledError and hid every failure; narrow the
                # handler and log before backing off.
                logger.error(f"🚨 Auditor loop error: {e}")
                await asyncio.sleep(30)
||||
|
||||
if __name__ == "__main__":
    # Entry point: run the auditor's polling loop until interrupted.
    auditor = VINAuditor()
    asyncio.run(auditor.run())
|
||||
@@ -0,0 +1,340 @@
|
||||
"""fix_system_params_final
|
||||
|
||||
Revision ID: 105626809486
|
||||
Revises: 835cc89dadc7
|
||||
Create Date: 2026-02-22 07:26:15.174460
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '105626809486'
|
||||
down_revision: Union[str, Sequence[str], None] = '835cc89dadc7'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema.

    Re-creates every foreign key with explicit source/referent schema 'data'
    (dropping the old auto-named constraints first), moves two enum types into
    the 'data' schema, and adjusts system_parameters (new autoincrement id
    column, value JSON -> JSONB). All `op.create_foreign_key(None, ...)` calls
    let Alembic auto-generate the new constraint names.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- Per-table FK rebuild: drop old constraint(s), re-create in schema 'data' ---
    op.drop_constraint(op.f('addresses_postal_code_id_fkey'), 'addresses', type_='foreignkey')
    op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.drop_constraint(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.drop_constraint(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.create_foreign_key(None, 'asset_assignments', 'branches', ['branch_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_costs_organization_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_asset_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_driver_id_fkey'), 'asset_costs', type_='foreignkey')
    op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_events_asset_id_fkey'), 'asset_events', type_='foreignkey')
    op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_financials_asset_id_fkey'), 'asset_financials', type_='foreignkey')
    op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.drop_constraint(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', type_='foreignkey')
    op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('assets_catalog_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_operator_person_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_operator_org_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_owner_person_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_owner_org_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_current_organization_id_fkey'), 'assets', type_='foreignkey')
    op.create_foreign_key(None, 'assets', 'persons', ['owner_person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'organizations', ['owner_org_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'persons', ['operator_person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'organizations', ['operator_org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('audit_logs_user_id_fkey'), 'audit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('branches_organization_id_fkey'), 'branches', type_='foreignkey')
    op.drop_constraint(op.f('branches_address_id_fkey'), 'branches', type_='foreignkey')
    op.create_foreign_key(None, 'branches', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'branches', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('credit_logs_org_id_fkey'), 'credit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('documents_uploaded_by_fkey'), 'documents', type_='foreignkey')
    op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', type_='foreignkey')
    op.create_foreign_key(None, 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', type_='foreignkey')
    op.drop_constraint(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', type_='foreignkey')
    op.drop_constraint(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', type_='foreignkey')
    op.create_foreign_key(None, 'financial_ledger', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'financial_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'financial_ledger', 'users', ['related_agent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', type_='foreignkey')
    op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', type_='foreignkey')
    op.drop_constraint(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', type_='foreignkey')
    op.create_foreign_key(None, 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('operational_logs_user_id_fkey'), 'operational_logs', type_='foreignkey')
    # operational_logs keeps SET NULL so deleting a user preserves the log row.
    op.create_foreign_key(None, 'operational_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='SET NULL')
    op.drop_constraint(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', type_='foreignkey')
    op.drop_constraint(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', type_='foreignkey')
    op.create_foreign_key(None, 'org_sales_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'org_sales_assignments', 'users', ['agent_user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.drop_constraint(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('organization_financials_organization_id_fkey'), 'organization_financials', type_='foreignkey')
    op.create_foreign_key(None, 'organization_financials', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    # --- Move the orguserrole enum into schema 'data' (values unchanged) ---
    op.alter_column('organization_members', 'role',
               existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
               type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
               existing_nullable=True)
    op.drop_constraint(op.f('organization_members_organization_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_user_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_person_id_fkey'), 'organization_members', type_='foreignkey')
    op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    # --- Move the orgtype enum into schema 'data' (values unchanged) ---
    op.alter_column('organizations', 'org_type',
               existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
               type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
               existing_nullable=True)
    op.drop_constraint(op.f('organizations_owner_id_fkey'), 'organizations', type_='foreignkey')
    op.drop_constraint(op.f('organizations_address_id_fkey'), 'organizations', type_='foreignkey')
    op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('pending_actions_requester_id_fkey'), 'pending_actions', type_='foreignkey')
    op.drop_constraint(op.f('pending_actions_approver_id_fkey'), 'pending_actions', type_='foreignkey')
    op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('persons_address_id_fkey'), 'persons', type_='foreignkey')
    op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('points_ledger_user_id_fkey'), 'points_ledger', type_='foreignkey')
    op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('ratings_target_organization_id_fkey'), 'ratings', type_='foreignkey')
    op.drop_constraint(op.f('ratings_target_user_id_fkey'), 'ratings', type_='foreignkey')
    op.drop_constraint(op.f('ratings_author_id_fkey'), 'ratings', type_='foreignkey')
    op.drop_constraint(op.f('ratings_target_branch_id_fkey'), 'ratings', type_='foreignkey')
    op.create_foreign_key(None, 'ratings', 'organizations', ['target_organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'ratings', 'users', ['author_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'ratings', 'users', ['target_user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'ratings', 'branches', ['target_branch_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', type_='foreignkey')
    op.drop_constraint(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', type_='foreignkey')
    op.drop_constraint(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'security_audit_logs', 'users', ['actor_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'security_audit_logs', 'users', ['target_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', type_='foreignkey')
    op.drop_constraint(op.f('service_expertises_service_id_fkey'), 'service_expertises', type_='foreignkey')
    op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_profiles_organization_id_fkey'), 'service_profiles', type_='foreignkey')
    op.drop_constraint(op.f('service_profiles_parent_id_fkey'), 'service_profiles', type_='foreignkey')
    op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'service_profiles', 'service_profiles', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_specialties_parent_id_fkey'), 'service_specialties', type_='foreignkey')
    op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('social_accounts_user_id_fkey'), 'social_accounts', type_='foreignkey')
    op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    # --- system_parameters: new surrogate key + JSON -> JSONB ---
    # NOTE(review): the new NOT NULL autoincrement `id` column is added without
    # an explicit primary-key flag here — confirm the PK change is handled by a
    # separate operation or was hand-adjusted, or this will need a PK constraint.
    op.add_column('system_parameters', sa.Column('id', sa.Integer(), autoincrement=True, nullable=False))
    op.alter_column('system_parameters', 'value',
               existing_type=postgresql.JSON(astext_type=sa.Text()),
               type_=postgresql.JSONB(astext_type=sa.Text()),
               existing_nullable=False)
    op.drop_constraint(op.f('user_badges_user_id_fkey'), 'user_badges', type_='foreignkey')
    op.drop_constraint(op.f('user_badges_badge_id_fkey'), 'user_badges', type_='foreignkey')
    op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('user_stats_user_id_fkey'), 'user_stats', type_='foreignkey')
    op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('users_person_id_fkey'), 'users', type_='foreignkey')
    op.drop_constraint(op.f('users_current_sales_agent_id_fkey'), 'users', type_='foreignkey')
    op.drop_constraint(op.f('users_referred_by_id_fkey'), 'users', type_='foreignkey')
    op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'users', 'users', ['referred_by_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'users', 'users', ['current_sales_agent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
    op.drop_constraint(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.drop_constraint(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', type_='foreignkey')
    op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    op.drop_constraint(op.f('wallets_user_id_fkey'), 'wallets', type_='foreignkey')
    op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint(None, 'wallets', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('wallets_user_id_fkey'), 'wallets', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'verification_tokens', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', 'users', ['user_id'], ['id'], ondelete='CASCADE')
|
||||
op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', 'users', ['user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'])
|
||||
op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
|
||||
op.create_foreign_key(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'])
|
||||
op.drop_constraint(None, 'vehicle_catalog', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'])
|
||||
op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('users_referred_by_id_fkey'), 'users', 'users', ['referred_by_id'], ['id'])
|
||||
op.create_foreign_key(op.f('users_current_sales_agent_id_fkey'), 'users', 'users', ['current_sales_agent_id'], ['id'])
|
||||
op.create_foreign_key(op.f('users_person_id_fkey'), 'users', 'persons', ['person_id'], ['id'])
|
||||
op.drop_constraint(None, 'user_stats', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('user_stats_user_id_fkey'), 'user_stats', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('user_badges_badge_id_fkey'), 'user_badges', 'badges', ['badge_id'], ['id'])
|
||||
op.create_foreign_key(op.f('user_badges_user_id_fkey'), 'user_badges', 'users', ['user_id'], ['id'])
|
||||
op.alter_column('system_parameters', 'value',
|
||||
existing_type=postgresql.JSONB(astext_type=sa.Text()),
|
||||
type_=postgresql.JSON(astext_type=sa.Text()),
|
||||
existing_nullable=False)
|
||||
op.drop_column('system_parameters', 'id')
|
||||
op.drop_constraint(None, 'social_accounts', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('social_accounts_user_id_fkey'), 'social_accounts', 'users', ['user_id'], ['id'], ondelete='CASCADE')
|
||||
op.drop_constraint(None, 'service_specialties', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('service_specialties_parent_id_fkey'), 'service_specialties', 'service_specialties', ['parent_id'], ['id'])
|
||||
op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('service_profiles_parent_id_fkey'), 'service_profiles', 'service_profiles', ['parent_id'], ['id'])
|
||||
op.create_foreign_key(op.f('service_profiles_organization_id_fkey'), 'service_profiles', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('service_expertises_service_id_fkey'), 'service_expertises', 'service_profiles', ['service_id'], ['id'])
|
||||
op.create_foreign_key(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'])
|
||||
op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'])
|
||||
op.create_foreign_key(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', 'users', ['actor_id'], ['id'])
|
||||
op.create_foreign_key(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', 'users', ['target_id'], ['id'])
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('ratings_target_branch_id_fkey'), 'ratings', 'branches', ['target_branch_id'], ['id'])
|
||||
op.create_foreign_key(op.f('ratings_author_id_fkey'), 'ratings', 'users', ['author_id'], ['id'])
|
||||
op.create_foreign_key(op.f('ratings_target_user_id_fkey'), 'ratings', 'users', ['target_user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('ratings_target_organization_id_fkey'), 'ratings', 'organizations', ['target_organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'points_ledger', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('points_ledger_user_id_fkey'), 'points_ledger', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'persons', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('persons_address_id_fkey'), 'persons', 'addresses', ['address_id'], ['id'])
|
||||
op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('pending_actions_approver_id_fkey'), 'pending_actions', 'users', ['approver_id'], ['id'])
|
||||
op.create_foreign_key(op.f('pending_actions_requester_id_fkey'), 'pending_actions', 'users', ['requester_id'], ['id'])
|
||||
op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('organizations_address_id_fkey'), 'organizations', 'addresses', ['address_id'], ['id'])
|
||||
op.create_foreign_key(op.f('organizations_owner_id_fkey'), 'organizations', 'users', ['owner_id'], ['id'])
|
||||
op.alter_column('organizations', 'org_type',
|
||||
existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
|
||||
type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('organization_members_person_id_fkey'), 'organization_members', 'persons', ['person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('organization_members_user_id_fkey'), 'organization_members', 'users', ['user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('organization_members_organization_id_fkey'), 'organization_members', 'organizations', ['organization_id'], ['id'])
|
||||
op.alter_column('organization_members', 'role',
|
||||
existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
|
||||
type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(None, 'organization_financials', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('organization_financials_organization_id_fkey'), 'organization_financials', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', 'organizations', ['org_id'], ['id'])
|
||||
op.create_foreign_key(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
|
||||
op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', 'users', ['agent_user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'operational_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('operational_logs_user_id_fkey'), 'operational_logs', 'users', ['user_id'], ['id'], ondelete='SET NULL')
|
||||
op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'])
|
||||
op.create_foreign_key(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'])
|
||||
op.drop_constraint(None, 'geo_streets', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'])
|
||||
op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', 'persons', ['person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', 'users', ['user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', 'users', ['related_agent_id'], ['id'])
|
||||
op.drop_constraint(None, 'feature_definitions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
|
||||
op.drop_constraint(None, 'documents', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('documents_uploaded_by_fkey'), 'documents', 'users', ['uploaded_by'], ['id'])
|
||||
op.drop_constraint(None, 'credit_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('credit_logs_org_id_fkey'), 'credit_logs', 'organizations', ['org_id'], ['id'])
|
||||
op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('branches_address_id_fkey'), 'branches', 'addresses', ['address_id'], ['id'])
|
||||
op.create_foreign_key(op.f('branches_organization_id_fkey'), 'branches', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'audit_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('audit_logs_user_id_fkey'), 'audit_logs', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('assets_current_organization_id_fkey'), 'assets', 'organizations', ['current_organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_owner_org_id_fkey'), 'assets', 'organizations', ['owner_org_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_owner_person_id_fkey'), 'assets', 'persons', ['owner_person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_operator_org_id_fkey'), 'assets', 'organizations', ['operator_org_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_operator_person_id_fkey'), 'assets', 'persons', ['operator_person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_catalog_id_fkey'), 'assets', 'vehicle_catalog', ['catalog_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_telemetry', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', 'assets', ['asset_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_financials', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_financials_asset_id_fkey'), 'asset_financials', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_events', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_events_asset_id_fkey'), 'asset_events', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_costs_driver_id_fkey'), 'asset_costs', 'users', ['driver_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_costs_asset_id_fkey'), 'asset_costs', 'assets', ['asset_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_costs_organization_id_fkey'), 'asset_costs', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', 'branches', ['branch_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', 'assets', ['asset_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'addresses', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('addresses_postal_code_id_fkey'), 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'])
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,308 @@
|
||||
"""Add robot protection fields v1.2.4
|
||||
|
||||
Revision ID: 492a65da864d
|
||||
Revises: c64b951dbb86
|
||||
Create Date: 2026-02-18 13:05:23.918947
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '492a65da864d'
|
||||
down_revision: Union[str, Sequence[str], None] = 'c64b951dbb86'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint(op.f('addresses_postal_code_id_fkey'), 'addresses', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_assignments', 'branches', ['branch_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_costs_organization_id_fkey'), 'asset_costs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_costs_asset_id_fkey'), 'asset_costs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_costs_driver_id_fkey'), 'asset_costs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_events_asset_id_fkey'), 'asset_events', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_financials_asset_id_fkey'), 'asset_financials', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('assets_catalog_id_fkey'), 'assets', type_='foreignkey')
|
||||
op.drop_constraint(op.f('assets_current_organization_id_fkey'), 'assets', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('audit_logs_user_id_fkey'), 'audit_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('branches_organization_id_fkey'), 'branches', type_='foreignkey')
|
||||
op.drop_constraint(op.f('branches_address_id_fkey'), 'branches', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'branches', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'branches', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('credit_logs_org_id_fkey'), 'credit_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('documents_uploaded_by_fkey'), 'documents', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', type_='foreignkey')
|
||||
op.drop_constraint(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', type_='foreignkey')
|
||||
op.drop_constraint(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'financial_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'financial_ledger', 'users', ['related_agent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'financial_ledger', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', type_='foreignkey')
|
||||
op.drop_constraint(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('operational_logs_user_id_fkey'), 'operational_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'operational_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='SET NULL')
|
||||
op.drop_constraint(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', type_='foreignkey')
|
||||
op.drop_constraint(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'org_sales_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'org_sales_assignments', 'users', ['agent_user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', type_='foreignkey')
|
||||
op.drop_constraint(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('organization_financials_organization_id_fkey'), 'organization_financials', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'organization_financials', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.alter_column('organization_members', 'role',
|
||||
existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
|
||||
type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(op.f('organization_members_organization_id_fkey'), 'organization_members', type_='foreignkey')
|
||||
op.drop_constraint(op.f('organization_members_person_id_fkey'), 'organization_members', type_='foreignkey')
|
||||
op.drop_constraint(op.f('organization_members_user_id_fkey'), 'organization_members', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'organization_members', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.alter_column('organizations', 'org_type',
|
||||
existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
|
||||
type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(op.f('organizations_owner_id_fkey'), 'organizations', type_='foreignkey')
|
||||
op.drop_constraint(op.f('organizations_address_id_fkey'), 'organizations', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('pending_actions_approver_id_fkey'), 'pending_actions', type_='foreignkey')
|
||||
op.drop_constraint(op.f('pending_actions_requester_id_fkey'), 'pending_actions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('persons_address_id_fkey'), 'persons', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('points_ledger_user_id_fkey'), 'points_ledger', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('ratings_author_id_fkey'), 'ratings', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'ratings', 'users', ['author_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'security_audit_logs', 'users', ['target_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'security_audit_logs', 'users', ['actor_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', type_='foreignkey')
|
||||
op.drop_constraint(op.f('service_expertises_service_id_fkey'), 'service_expertises', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('service_profiles_organization_id_fkey'), 'service_profiles', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('service_specialties_parent_id_fkey'), 'service_specialties', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('social_accounts_user_id_fkey'), 'social_accounts', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
|
||||
op.drop_constraint(op.f('user_badges_user_id_fkey'), 'user_badges', type_='foreignkey')
|
||||
op.drop_constraint(op.f('user_badges_badge_id_fkey'), 'user_badges', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('user_stats_user_id_fkey'), 'user_stats', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('users_referred_by_id_fkey'), 'users', type_='foreignkey')
|
||||
op.drop_constraint(op.f('users_current_sales_agent_id_fkey'), 'users', type_='foreignkey')
|
||||
op.drop_constraint(op.f('users_person_id_fkey'), 'users', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'users', 'users', ['referred_by_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'users', 'users', ['current_sales_agent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.add_column('vehicle_model_definitions', sa.Column('is_manual', sa.Boolean(), server_default=sa.text('false'), nullable=True))
|
||||
op.add_column('vehicle_model_definitions', sa.Column('attempts', sa.Integer(), server_default=sa.text('0'), nullable=True))
|
||||
op.add_column('vehicle_model_definitions', sa.Column('last_error', sa.Text(), nullable=True))
|
||||
op.create_index(op.f('ix_data_vehicle_model_definitions_attempts'), 'vehicle_model_definitions', ['attempts'], unique=False, schema='data')
|
||||
op.create_index(op.f('ix_data_vehicle_model_definitions_is_manual'), 'vehicle_model_definitions', ['is_manual'], unique=False, schema='data')
|
||||
op.drop_constraint(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
|
||||
op.drop_constraint(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
|
||||
op.drop_constraint(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
|
||||
op.drop_constraint(op.f('wallets_user_id_fkey'), 'wallets', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema.

    Reverses the schema-qualification migration: drops the schema-qualified
    ('data') foreign keys created by upgrade() and recreates the original
    unqualified constraints, then removes the columns/indexes added by
    upgrade() on vehicle_model_definitions.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): every op.drop_constraint(None, ...) below passes None as the
    # constraint name. Alembic requires a real constraint name here and will
    # raise at runtime ("Constraint must have a name"); the autogenerated
    # placeholders must be replaced with the actual (DB-default) FK names
    # before this downgrade is runnable — TODO confirm against the database.
    # NOTE(review): the create_foreign_key() calls omit source_schema/
    # referent_schema, unlike the mirrored upgrade() which targets
    # schema='data' — verify the intended target schema.

    # wallets / verification_tokens / vehicle_ownerships: restore original FKs.
    op.drop_constraint(None, 'wallets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('wallets_user_id_fkey'), 'wallets', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'verification_tokens', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'])
    op.create_foreign_key(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', 'users', ['user_id'], ['id'])

    # vehicle_model_definitions: restore FKs, then remove the pipeline
    # bookkeeping columns (attempts/last_error/is_manual) added by upgrade().
    op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'])
    op.create_foreign_key(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
    op.drop_index(op.f('ix_data_vehicle_model_definitions_is_manual'), table_name='vehicle_model_definitions', schema='data')
    op.drop_index(op.f('ix_data_vehicle_model_definitions_attempts'), table_name='vehicle_model_definitions', schema='data')
    op.drop_column('vehicle_model_definitions', 'last_error')
    op.drop_column('vehicle_model_definitions', 'attempts')
    op.drop_column('vehicle_model_definitions', 'is_manual')

    # Catalog / identity tables: restore original FK names.
    op.drop_constraint(None, 'vehicle_catalog', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'])
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('users_person_id_fkey'), 'users', 'persons', ['person_id'], ['id'])
    op.create_foreign_key(op.f('users_current_sales_agent_id_fkey'), 'users', 'users', ['current_sales_agent_id'], ['id'])
    op.create_foreign_key(op.f('users_referred_by_id_fkey'), 'users', 'users', ['referred_by_id'], ['id'])
    op.drop_constraint(None, 'user_stats', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_stats_user_id_fkey'), 'user_stats', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_badges_badge_id_fkey'), 'user_badges', 'badges', ['badge_id'], ['id'])
    op.create_foreign_key(op.f('user_badges_user_id_fkey'), 'user_badges', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'social_accounts', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('social_accounts_user_id_fkey'), 'social_accounts', 'users', ['user_id'], ['id'], ondelete='CASCADE')

    # Service domain tables.
    op.drop_constraint(None, 'service_specialties', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_specialties_parent_id_fkey'), 'service_specialties', 'service_specialties', ['parent_id'], ['id'])
    op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_profiles_organization_id_fkey'), 'service_profiles', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_expertises_service_id_fkey'), 'service_expertises', 'service_profiles', ['service_id'], ['id'])
    op.create_foreign_key(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'])

    # Audit / ledger tables.
    op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', 'users', ['target_id'], ['id'])
    op.create_foreign_key(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', 'users', ['actor_id'], ['id'])
    op.create_foreign_key(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'])
    op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('ratings_author_id_fkey'), 'ratings', 'users', ['author_id'], ['id'])
    op.drop_constraint(None, 'points_ledger', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('points_ledger_user_id_fkey'), 'points_ledger', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'persons', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('persons_address_id_fkey'), 'persons', 'addresses', ['address_id'], ['id'])
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('pending_actions_requester_id_fkey'), 'pending_actions', 'users', ['requester_id'], ['id'])
    op.create_foreign_key(op.f('pending_actions_approver_id_fkey'), 'pending_actions', 'users', ['approver_id'], ['id'])

    # Organization tables; the ENUM alter_columns strip the schema
    # qualification (inherit_schema) applied by upgrade().
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organizations_address_id_fkey'), 'organizations', 'addresses', ['address_id'], ['id'])
    op.create_foreign_key(op.f('organizations_owner_id_fkey'), 'organizations', 'users', ['owner_id'], ['id'])
    op.alter_column('organizations', 'org_type',
               existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
               type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
               existing_nullable=True)
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_members_user_id_fkey'), 'organization_members', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_person_id_fkey'), 'organization_members', 'persons', ['person_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_organization_id_fkey'), 'organization_members', 'organizations', ['organization_id'], ['id'])
    op.alter_column('organization_members', 'role',
               existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
               type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
               existing_nullable=True)
    op.drop_constraint(None, 'organization_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_financials_organization_id_fkey'), 'organization_financials', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
    op.create_foreign_key(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', 'users', ['agent_user_id'], ['id'])

    # Remaining operational / catalog / geo tables.
    op.drop_constraint(None, 'operational_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('operational_logs_user_id_fkey'), 'operational_logs', 'users', ['user_id'], ['id'], ondelete='SET NULL')
    op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'])
    op.create_foreign_key(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'])
    op.drop_constraint(None, 'geo_streets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'])
    op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', 'users', ['related_agent_id'], ['id'])
    op.create_foreign_key(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', 'persons', ['person_id'], ['id'])
    op.drop_constraint(None, 'feature_definitions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
    op.drop_constraint(None, 'documents', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('documents_uploaded_by_fkey'), 'documents', 'users', ['uploaded_by'], ['id'])
    op.drop_constraint(None, 'credit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('credit_logs_org_id_fkey'), 'credit_logs', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('branches_address_id_fkey'), 'branches', 'addresses', ['address_id'], ['id'])
    op.create_foreign_key(op.f('branches_organization_id_fkey'), 'branches', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'audit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('audit_logs_user_id_fkey'), 'audit_logs', 'users', ['user_id'], ['id'])

    # Asset tables.
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('assets_current_organization_id_fkey'), 'assets', 'organizations', ['current_organization_id'], ['id'])
    op.create_foreign_key(op.f('assets_catalog_id_fkey'), 'assets', 'vehicle_catalog', ['catalog_id'], ['id'])
    op.drop_constraint(None, 'asset_telemetry', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'asset_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_financials_asset_id_fkey'), 'asset_financials', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_events', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_events_asset_id_fkey'), 'asset_events', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_costs_driver_id_fkey'), 'asset_costs', 'users', ['driver_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_asset_id_fkey'), 'asset_costs', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_organization_id_fkey'), 'asset_costs', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', 'branches', ['branch_id'], ['id'])
    op.create_foreign_key(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'addresses', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('addresses_postal_code_id_fkey'), 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'])
    # ### end Alembic commands ###
|
||||
356
backend/migrations/versions/54cbd5c9e003_pipeline_v2_upgrade.py
Normal file
356
backend/migrations/versions/54cbd5c9e003_pipeline_v2_upgrade.py
Normal file
@@ -0,0 +1,356 @@
|
||||
"""pipeline_v2_upgrade
|
||||
|
||||
Revision ID: 54cbd5c9e003
|
||||
Revises: d362d1cb0b38
|
||||
Create Date: 2026-02-20 11:45:15.360508
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '54cbd5c9e003'
|
||||
down_revision: Union[str, Sequence[str], None] = 'd362d1cb0b38'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema.

    Three concerns, applied table by table:
    1. Drop every existing FK and recreate it with explicit
       source_schema='data' / referent_schema='data'.
    2. Rework 'translations': add a 'lang' column, widen/relax 'key' and
       'value', swap the lang_code index/unique constraint for a 'lang'
       index, and drop 'is_published' and 'lang_code'.
    3. Extend 'vehicle_model_definitions' with raw_search_context and
       research_metadata, widen 'status', and index it.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): every create_foreign_key(None, ...) relies on the
    # database's default constraint naming; the autogenerated downgrade's
    # drop_constraint(None, ...) cannot reference those names and will fail —
    # consider giving these FKs explicit names.
    op.drop_constraint(op.f('addresses_postal_code_id_fkey'), 'addresses', type_='foreignkey')
    op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.drop_constraint(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.drop_constraint(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.create_foreign_key(None, 'asset_assignments', 'branches', ['branch_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_costs_organization_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_driver_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_asset_id_fkey'), 'asset_costs', type_='foreignkey')
    op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_events_asset_id_fkey'), 'asset_events', type_='foreignkey')
    op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_financials_asset_id_fkey'), 'asset_financials', type_='foreignkey')
    op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.drop_constraint(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', type_='foreignkey')
    op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('assets_current_organization_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_catalog_id_fkey'), 'assets', type_='foreignkey')
    op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('audit_logs_user_id_fkey'), 'audit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('branches_organization_id_fkey'), 'branches', type_='foreignkey')
    op.drop_constraint(op.f('branches_address_id_fkey'), 'branches', type_='foreignkey')
    op.create_foreign_key(None, 'branches', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'branches', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('credit_logs_org_id_fkey'), 'credit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('documents_uploaded_by_fkey'), 'documents', type_='foreignkey')
    op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', type_='foreignkey')
    op.create_foreign_key(None, 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', type_='foreignkey')
    op.drop_constraint(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', type_='foreignkey')
    op.drop_constraint(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', type_='foreignkey')
    op.create_foreign_key(None, 'financial_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'financial_ledger', 'users', ['related_agent_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'financial_ledger', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', type_='foreignkey')
    op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', type_='foreignkey')
    op.drop_constraint(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', type_='foreignkey')
    op.create_foreign_key(None, 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('operational_logs_user_id_fkey'), 'operational_logs', type_='foreignkey')
    op.create_foreign_key(None, 'operational_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='SET NULL')
    op.drop_constraint(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', type_='foreignkey')
    op.drop_constraint(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', type_='foreignkey')
    op.create_foreign_key(None, 'org_sales_assignments', 'users', ['agent_user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'org_sales_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.drop_constraint(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('organization_financials_organization_id_fkey'), 'organization_financials', type_='foreignkey')
    op.create_foreign_key(None, 'organization_financials', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    # ENUM columns: re-declare the PostgreSQL enum types as schema-qualified
    # ('data', inherit_schema=True); values are unchanged.
    op.alter_column('organization_members', 'role',
               existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
               type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
               existing_nullable=True)
    op.drop_constraint(op.f('organization_members_organization_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_user_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_person_id_fkey'), 'organization_members', type_='foreignkey')
    op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.alter_column('organizations', 'org_type',
               existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
               type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
               existing_nullable=True)
    op.drop_constraint(op.f('organizations_owner_id_fkey'), 'organizations', type_='foreignkey')
    op.drop_constraint(op.f('organizations_address_id_fkey'), 'organizations', type_='foreignkey')
    op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('pending_actions_approver_id_fkey'), 'pending_actions', type_='foreignkey')
    op.drop_constraint(op.f('pending_actions_requester_id_fkey'), 'pending_actions', type_='foreignkey')
    op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('persons_address_id_fkey'), 'persons', type_='foreignkey')
    op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('points_ledger_user_id_fkey'), 'points_ledger', type_='foreignkey')
    op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('ratings_author_id_fkey'), 'ratings', type_='foreignkey')
    op.drop_constraint(op.f('ratings_target_organization_id_fkey'), 'ratings', type_='foreignkey')
    op.drop_constraint(op.f('ratings_target_user_id_fkey'), 'ratings', type_='foreignkey')
    op.drop_constraint(op.f('ratings_target_branch_id_fkey'), 'ratings', type_='foreignkey')
    op.create_foreign_key(None, 'ratings', 'branches', ['target_branch_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'ratings', 'users', ['author_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'ratings', 'users', ['target_user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'ratings', 'organizations', ['target_organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', type_='foreignkey')
    op.drop_constraint(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', type_='foreignkey')
    op.drop_constraint(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'security_audit_logs', 'users', ['target_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'security_audit_logs', 'users', ['actor_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', type_='foreignkey')
    op.drop_constraint(op.f('service_expertises_service_id_fkey'), 'service_expertises', type_='foreignkey')
    op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_profiles_parent_id_fkey'), 'service_profiles', type_='foreignkey')
    op.drop_constraint(op.f('service_profiles_organization_id_fkey'), 'service_profiles', type_='foreignkey')
    op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'service_profiles', 'service_profiles', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_specialties_parent_id_fkey'), 'service_specialties', type_='foreignkey')
    op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('social_accounts_user_id_fkey'), 'social_accounts', type_='foreignkey')
    op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    # translations rework: 'lang' replaces 'lang_code'; 'key' is widened and
    # relaxed to nullable; 'is_published' is removed.
    # NOTE(review): dropping 'is_published' and 'lang_code' is destructive —
    # their values are not migrated into 'lang' here and cannot be restored by
    # the downgrade. Confirm a data backfill happens elsewhere before running.
    op.add_column('translations', sa.Column('lang', sa.String(length=5), nullable=True))
    op.alter_column('translations', 'key',
               existing_type=sa.VARCHAR(length=100),
               type_=sa.String(length=255),
               nullable=True)
    op.alter_column('translations', 'value',
               existing_type=sa.TEXT(),
               nullable=True)
    op.drop_index(op.f('ix_data_translations_lang_code'), table_name='translations')
    op.drop_constraint(op.f('uq_translation_key_lang'), 'translations', type_='unique')
    op.create_index(op.f('ix_data_translations_lang'), 'translations', ['lang'], unique=False, schema='data')
    op.drop_column('translations', 'is_published')
    op.drop_column('translations', 'lang_code')
    op.drop_constraint(op.f('user_badges_badge_id_fkey'), 'user_badges', type_='foreignkey')
    op.drop_constraint(op.f('user_badges_user_id_fkey'), 'user_badges', type_='foreignkey')
    op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('user_stats_user_id_fkey'), 'user_stats', type_='foreignkey')
    op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('users_current_sales_agent_id_fkey'), 'users', type_='foreignkey')
    op.drop_constraint(op.f('users_person_id_fkey'), 'users', type_='foreignkey')
    op.drop_constraint(op.f('users_referred_by_id_fkey'), 'users', type_='foreignkey')
    op.create_foreign_key(None, 'users', 'users', ['referred_by_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'users', 'users', ['current_sales_agent_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'], source_schema='data', referent_schema='data')
    # Pipeline v2 research columns; research_metadata defaults to empty JSONB
    # so existing rows stay valid under NOT NULL.
    op.add_column('vehicle_model_definitions', sa.Column('raw_search_context', sa.Text(), nullable=True))
    op.add_column('vehicle_model_definitions', sa.Column('research_metadata', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), nullable=False))
    op.alter_column('vehicle_model_definitions', 'status',
               existing_type=sa.VARCHAR(length=20),
               type_=sa.String(length=30),
               existing_nullable=True,
               existing_server_default=sa.text("'unverified'::character varying"))
    op.create_index(op.f('ix_data_vehicle_model_definitions_status'), 'vehicle_model_definitions', ['status'], unique=False, schema='data')
    op.drop_constraint(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
    op.drop_constraint(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.drop_constraint(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', type_='foreignkey')
    op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    op.drop_constraint(op.f('wallets_user_id_fkey'), 'wallets', type_='foreignkey')
    op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema.

    Reverses the schema-qualification pass performed by ``upgrade()``:
    drops the unnamed foreign keys created there and re-creates the
    original, explicitly named constraints, then reverts the column,
    index and enum changes on ``vehicle_model_definitions``,
    ``translations``, ``organizations`` and ``organization_members``.

    NOTE(review): every ``op.drop_constraint(None, ...)`` below still
    carries the autogen placeholder ``None`` as the constraint name.
    Alembic cannot drop a constraint without a name, so these must be
    replaced with the server-assigned FK names before this downgrade can
    actually run — TODO confirm the real names against the database.

    NOTE(review): the re-created foreign keys pass no ``schema=``
    arguments, so they resolve against the default search-path schema —
    verify this matches the pre-upgrade layout.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'wallets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('wallets_user_id_fkey'), 'wallets', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'verification_tokens', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'])
    op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
    op.create_foreign_key(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'])
    # Revert the status column widening (30 -> 20 chars) and its index.
    op.drop_index(op.f('ix_data_vehicle_model_definitions_status'), table_name='vehicle_model_definitions', schema='data')
    op.alter_column('vehicle_model_definitions', 'status',
                    existing_type=sa.String(length=30),
                    type_=sa.VARCHAR(length=20),
                    existing_nullable=True,
                    existing_server_default=sa.text("'unverified'::character varying"))
    op.drop_column('vehicle_model_definitions', 'research_metadata')
    op.drop_column('vehicle_model_definitions', 'raw_search_context')
    op.drop_constraint(None, 'vehicle_catalog', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'])
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('users_referred_by_id_fkey'), 'users', 'users', ['referred_by_id'], ['id'])
    op.create_foreign_key(op.f('users_person_id_fkey'), 'users', 'persons', ['person_id'], ['id'])
    op.create_foreign_key(op.f('users_current_sales_agent_id_fkey'), 'users', 'users', ['current_sales_agent_id'], ['id'])
    op.drop_constraint(None, 'user_stats', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_stats_user_id_fkey'), 'user_stats', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_badges_user_id_fkey'), 'user_badges', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('user_badges_badge_id_fkey'), 'user_badges', 'badges', ['badge_id'], ['id'])
    # Restore the old translations layout: (key, lang_code) pair columns
    # replace the single 'lang' column added by upgrade().
    op.add_column('translations', sa.Column('lang_code', sa.VARCHAR(length=5), autoincrement=False, nullable=False))
    op.add_column('translations', sa.Column('is_published', sa.BOOLEAN(), autoincrement=False, nullable=True))
    op.drop_index(op.f('ix_data_translations_lang'), table_name='translations', schema='data')
    op.create_unique_constraint(op.f('uq_translation_key_lang'), 'translations', ['key', 'lang_code'], postgresql_nulls_not_distinct=False)
    op.create_index(op.f('ix_data_translations_lang_code'), 'translations', ['lang_code'], unique=False)
    op.alter_column('translations', 'value',
                    existing_type=sa.TEXT(),
                    nullable=False)
    op.alter_column('translations', 'key',
                    existing_type=sa.String(length=255),
                    type_=sa.VARCHAR(length=100),
                    nullable=False)
    op.drop_column('translations', 'lang')
    op.drop_constraint(None, 'social_accounts', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('social_accounts_user_id_fkey'), 'social_accounts', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint(None, 'service_specialties', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_specialties_parent_id_fkey'), 'service_specialties', 'service_specialties', ['parent_id'], ['id'])
    op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_profiles_organization_id_fkey'), 'service_profiles', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('service_profiles_parent_id_fkey'), 'service_profiles', 'service_profiles', ['parent_id'], ['id'])
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_expertises_service_id_fkey'), 'service_expertises', 'service_profiles', ['service_id'], ['id'])
    op.create_foreign_key(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'])
    op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', 'users', ['target_id'], ['id'])
    op.create_foreign_key(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'])
    op.create_foreign_key(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', 'users', ['actor_id'], ['id'])
    op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('ratings_target_branch_id_fkey'), 'ratings', 'branches', ['target_branch_id'], ['id'])
    op.create_foreign_key(op.f('ratings_target_user_id_fkey'), 'ratings', 'users', ['target_user_id'], ['id'])
    op.create_foreign_key(op.f('ratings_target_organization_id_fkey'), 'ratings', 'organizations', ['target_organization_id'], ['id'])
    op.create_foreign_key(op.f('ratings_author_id_fkey'), 'ratings', 'users', ['author_id'], ['id'])
    op.drop_constraint(None, 'points_ledger', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('points_ledger_user_id_fkey'), 'points_ledger', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'persons', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('persons_address_id_fkey'), 'persons', 'addresses', ['address_id'], ['id'])
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('pending_actions_requester_id_fkey'), 'pending_actions', 'users', ['requester_id'], ['id'])
    op.create_foreign_key(op.f('pending_actions_approver_id_fkey'), 'pending_actions', 'users', ['approver_id'], ['id'])
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organizations_address_id_fkey'), 'organizations', 'addresses', ['address_id'], ['id'])
    op.create_foreign_key(op.f('organizations_owner_id_fkey'), 'organizations', 'users', ['owner_id'], ['id'])
    # Move the orgtype enum back out of the 'data' schema.
    op.alter_column('organizations', 'org_type',
                    existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
                    type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
                    existing_nullable=True)
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_members_person_id_fkey'), 'organization_members', 'persons', ['person_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_user_id_fkey'), 'organization_members', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_organization_id_fkey'), 'organization_members', 'organizations', ['organization_id'], ['id'])
    # Move the orguserrole enum back out of the 'data' schema.
    op.alter_column('organization_members', 'role',
                    existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
                    type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
                    existing_nullable=True)
    op.drop_constraint(None, 'organization_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_financials_organization_id_fkey'), 'organization_financials', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
    op.create_foreign_key(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', 'users', ['agent_user_id'], ['id'])
    op.drop_constraint(None, 'operational_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('operational_logs_user_id_fkey'), 'operational_logs', 'users', ['user_id'], ['id'], ondelete='SET NULL')
    op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'])
    op.create_foreign_key(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'])
    op.drop_constraint(None, 'geo_streets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'])
    op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', 'users', ['related_agent_id'], ['id'])
    op.create_foreign_key(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', 'persons', ['person_id'], ['id'])
    op.drop_constraint(None, 'feature_definitions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
    op.drop_constraint(None, 'documents', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('documents_uploaded_by_fkey'), 'documents', 'users', ['uploaded_by'], ['id'])
    op.drop_constraint(None, 'credit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('credit_logs_org_id_fkey'), 'credit_logs', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('branches_address_id_fkey'), 'branches', 'addresses', ['address_id'], ['id'])
    op.create_foreign_key(op.f('branches_organization_id_fkey'), 'branches', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'audit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('audit_logs_user_id_fkey'), 'audit_logs', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('assets_catalog_id_fkey'), 'assets', 'vehicle_catalog', ['catalog_id'], ['id'])
    op.create_foreign_key(op.f('assets_current_organization_id_fkey'), 'assets', 'organizations', ['current_organization_id'], ['id'])
    op.drop_constraint(None, 'asset_telemetry', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_financials_asset_id_fkey'), 'asset_financials', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_events', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_events_asset_id_fkey'), 'asset_events', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_costs_asset_id_fkey'), 'asset_costs', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_driver_id_fkey'), 'asset_costs', 'users', ['driver_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_organization_id_fkey'), 'asset_costs', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', 'branches', ['branch_id'], ['id'])
    op.create_foreign_key(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'addresses', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('addresses_postal_code_id_fkey'), 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'])
    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,338 @@
|
||||
"""add_scope_columns_to_system_parameters
|
||||
|
||||
Revision ID: 835cc89dadc7
|
||||
Revises: dd910cabe24e
|
||||
Create Date: 2026-02-21 21:48:40.720825
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '835cc89dadc7'
|
||||
down_revision: Union[str, Sequence[str], None] = 'dd910cabe24e'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema.

    Two things happen in this revision:

    1. Every foreign key in the 'data' schema is dropped by its explicit
       name and re-created with ``source_schema='data'`` /
       ``referent_schema='data'`` so the constraints are fully
       schema-qualified; the ``orgtype`` and ``orguserrole`` enums are
       likewise moved into the 'data' schema (``inherit_schema=True``).
    2. ``system_parameters`` gains ``scope_level`` (default 'global') and
       ``scope_id`` columns, an index on ``scope_level``, and a unique
       constraint over (key, scope_level, scope_id) for scoped overrides.

    NOTE(review): the re-created FKs are unnamed (``None``), so the
    server assigns their names; the matching ``downgrade()`` must use
    those server-assigned names to drop them — TODO confirm.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(op.f('addresses_postal_code_id_fkey'), 'addresses', type_='foreignkey')
    op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.drop_constraint(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.drop_constraint(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', type_='foreignkey')
    op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_assignments', 'branches', ['branch_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_costs_asset_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_organization_id_fkey'), 'asset_costs', type_='foreignkey')
    op.drop_constraint(op.f('asset_costs_driver_id_fkey'), 'asset_costs', type_='foreignkey')
    op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_events_asset_id_fkey'), 'asset_events', type_='foreignkey')
    op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_financials_asset_id_fkey'), 'asset_financials', type_='foreignkey')
    op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.drop_constraint(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', type_='foreignkey')
    op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', type_='foreignkey')
    op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('assets_operator_org_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_current_organization_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_owner_org_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_owner_person_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_catalog_id_fkey'), 'assets', type_='foreignkey')
    op.drop_constraint(op.f('assets_operator_person_id_fkey'), 'assets', type_='foreignkey')
    op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'persons', ['owner_person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'persons', ['operator_person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'organizations', ['owner_org_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'assets', 'organizations', ['operator_org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('audit_logs_user_id_fkey'), 'audit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('branches_address_id_fkey'), 'branches', type_='foreignkey')
    op.drop_constraint(op.f('branches_organization_id_fkey'), 'branches', type_='foreignkey')
    op.create_foreign_key(None, 'branches', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'branches', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('credit_logs_org_id_fkey'), 'credit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('documents_uploaded_by_fkey'), 'documents', type_='foreignkey')
    op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', type_='foreignkey')
    op.create_foreign_key(None, 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', type_='foreignkey')
    op.drop_constraint(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', type_='foreignkey')
    op.drop_constraint(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', type_='foreignkey')
    op.create_foreign_key(None, 'financial_ledger', 'users', ['related_agent_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'financial_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'financial_ledger', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', type_='foreignkey')
    op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', type_='foreignkey')
    op.drop_constraint(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', type_='foreignkey')
    op.create_foreign_key(None, 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('operational_logs_user_id_fkey'), 'operational_logs', type_='foreignkey')
    op.create_foreign_key(None, 'operational_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='SET NULL')
    op.drop_constraint(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', type_='foreignkey')
    op.drop_constraint(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', type_='foreignkey')
    op.create_foreign_key(None, 'org_sales_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'org_sales_assignments', 'users', ['agent_user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.drop_constraint(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', type_='foreignkey')
    op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('organization_financials_organization_id_fkey'), 'organization_financials', type_='foreignkey')
    op.create_foreign_key(None, 'organization_financials', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    # Move the orguserrole enum into the 'data' schema.
    op.alter_column('organization_members', 'role',
                    existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
                    type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
                    existing_nullable=True)
    op.drop_constraint(op.f('organization_members_user_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_organization_id_fkey'), 'organization_members', type_='foreignkey')
    op.drop_constraint(op.f('organization_members_person_id_fkey'), 'organization_members', type_='foreignkey')
    op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    # Move the orgtype enum into the 'data' schema.
    op.alter_column('organizations', 'org_type',
                    existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
                    type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
                    existing_nullable=True)
    op.drop_constraint(op.f('organizations_owner_id_fkey'), 'organizations', type_='foreignkey')
    op.drop_constraint(op.f('organizations_address_id_fkey'), 'organizations', type_='foreignkey')
    op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('pending_actions_approver_id_fkey'), 'pending_actions', type_='foreignkey')
    op.drop_constraint(op.f('pending_actions_requester_id_fkey'), 'pending_actions', type_='foreignkey')
    op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('persons_address_id_fkey'), 'persons', type_='foreignkey')
    op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('points_ledger_user_id_fkey'), 'points_ledger', type_='foreignkey')
    op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('ratings_author_id_fkey'), 'ratings', type_='foreignkey')
    op.drop_constraint(op.f('ratings_target_user_id_fkey'), 'ratings', type_='foreignkey')
    op.drop_constraint(op.f('ratings_target_organization_id_fkey'), 'ratings', type_='foreignkey')
    op.drop_constraint(op.f('ratings_target_branch_id_fkey'), 'ratings', type_='foreignkey')
    op.create_foreign_key(None, 'ratings', 'users', ['target_user_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'ratings', 'branches', ['target_branch_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'ratings', 'users', ['author_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'ratings', 'organizations', ['target_organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', type_='foreignkey')
    op.drop_constraint(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', type_='foreignkey')
    op.drop_constraint(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', type_='foreignkey')
    op.create_foreign_key(None, 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'security_audit_logs', 'users', ['actor_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'security_audit_logs', 'users', ['target_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', type_='foreignkey')
    op.drop_constraint(op.f('service_expertises_service_id_fkey'), 'service_expertises', type_='foreignkey')
    op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_profiles_organization_id_fkey'), 'service_profiles', type_='foreignkey')
    op.drop_constraint(op.f('service_profiles_parent_id_fkey'), 'service_profiles', type_='foreignkey')
    op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'service_profiles', 'service_profiles', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('service_specialties_parent_id_fkey'), 'service_specialties', type_='foreignkey')
    op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('social_accounts_user_id_fkey'), 'social_accounts', type_='foreignkey')
    op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    # The actual payload of this revision: scoped system parameters.
    op.add_column('system_parameters', sa.Column('scope_level', sa.String(length=30), server_default=sa.text("'global'"), nullable=True))
    op.add_column('system_parameters', sa.Column('scope_id', sa.String(length=50), nullable=True))
    op.create_index(op.f('ix_data_system_parameters_scope_level'), 'system_parameters', ['scope_level'], unique=False, schema='data')
    op.create_unique_constraint('uix_param_scope', 'system_parameters', ['key', 'scope_level', 'scope_id'], schema='data')
    op.drop_constraint(op.f('user_badges_user_id_fkey'), 'user_badges', type_='foreignkey')
    op.drop_constraint(op.f('user_badges_badge_id_fkey'), 'user_badges', type_='foreignkey')
    op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('user_stats_user_id_fkey'), 'user_stats', type_='foreignkey')
    op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('users_person_id_fkey'), 'users', type_='foreignkey')
    op.drop_constraint(op.f('users_current_sales_agent_id_fkey'), 'users', type_='foreignkey')
    op.drop_constraint(op.f('users_referred_by_id_fkey'), 'users', type_='foreignkey')
    op.create_foreign_key(None, 'users', 'users', ['current_sales_agent_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'users', 'users', ['referred_by_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
    op.drop_constraint(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.drop_constraint(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
    op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
    op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    op.drop_constraint(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', type_='foreignkey')
    op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
    op.drop_constraint(op.f('wallets_user_id_fkey'), 'wallets', type_='foreignkey')
    op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint(None, 'wallets', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('wallets_user_id_fkey'), 'wallets', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'verification_tokens', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', 'users', ['user_id'], ['id'], ondelete='CASCADE')
|
||||
op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'])
|
||||
op.create_foreign_key(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
|
||||
op.create_foreign_key(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'])
|
||||
op.drop_constraint(None, 'vehicle_catalog', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'])
|
||||
op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('users_referred_by_id_fkey'), 'users', 'users', ['referred_by_id'], ['id'])
|
||||
op.create_foreign_key(op.f('users_current_sales_agent_id_fkey'), 'users', 'users', ['current_sales_agent_id'], ['id'])
|
||||
op.create_foreign_key(op.f('users_person_id_fkey'), 'users', 'persons', ['person_id'], ['id'])
|
||||
op.drop_constraint(None, 'user_stats', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('user_stats_user_id_fkey'), 'user_stats', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('user_badges_badge_id_fkey'), 'user_badges', 'badges', ['badge_id'], ['id'])
|
||||
op.create_foreign_key(op.f('user_badges_user_id_fkey'), 'user_badges', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint('uix_param_scope', 'system_parameters', schema='data', type_='unique')
|
||||
op.drop_index(op.f('ix_data_system_parameters_scope_level'), table_name='system_parameters', schema='data')
|
||||
op.drop_column('system_parameters', 'scope_id')
|
||||
op.drop_column('system_parameters', 'scope_level')
|
||||
op.drop_constraint(None, 'social_accounts', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('social_accounts_user_id_fkey'), 'social_accounts', 'users', ['user_id'], ['id'], ondelete='CASCADE')
|
||||
op.drop_constraint(None, 'service_specialties', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('service_specialties_parent_id_fkey'), 'service_specialties', 'service_specialties', ['parent_id'], ['id'])
|
||||
op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('service_profiles_parent_id_fkey'), 'service_profiles', 'service_profiles', ['parent_id'], ['id'])
|
||||
op.create_foreign_key(op.f('service_profiles_organization_id_fkey'), 'service_profiles', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('service_expertises_service_id_fkey'), 'service_expertises', 'service_profiles', ['service_id'], ['id'])
|
||||
op.create_foreign_key(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'])
|
||||
op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', 'users', ['target_id'], ['id'])
|
||||
op.create_foreign_key(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', 'users', ['actor_id'], ['id'])
|
||||
op.create_foreign_key(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'])
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('ratings_target_branch_id_fkey'), 'ratings', 'branches', ['target_branch_id'], ['id'])
|
||||
op.create_foreign_key(op.f('ratings_target_organization_id_fkey'), 'ratings', 'organizations', ['target_organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('ratings_target_user_id_fkey'), 'ratings', 'users', ['target_user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('ratings_author_id_fkey'), 'ratings', 'users', ['author_id'], ['id'])
|
||||
op.drop_constraint(None, 'points_ledger', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('points_ledger_user_id_fkey'), 'points_ledger', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'persons', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('persons_address_id_fkey'), 'persons', 'addresses', ['address_id'], ['id'])
|
||||
op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('pending_actions_requester_id_fkey'), 'pending_actions', 'users', ['requester_id'], ['id'])
|
||||
op.create_foreign_key(op.f('pending_actions_approver_id_fkey'), 'pending_actions', 'users', ['approver_id'], ['id'])
|
||||
op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('organizations_address_id_fkey'), 'organizations', 'addresses', ['address_id'], ['id'])
|
||||
op.create_foreign_key(op.f('organizations_owner_id_fkey'), 'organizations', 'users', ['owner_id'], ['id'])
|
||||
op.alter_column('organizations', 'org_type',
|
||||
existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
|
||||
type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('organization_members_person_id_fkey'), 'organization_members', 'persons', ['person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('organization_members_organization_id_fkey'), 'organization_members', 'organizations', ['organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('organization_members_user_id_fkey'), 'organization_members', 'users', ['user_id'], ['id'])
|
||||
op.alter_column('organization_members', 'role',
|
||||
existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
|
||||
type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(None, 'organization_financials', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('organization_financials_organization_id_fkey'), 'organization_financials', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
|
||||
op.create_foreign_key(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', 'organizations', ['org_id'], ['id'])
|
||||
op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', 'users', ['agent_user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'operational_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('operational_logs_user_id_fkey'), 'operational_logs', 'users', ['user_id'], ['id'], ondelete='SET NULL')
|
||||
op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'])
|
||||
op.create_foreign_key(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'])
|
||||
op.drop_constraint(None, 'geo_streets', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'])
|
||||
op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', 'persons', ['person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', 'users', ['user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', 'users', ['related_agent_id'], ['id'])
|
||||
op.drop_constraint(None, 'feature_definitions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
|
||||
op.drop_constraint(None, 'documents', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('documents_uploaded_by_fkey'), 'documents', 'users', ['uploaded_by'], ['id'])
|
||||
op.drop_constraint(None, 'credit_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('credit_logs_org_id_fkey'), 'credit_logs', 'organizations', ['org_id'], ['id'])
|
||||
op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('branches_organization_id_fkey'), 'branches', 'organizations', ['organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('branches_address_id_fkey'), 'branches', 'addresses', ['address_id'], ['id'])
|
||||
op.drop_constraint(None, 'audit_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('audit_logs_user_id_fkey'), 'audit_logs', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('assets_operator_person_id_fkey'), 'assets', 'persons', ['operator_person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_catalog_id_fkey'), 'assets', 'vehicle_catalog', ['catalog_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_owner_person_id_fkey'), 'assets', 'persons', ['owner_person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_owner_org_id_fkey'), 'assets', 'organizations', ['owner_org_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_current_organization_id_fkey'), 'assets', 'organizations', ['current_organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_operator_org_id_fkey'), 'assets', 'organizations', ['operator_org_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_telemetry', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', 'assets', ['asset_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_financials', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_financials_asset_id_fkey'), 'asset_financials', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_events', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_events_asset_id_fkey'), 'asset_events', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_costs_driver_id_fkey'), 'asset_costs', 'users', ['driver_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_costs_organization_id_fkey'), 'asset_costs', 'organizations', ['organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_costs_asset_id_fkey'), 'asset_costs', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', 'assets', ['asset_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', 'organizations', ['organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', 'branches', ['branch_id'], ['id'])
|
||||
op.drop_constraint(None, 'addresses', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('addresses_postal_code_id_fkey'), 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'])
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,373 @@
|
||||
"""Unified Master Schema v1.3.2
|
||||
|
||||
Revision ID: d362d1cb0b38
|
||||
Revises: 492a65da864d
|
||||
Create Date: 2026-02-18 23:00:05.907043
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = 'd362d1cb0b38'
|
||||
down_revision: Union[str, Sequence[str], None] = '492a65da864d'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('ratings',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('author_id', sa.Integer(), nullable=False),
|
||||
sa.Column('target_organization_id', sa.Integer(), nullable=True),
|
||||
sa.Column('target_user_id', sa.Integer(), nullable=True),
|
||||
sa.Column('target_branch_id', sa.UUID(), nullable=True),
|
||||
sa.Column('score', sa.Numeric(precision=3, scale=2), nullable=False),
|
||||
sa.Column('comment', sa.Text(), nullable=True),
|
||||
sa.Column('images', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'[]'::jsonb"), nullable=True),
|
||||
sa.Column('is_verified', sa.Boolean(), nullable=True),
|
||||
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
|
||||
sa.ForeignKeyConstraint(['author_id'], ['data.users.id'], ),
|
||||
sa.ForeignKeyConstraint(['target_branch_id'], ['data.branches.id'], ),
|
||||
sa.ForeignKeyConstraint(['target_organization_id'], ['data.organizations.id'], ),
|
||||
sa.ForeignKeyConstraint(['target_user_id'], ['data.users.id'], ),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
schema='data'
|
||||
)
|
||||
op.create_index('idx_rating_branch', 'ratings', ['target_branch_id'], unique=False, schema='data')
|
||||
op.create_index('idx_rating_org', 'ratings', ['target_organization_id'], unique=False, schema='data')
|
||||
op.create_index('idx_rating_user', 'ratings', ['target_user_id'], unique=False, schema='data')
|
||||
op.drop_constraint(op.f('addresses_postal_code_id_fkey'), 'addresses', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_assignments', 'branches', ['branch_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_costs_organization_id_fkey'), 'asset_costs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_costs_driver_id_fkey'), 'asset_costs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_costs_asset_id_fkey'), 'asset_costs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_events_asset_id_fkey'), 'asset_events', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_financials_asset_id_fkey'), 'asset_financials', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('assets_current_organization_id_fkey'), 'assets', type_='foreignkey')
|
||||
op.drop_constraint(op.f('assets_catalog_id_fkey'), 'assets', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('audit_logs_user_id_fkey'), 'audit_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('branches_address_id_fkey'), 'branches', type_='foreignkey')
|
||||
op.drop_constraint(op.f('branches_organization_id_fkey'), 'branches', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'branches', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'branches', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('credit_logs_org_id_fkey'), 'credit_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('documents_uploaded_by_fkey'), 'documents', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', type_='foreignkey')
|
||||
op.drop_constraint(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', type_='foreignkey')
|
||||
op.drop_constraint(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'financial_ledger', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'financial_ledger', 'users', ['related_agent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'financial_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', type_='foreignkey')
|
||||
op.drop_constraint(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('operational_logs_user_id_fkey'), 'operational_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'operational_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='SET NULL')
|
||||
op.drop_constraint(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', type_='foreignkey')
|
||||
op.drop_constraint(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'org_sales_assignments', 'users', ['agent_user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'org_sales_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', type_='foreignkey')
|
||||
op.drop_constraint(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('organization_financials_organization_id_fkey'), 'organization_financials', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'organization_financials', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.alter_column('organization_members', 'role',
|
||||
existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
|
||||
type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(op.f('organization_members_person_id_fkey'), 'organization_members', type_='foreignkey')
|
||||
op.drop_constraint(op.f('organization_members_organization_id_fkey'), 'organization_members', type_='foreignkey')
|
||||
op.drop_constraint(op.f('organization_members_user_id_fkey'), 'organization_members', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'organization_members', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.add_column('organizations', sa.Column('is_anonymized', sa.Boolean(), server_default=sa.text('false'), nullable=True))
|
||||
op.add_column('organizations', sa.Column('anonymized_at', sa.DateTime(timezone=True), nullable=True))
|
||||
op.alter_column('organizations', 'org_type',
|
||||
existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
|
||||
type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(op.f('organizations_address_id_fkey'), 'organizations', type_='foreignkey')
|
||||
op.drop_constraint(op.f('organizations_owner_id_fkey'), 'organizations', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('pending_actions_requester_id_fkey'), 'pending_actions', type_='foreignkey')
|
||||
op.drop_constraint(op.f('pending_actions_approver_id_fkey'), 'pending_actions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('persons_address_id_fkey'), 'persons', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('points_ledger_user_id_fkey'), 'points_ledger', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'security_audit_logs', 'users', ['target_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'security_audit_logs', 'users', ['actor_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', type_='foreignkey')
|
||||
op.drop_constraint(op.f('service_expertises_service_id_fkey'), 'service_expertises', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.add_column('service_profiles', sa.Column('parent_id', sa.Integer(), nullable=True))
|
||||
op.add_column('service_profiles', sa.Column('fingerprint', sa.String(length=255), nullable=True))
|
||||
op.add_column('service_profiles', sa.Column('vibe_analysis', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), nullable=True))
|
||||
op.add_column('service_profiles', sa.Column('social_links', postgresql.JSONB(astext_type=sa.Text()), server_default=sa.text("'{}'::jsonb"), nullable=True))
|
||||
op.add_column('service_profiles', sa.Column('contact_email', sa.String(), nullable=True))
|
||||
op.add_column('service_profiles', sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True))
|
||||
op.add_column('service_profiles', sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True))
|
||||
op.execute("UPDATE data.service_profiles SET fingerprint = 'legacy_' || id::text")
|
||||
op.alter_column('service_profiles', 'fingerprint', nullable=False)
|
||||
op.alter_column('service_profiles', 'verification_log',
|
||||
existing_type=postgresql.JSON(astext_type=sa.Text()),
|
||||
type_=postgresql.JSONB(astext_type=sa.Text()),
|
||||
existing_nullable=True,
|
||||
existing_server_default=sa.text("'{}'::jsonb"))
|
||||
op.alter_column('service_profiles', 'opening_hours',
|
||||
existing_type=postgresql.JSON(astext_type=sa.Text()),
|
||||
type_=postgresql.JSONB(astext_type=sa.Text()),
|
||||
existing_nullable=True,
|
||||
existing_server_default=sa.text("'{}'::jsonb"))
|
||||
op.create_index('idx_service_fingerprint', 'service_profiles', ['fingerprint'], unique=True, schema='data')
|
||||
op.create_index(op.f('ix_data_service_profiles_fingerprint'), 'service_profiles', ['fingerprint'], unique=False, schema='data')
|
||||
op.drop_constraint(op.f('service_profiles_organization_id_fkey'), 'service_profiles', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'service_profiles', 'service_profiles', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('service_specialties_parent_id_fkey'), 'service_specialties', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
|
||||
op.add_column('service_staging', sa.Column('fingerprint', sa.String(length=255), nullable=True), schema='data')
|
||||
op.execute("UPDATE data.service_staging SET fingerprint = 'staging_' || id::text")
|
||||
op.alter_column('service_staging', 'fingerprint', nullable=False, schema='data')
|
||||
|
||||
op.create_index('idx_staging_fingerprint', 'service_staging', ['fingerprint'], unique=True, schema='data')
|
||||
op.drop_constraint(op.f('social_accounts_user_id_fkey'), 'social_accounts', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
|
||||
op.drop_constraint(op.f('user_badges_badge_id_fkey'), 'user_badges', type_='foreignkey')
|
||||
op.drop_constraint(op.f('user_badges_user_id_fkey'), 'user_badges', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('user_stats_user_id_fkey'), 'user_stats', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('users_referred_by_id_fkey'), 'users', type_='foreignkey')
|
||||
op.drop_constraint(op.f('users_current_sales_agent_id_fkey'), 'users', type_='foreignkey')
|
||||
op.drop_constraint(op.f('users_person_id_fkey'), 'users', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'users', 'users', ['referred_by_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'users', 'users', ['current_sales_agent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
|
||||
op.drop_constraint(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
|
||||
op.drop_constraint(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
|
||||
op.drop_constraint(op.f('wallets_user_id_fkey'), 'wallets', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: revert the schema-qualified FK / fingerprint migration.

    Mirrors ``upgrade()`` in reverse: drops the autogenerated schema-qualified
    foreign keys (created with ``None`` names, hence ``op.drop_constraint(None, ...)``),
    recreates the original constraints under their legacy names, reverts the
    JSONB -> JSON column types, removes the columns/indexes added by upgrade,
    and finally drops the ``ratings`` table.

    NOTE(review): the recreated foreign keys omit source_schema/referent_schema,
    so they are created in the connection's default schema (search_path) —
    presumably matching the pre-upgrade state; confirm before running on a
    database whose search_path does not include ``data``.

    Statement order is significant (constraints must be dropped before columns
    they reference are removed) — do not reorder.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'wallets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('wallets_user_id_fkey'), 'wallets', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'verification_tokens', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'])
    op.create_foreign_key(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'])
    op.create_foreign_key(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
    op.drop_constraint(None, 'vehicle_catalog', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'])
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('users_person_id_fkey'), 'users', 'persons', ['person_id'], ['id'])
    op.create_foreign_key(op.f('users_current_sales_agent_id_fkey'), 'users', 'users', ['current_sales_agent_id'], ['id'])
    op.create_foreign_key(op.f('users_referred_by_id_fkey'), 'users', 'users', ['referred_by_id'], ['id'])
    op.drop_constraint(None, 'user_stats', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_stats_user_id_fkey'), 'user_stats', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('user_badges_user_id_fkey'), 'user_badges', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('user_badges_badge_id_fkey'), 'user_badges', 'badges', ['badge_id'], ['id'])
    op.drop_constraint(None, 'social_accounts', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('social_accounts_user_id_fkey'), 'social_accounts', 'users', ['user_id'], ['id'], ondelete='CASCADE')
    # Remove the staging fingerprint added (and backfilled) by upgrade.
    op.drop_index('idx_staging_fingerprint', table_name='service_staging', schema='data')
    op.drop_column('service_staging', 'fingerprint')
    op.drop_constraint(None, 'service_specialties', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_specialties_parent_id_fkey'), 'service_specialties', 'service_specialties', ['parent_id'], ['id'])
    op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_profiles_organization_id_fkey'), 'service_profiles', 'organizations', ['organization_id'], ['id'])
    op.drop_index(op.f('ix_data_service_profiles_fingerprint'), table_name='service_profiles', schema='data')
    op.drop_index('idx_service_fingerprint', table_name='service_profiles', schema='data')
    # Revert the JSONB columns back to plain JSON (upgrade converted JSON -> JSONB).
    op.alter_column('service_profiles', 'opening_hours',
                    existing_type=postgresql.JSONB(astext_type=sa.Text()),
                    type_=postgresql.JSON(astext_type=sa.Text()),
                    existing_nullable=True,
                    existing_server_default=sa.text("'{}'::jsonb"))
    op.alter_column('service_profiles', 'verification_log',
                    existing_type=postgresql.JSONB(astext_type=sa.Text()),
                    type_=postgresql.JSON(astext_type=sa.Text()),
                    existing_nullable=True,
                    existing_server_default=sa.text("'{}'::jsonb"))
    op.drop_column('service_profiles', 'updated_at')
    op.drop_column('service_profiles', 'created_at')
    op.drop_column('service_profiles', 'contact_email')
    op.drop_column('service_profiles', 'social_links')
    op.drop_column('service_profiles', 'vibe_analysis')
    op.drop_column('service_profiles', 'fingerprint')
    op.drop_column('service_profiles', 'parent_id')
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('service_expertises_service_id_fkey'), 'service_expertises', 'service_profiles', ['service_id'], ['id'])
    op.create_foreign_key(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'])
    op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', 'users', ['actor_id'], ['id'])
    op.create_foreign_key(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', 'users', ['target_id'], ['id'])
    op.create_foreign_key(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'])
    op.drop_constraint(None, 'points_ledger', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('points_ledger_user_id_fkey'), 'points_ledger', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'persons', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('persons_address_id_fkey'), 'persons', 'addresses', ['address_id'], ['id'])
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('pending_actions_approver_id_fkey'), 'pending_actions', 'users', ['approver_id'], ['id'])
    op.create_foreign_key(op.f('pending_actions_requester_id_fkey'), 'pending_actions', 'users', ['requester_id'], ['id'])
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organizations_owner_id_fkey'), 'organizations', 'users', ['owner_id'], ['id'])
    op.create_foreign_key(op.f('organizations_address_id_fkey'), 'organizations', 'addresses', ['address_id'], ['id'])
    # Revert enum columns to the non-schema-qualified enum type.
    op.alter_column('organizations', 'org_type',
                    existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
                    type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
                    existing_nullable=True)
    op.drop_column('organizations', 'anonymized_at')
    op.drop_column('organizations', 'is_anonymized')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_members_user_id_fkey'), 'organization_members', 'users', ['user_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_organization_id_fkey'), 'organization_members', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('organization_members_person_id_fkey'), 'organization_members', 'persons', ['person_id'], ['id'])
    op.alter_column('organization_members', 'role',
                    existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
                    type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
                    existing_nullable=True)
    op.drop_constraint(None, 'organization_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('organization_financials_organization_id_fkey'), 'organization_financials', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
    op.create_foreign_key(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', 'users', ['agent_user_id'], ['id'])
    op.drop_constraint(None, 'operational_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('operational_logs_user_id_fkey'), 'operational_logs', 'users', ['user_id'], ['id'], ondelete='SET NULL')
    op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'])
    op.create_foreign_key(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'])
    op.drop_constraint(None, 'geo_streets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'])
    op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', 'users', ['related_agent_id'], ['id'])
    op.create_foreign_key(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', 'persons', ['person_id'], ['id'])
    op.create_foreign_key(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'feature_definitions', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
    op.drop_constraint(None, 'documents', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('documents_uploaded_by_fkey'), 'documents', 'users', ['uploaded_by'], ['id'])
    op.drop_constraint(None, 'credit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('credit_logs_org_id_fkey'), 'credit_logs', 'organizations', ['org_id'], ['id'])
    op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('branches_organization_id_fkey'), 'branches', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('branches_address_id_fkey'), 'branches', 'addresses', ['address_id'], ['id'])
    op.drop_constraint(None, 'audit_logs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('audit_logs_user_id_fkey'), 'audit_logs', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('assets_catalog_id_fkey'), 'assets', 'vehicle_catalog', ['catalog_id'], ['id'])
    op.create_foreign_key(op.f('assets_current_organization_id_fkey'), 'assets', 'organizations', ['current_organization_id'], ['id'])
    op.drop_constraint(None, 'asset_telemetry', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', 'users', ['user_id'], ['id'])
    op.drop_constraint(None, 'asset_financials', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_financials_asset_id_fkey'), 'asset_financials', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_events', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_events_asset_id_fkey'), 'asset_events', 'assets', ['asset_id'], ['id'])
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_costs_asset_id_fkey'), 'asset_costs', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_driver_id_fkey'), 'asset_costs', 'users', ['driver_id'], ['id'])
    op.create_foreign_key(op.f('asset_costs_organization_id_fkey'), 'asset_costs', 'organizations', ['organization_id'], ['id'])
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', 'assets', ['asset_id'], ['id'])
    op.create_foreign_key(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', 'organizations', ['organization_id'], ['id'])
    op.create_foreign_key(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', 'branches', ['branch_id'], ['id'])
    op.drop_constraint(None, 'addresses', schema='data', type_='foreignkey')
    op.create_foreign_key(op.f('addresses_postal_code_id_fkey'), 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'])
    # Finally remove the ratings table (and its indexes) created by upgrade.
    op.drop_index('idx_rating_user', table_name='ratings', schema='data')
    op.drop_index('idx_rating_org', table_name='ratings', schema='data')
    op.drop_index('idx_rating_branch', table_name='ratings', schema='data')
    op.drop_table('ratings', schema='data')
    # ### end Alembic commands ###
"""add_ownership_twin_and_gdpr_uuid

Revision ID: dd910cabe24e
Revises: 54cbd5c9e003
Create Date: 2026-02-21 07:57:20.406746

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


# revision identifiers, used by Alembic.
revision: str = 'dd910cabe24e'
down_revision: Union[str, Sequence[str], None] = '54cbd5c9e003'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
|
||||
"""Upgrade schema."""
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint(op.f('addresses_postal_code_id_fkey'), 'addresses', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_assignments', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_assignments', 'branches', ['branch_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.add_column('asset_costs', sa.Column('registration_uuid', sa.UUID(), nullable=True))
|
||||
op.create_index(op.f('ix_data_asset_costs_registration_uuid'), 'asset_costs', ['registration_uuid'], unique=False, schema='data')
|
||||
op.drop_constraint(op.f('asset_costs_driver_id_fkey'), 'asset_costs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_costs_organization_id_fkey'), 'asset_costs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_costs_asset_id_fkey'), 'asset_costs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_costs', 'users', ['driver_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_costs', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_costs', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.add_column('asset_events', sa.Column('registration_uuid', sa.UUID(), nullable=True))
|
||||
op.create_index(op.f('ix_data_asset_events_registration_uuid'), 'asset_events', ['registration_uuid'], unique=False, schema='data')
|
||||
op.drop_constraint(op.f('asset_events_asset_id_fkey'), 'asset_events', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_events', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_financials_asset_id_fkey'), 'asset_financials', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_financials', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', type_='foreignkey')
|
||||
op.drop_constraint(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_reviews', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'asset_reviews', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'asset_telemetry', 'assets', ['asset_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.add_column('assets', sa.Column('registration_uuid', sa.UUID(), nullable=False))
|
||||
op.add_column('assets', sa.Column('is_corporate', sa.Boolean(), server_default=sa.text('false'), nullable=True))
|
||||
op.add_column('assets', sa.Column('owner_person_id', sa.BigInteger(), nullable=True))
|
||||
op.add_column('assets', sa.Column('owner_org_id', sa.Integer(), nullable=True))
|
||||
op.add_column('assets', sa.Column('operator_person_id', sa.BigInteger(), nullable=True))
|
||||
op.add_column('assets', sa.Column('operator_org_id', sa.Integer(), nullable=True))
|
||||
op.create_index(op.f('ix_data_assets_registration_uuid'), 'assets', ['registration_uuid'], unique=False, schema='data')
|
||||
op.drop_constraint(op.f('assets_current_organization_id_fkey'), 'assets', type_='foreignkey')
|
||||
op.drop_constraint(op.f('assets_catalog_id_fkey'), 'assets', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'assets', 'organizations', ['operator_org_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'assets', 'organizations', ['owner_org_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'assets', 'organizations', ['current_organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'assets', 'persons', ['owner_person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'assets', 'persons', ['operator_person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'assets', 'vehicle_catalog', ['catalog_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('audit_logs_user_id_fkey'), 'audit_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'audit_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('branches_address_id_fkey'), 'branches', type_='foreignkey')
|
||||
op.drop_constraint(op.f('branches_organization_id_fkey'), 'branches', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'branches', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'branches', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('credit_logs_org_id_fkey'), 'credit_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'credit_logs', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('documents_uploaded_by_fkey'), 'documents', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'documents', 'users', ['uploaded_by'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', type_='foreignkey')
|
||||
op.drop_constraint(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', type_='foreignkey')
|
||||
op.drop_constraint(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'financial_ledger', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'financial_ledger', 'users', ['related_agent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'financial_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', type_='foreignkey')
|
||||
op.drop_constraint(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('operational_logs_user_id_fkey'), 'operational_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'operational_logs', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='SET NULL')
|
||||
op.drop_constraint(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', type_='foreignkey')
|
||||
op.drop_constraint(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'org_sales_assignments', 'users', ['agent_user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'org_sales_assignments', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', type_='foreignkey')
|
||||
op.drop_constraint(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'org_subscriptions', 'organizations', ['org_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('organization_financials_organization_id_fkey'), 'organization_financials', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'organization_financials', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.alter_column('organization_members', 'role',
|
||||
existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
|
||||
type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(op.f('organization_members_user_id_fkey'), 'organization_members', type_='foreignkey')
|
||||
op.drop_constraint(op.f('organization_members_person_id_fkey'), 'organization_members', type_='foreignkey')
|
||||
op.drop_constraint(op.f('organization_members_organization_id_fkey'), 'organization_members', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'organization_members', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'organization_members', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'organization_members', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.alter_column('organizations', 'org_type',
|
||||
existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
|
||||
type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(op.f('organizations_address_id_fkey'), 'organizations', type_='foreignkey')
|
||||
op.drop_constraint(op.f('organizations_owner_id_fkey'), 'organizations', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'organizations', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'organizations', 'users', ['owner_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('pending_actions_approver_id_fkey'), 'pending_actions', type_='foreignkey')
|
||||
op.drop_constraint(op.f('pending_actions_requester_id_fkey'), 'pending_actions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'pending_actions', 'users', ['requester_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'pending_actions', 'users', ['approver_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('persons_address_id_fkey'), 'persons', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'persons', 'addresses', ['address_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('points_ledger_user_id_fkey'), 'points_ledger', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'points_ledger', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('ratings_author_id_fkey'), 'ratings', type_='foreignkey')
|
||||
op.drop_constraint(op.f('ratings_target_organization_id_fkey'), 'ratings', type_='foreignkey')
|
||||
op.drop_constraint(op.f('ratings_target_branch_id_fkey'), 'ratings', type_='foreignkey')
|
||||
op.drop_constraint(op.f('ratings_target_user_id_fkey'), 'ratings', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'ratings', 'users', ['target_user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'ratings', 'users', ['author_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'ratings', 'organizations', ['target_organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'ratings', 'branches', ['target_branch_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', type_='foreignkey')
|
||||
op.drop_constraint(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'security_audit_logs', 'users', ['actor_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'security_audit_logs', 'users', ['target_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', type_='foreignkey')
|
||||
op.drop_constraint(op.f('service_expertises_service_id_fkey'), 'service_expertises', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'service_expertises', 'service_profiles', ['service_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('service_profiles_organization_id_fkey'), 'service_profiles', type_='foreignkey')
|
||||
op.drop_constraint(op.f('service_profiles_parent_id_fkey'), 'service_profiles', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'service_profiles', 'service_profiles', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'service_profiles', 'organizations', ['organization_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('service_specialties_parent_id_fkey'), 'service_specialties', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'service_specialties', 'service_specialties', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('social_accounts_user_id_fkey'), 'social_accounts', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'social_accounts', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
|
||||
op.drop_constraint(op.f('user_badges_user_id_fkey'), 'user_badges', type_='foreignkey')
|
||||
op.drop_constraint(op.f('user_badges_badge_id_fkey'), 'user_badges', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'user_badges', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'user_badges', 'badges', ['badge_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('user_stats_user_id_fkey'), 'user_stats', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'user_stats', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('users_referred_by_id_fkey'), 'users', type_='foreignkey')
|
||||
op.drop_constraint(op.f('users_current_sales_agent_id_fkey'), 'users', type_='foreignkey')
|
||||
op.drop_constraint(op.f('users_person_id_fkey'), 'users', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'users', 'users', ['referred_by_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'users', 'persons', ['person_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'users', 'users', ['current_sales_agent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
|
||||
op.drop_constraint(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
|
||||
op.drop_constraint(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'vehicle_ownerships', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.create_foreign_key(None, 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
op.drop_constraint(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'verification_tokens', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data', ondelete='CASCADE')
|
||||
op.drop_constraint(op.f('wallets_user_id_fkey'), 'wallets', type_='foreignkey')
|
||||
op.create_foreign_key(None, 'wallets', 'users', ['user_id'], ['id'], source_schema='data', referent_schema='data')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
"""Downgrade schema."""
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint(None, 'wallets', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('wallets_user_id_fkey'), 'wallets', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'verification_tokens', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('verification_tokens_user_id_fkey'), 'verification_tokens', 'users', ['user_id'], ['id'], ondelete='CASCADE')
|
||||
op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'vehicle_ownerships', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('vehicle_ownerships_vehicle_id_fkey'), 'vehicle_ownerships', 'assets', ['vehicle_id'], ['id'])
|
||||
op.create_foreign_key(op.f('vehicle_ownerships_user_id_fkey'), 'vehicle_ownerships', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'vehicle_model_definitions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('vehicle_model_definitions_parent_id_fkey'), 'vehicle_model_definitions', 'vehicle_model_definitions', ['parent_id'], ['id'])
|
||||
op.create_foreign_key(op.f('vehicle_model_definitions_vehicle_type_id_fkey'), 'vehicle_model_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
|
||||
op.drop_constraint(None, 'vehicle_catalog', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('vehicle_catalog_master_definition_id_fkey'), 'vehicle_catalog', 'vehicle_model_definitions', ['master_definition_id'], ['id'])
|
||||
op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'users', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('users_person_id_fkey'), 'users', 'persons', ['person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('users_current_sales_agent_id_fkey'), 'users', 'users', ['current_sales_agent_id'], ['id'])
|
||||
op.create_foreign_key(op.f('users_referred_by_id_fkey'), 'users', 'users', ['referred_by_id'], ['id'])
|
||||
op.drop_constraint(None, 'user_stats', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('user_stats_user_id_fkey'), 'user_stats', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'user_badges', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('user_badges_badge_id_fkey'), 'user_badges', 'badges', ['badge_id'], ['id'])
|
||||
op.create_foreign_key(op.f('user_badges_user_id_fkey'), 'user_badges', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'social_accounts', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('social_accounts_user_id_fkey'), 'social_accounts', 'users', ['user_id'], ['id'], ondelete='CASCADE')
|
||||
op.drop_constraint(None, 'service_specialties', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('service_specialties_parent_id_fkey'), 'service_specialties', 'service_specialties', ['parent_id'], ['id'])
|
||||
op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'service_profiles', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('service_profiles_parent_id_fkey'), 'service_profiles', 'service_profiles', ['parent_id'], ['id'])
|
||||
op.create_foreign_key(op.f('service_profiles_organization_id_fkey'), 'service_profiles', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'service_expertises', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('service_expertises_service_id_fkey'), 'service_expertises', 'service_profiles', ['service_id'], ['id'])
|
||||
op.create_foreign_key(op.f('service_expertises_expertise_id_fkey'), 'service_expertises', 'expertise_tags', ['expertise_id'], ['id'])
|
||||
op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'security_audit_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('security_audit_logs_target_id_fkey'), 'security_audit_logs', 'users', ['target_id'], ['id'])
|
||||
op.create_foreign_key(op.f('security_audit_logs_confirmed_by_id_fkey'), 'security_audit_logs', 'users', ['confirmed_by_id'], ['id'])
|
||||
op.create_foreign_key(op.f('security_audit_logs_actor_id_fkey'), 'security_audit_logs', 'users', ['actor_id'], ['id'])
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'ratings', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('ratings_target_user_id_fkey'), 'ratings', 'users', ['target_user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('ratings_target_branch_id_fkey'), 'ratings', 'branches', ['target_branch_id'], ['id'])
|
||||
op.create_foreign_key(op.f('ratings_target_organization_id_fkey'), 'ratings', 'organizations', ['target_organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('ratings_author_id_fkey'), 'ratings', 'users', ['author_id'], ['id'])
|
||||
op.drop_constraint(None, 'points_ledger', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('points_ledger_user_id_fkey'), 'points_ledger', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'persons', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('persons_address_id_fkey'), 'persons', 'addresses', ['address_id'], ['id'])
|
||||
op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'pending_actions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('pending_actions_requester_id_fkey'), 'pending_actions', 'users', ['requester_id'], ['id'])
|
||||
op.create_foreign_key(op.f('pending_actions_approver_id_fkey'), 'pending_actions', 'users', ['approver_id'], ['id'])
|
||||
op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'organizations', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('organizations_owner_id_fkey'), 'organizations', 'users', ['owner_id'], ['id'])
|
||||
op.create_foreign_key(op.f('organizations_address_id_fkey'), 'organizations', 'addresses', ['address_id'], ['id'])
|
||||
op.alter_column('organizations', 'org_type',
|
||||
existing_type=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype', schema='data', inherit_schema=True),
|
||||
type_=postgresql.ENUM('individual', 'service', 'service_provider', 'fleet_owner', 'club', 'business', name='orgtype'),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'organization_members', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('organization_members_organization_id_fkey'), 'organization_members', 'organizations', ['organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('organization_members_person_id_fkey'), 'organization_members', 'persons', ['person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('organization_members_user_id_fkey'), 'organization_members', 'users', ['user_id'], ['id'])
|
||||
op.alter_column('organization_members', 'role',
|
||||
existing_type=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole', schema='data', inherit_schema=True),
|
||||
type_=postgresql.ENUM('OWNER', 'ADMIN', 'FLEET_MANAGER', 'DRIVER', 'MECHANIC', 'RECEPTIONIST', name='orguserrole'),
|
||||
existing_nullable=True)
|
||||
op.drop_constraint(None, 'organization_financials', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('organization_financials_organization_id_fkey'), 'organization_financials', 'organizations', ['organization_id'], ['id'])
|
||||
op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'org_subscriptions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('org_subscriptions_tier_id_fkey'), 'org_subscriptions', 'subscription_tiers', ['tier_id'], ['id'])
|
||||
op.create_foreign_key(op.f('org_subscriptions_org_id_fkey'), 'org_subscriptions', 'organizations', ['org_id'], ['id'])
|
||||
op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'org_sales_assignments', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('org_sales_assignments_organization_id_fkey'), 'org_sales_assignments', 'organizations', ['organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('org_sales_assignments_agent_user_id_fkey'), 'org_sales_assignments', 'users', ['agent_user_id'], ['id'])
|
||||
op.drop_constraint(None, 'operational_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('operational_logs_user_id_fkey'), 'operational_logs', 'users', ['user_id'], ['id'], ondelete='SET NULL')
|
||||
op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'model_feature_maps', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('model_feature_maps_feature_id_fkey'), 'model_feature_maps', 'feature_definitions', ['feature_id'], ['id'])
|
||||
op.create_foreign_key(op.f('model_feature_maps_model_id_fkey'), 'model_feature_maps', 'vehicle_model_definitions', ['model_id'], ['id'])
|
||||
op.drop_constraint(None, 'geo_streets', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('geo_streets_postal_code_id_fkey'), 'geo_streets', 'geo_postal_codes', ['postal_code_id'], ['id'])
|
||||
op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'financial_ledger', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('financial_ledger_user_id_fkey'), 'financial_ledger', 'users', ['user_id'], ['id'])
|
||||
op.create_foreign_key(op.f('financial_ledger_person_id_fkey'), 'financial_ledger', 'persons', ['person_id'], ['id'])
|
||||
op.create_foreign_key(op.f('financial_ledger_related_agent_id_fkey'), 'financial_ledger', 'users', ['related_agent_id'], ['id'])
|
||||
op.drop_constraint(None, 'feature_definitions', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('feature_definitions_vehicle_type_id_fkey'), 'feature_definitions', 'vehicle_types', ['vehicle_type_id'], ['id'])
|
||||
op.drop_constraint(None, 'documents', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('documents_uploaded_by_fkey'), 'documents', 'users', ['uploaded_by'], ['id'])
|
||||
op.drop_constraint(None, 'credit_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('credit_logs_org_id_fkey'), 'credit_logs', 'organizations', ['org_id'], ['id'])
|
||||
op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'branches', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('branches_organization_id_fkey'), 'branches', 'organizations', ['organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('branches_address_id_fkey'), 'branches', 'addresses', ['address_id'], ['id'])
|
||||
op.drop_constraint(None, 'audit_logs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('audit_logs_user_id_fkey'), 'audit_logs', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'assets', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('assets_catalog_id_fkey'), 'assets', 'vehicle_catalog', ['catalog_id'], ['id'])
|
||||
op.create_foreign_key(op.f('assets_current_organization_id_fkey'), 'assets', 'organizations', ['current_organization_id'], ['id'])
|
||||
op.drop_index(op.f('ix_data_assets_registration_uuid'), table_name='assets', schema='data')
|
||||
op.drop_column('assets', 'operator_org_id')
|
||||
op.drop_column('assets', 'operator_person_id')
|
||||
op.drop_column('assets', 'owner_org_id')
|
||||
op.drop_column('assets', 'owner_person_id')
|
||||
op.drop_column('assets', 'is_corporate')
|
||||
op.drop_column('assets', 'registration_uuid')
|
||||
op.drop_constraint(None, 'asset_telemetry', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_telemetry_asset_id_fkey'), 'asset_telemetry', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_reviews', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_reviews_asset_id_fkey'), 'asset_reviews', 'assets', ['asset_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_reviews_user_id_fkey'), 'asset_reviews', 'users', ['user_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_financials', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_financials_asset_id_fkey'), 'asset_financials', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'asset_events', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_events_asset_id_fkey'), 'asset_events', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_index(op.f('ix_data_asset_events_registration_uuid'), table_name='asset_events', schema='data')
|
||||
op.drop_column('asset_events', 'registration_uuid')
|
||||
op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_costs', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_costs_asset_id_fkey'), 'asset_costs', 'assets', ['asset_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_costs_organization_id_fkey'), 'asset_costs', 'organizations', ['organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_costs_driver_id_fkey'), 'asset_costs', 'users', ['driver_id'], ['id'])
|
||||
op.drop_index(op.f('ix_data_asset_costs_registration_uuid'), table_name='asset_costs', schema='data')
|
||||
op.drop_column('asset_costs', 'registration_uuid')
|
||||
op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
|
||||
op.drop_constraint(None, 'asset_assignments', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('asset_assignments_organization_id_fkey'), 'asset_assignments', 'organizations', ['organization_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_assignments_branch_id_fkey'), 'asset_assignments', 'branches', ['branch_id'], ['id'])
|
||||
op.create_foreign_key(op.f('asset_assignments_asset_id_fkey'), 'asset_assignments', 'assets', ['asset_id'], ['id'])
|
||||
op.drop_constraint(None, 'addresses', schema='data', type_='foreignkey')
|
||||
op.create_foreign_key(op.f('addresses_postal_code_id_fkey'), 'addresses', 'geo_postal_codes', ['postal_code_id'], ['id'])
|
||||
# ### end Alembic commands ###
|
||||
@@ -25,4 +25,9 @@ pyotp
|
||||
cryptography
|
||||
GeoAlchemy2>=0.14.0
|
||||
google-generativeai
|
||||
google-genai
|
||||
google-genai
|
||||
rapidfuzz
|
||||
duckduckgo-search>=6.0.0
|
||||
Shapely>=2.0.0
|
||||
opencv-python-headless==4.9.0.80
|
||||
numpy<2.0.0
|
||||
Reference in New Issue
Block a user