# /opt/docker/dev/service_finder/backend/migrations/env.py
|
|
import asyncio
|
|
from logging.config import fileConfig
|
|
import os
|
|
import sys
|
|
from sqlalchemy import pool, text
|
|
from sqlalchemy.ext.asyncio import async_engine_from_config
|
|
from alembic import context
|
|
|
|
# --- PATH FIX: make the /app package root importable inside the container ---
|
|
sys.path.insert(0, "/app")
|
|
|
|
try:
    from app.core.config import settings
    # Important: import the shared declarative Base so all model tables
    # registered on it are visible to Alembic.
    from app.database import Base

    # MB 2.0: central import through the package __init__.py.
    # Since __init__.py exposes every model via __all__, pulling in the
    # whole package is the simplest way to register everything for Alembic.
    import app.models as models
except ImportError as e:
    # Fail loudly: without the models, autogenerate would silently produce
    # empty (or destructive) migrations.
    print(f"❌ Kritikus hiba az importálásnál: {e}")
    raise
|
|
|
|
# Alembic Config object: gives access to the values in alembic.ini.
config = context.config

# Dynamic database URL taken from the .env file (via the settings object),
# overriding whatever is in alembic.ini.
config.set_main_option("sqlalchemy.url", str(settings.SQLALCHEMY_DATABASE_URI))

# Configure Python logging from the ini file, if one is in use.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Alembic sees every table that the models package __init__.py imported.
target_metadata = Base.metadata
|
|
|
|
# MB 2.0: Sémák szűrése
|
|
# MB 2.0: schema filtering for autogenerate
def include_object(object, name, type_, reflected, compare_to):
    """Decide which database objects Alembic autogenerate should consider.

    Excludes PostGIS/Alembic bookkeeping tables and anything outside the
    project's schemas (identity, data, system, public).

    Returns:
        bool: True if the object should be included in the comparison.
    """
    # 1. Protect PostGIS internals and Alembic's own version table from
    # ever being dropped or altered by autogenerate.
    excluded_tables = {
        "spatial_ref_sys", "alembic_version",
        "geography_columns", "geometry_columns",
        "raster_columns", "raster_overviews",
    }
    if type_ == "table" and name in excluded_tables:
        return False

    # 2. Only track the project's schemas. SQLAlchemy reports the database
    # default schema as None; treat that as "public" — otherwise every
    # default-schema object would be silently excluded.
    allowed_schemas = {"identity", "data", "system", "public"}
    if type_ == "schema":
        return (name or "public") in allowed_schemas

    # 3. Schema-based filtering for tables, indexes, constraints, etc.
    if hasattr(object, "schema"):
        return (object.schema or "public") in allowed_schemas

    return True
|
|
|
|
def do_run_migrations(connection):
    """Run migrations on a synchronous connection (invoked via run_sync).

    Ensures the project schemas exist first, then configures the Alembic
    context with schema-aware comparison and executes the migrations.
    """
    # Make sure the project schemas exist before any migration touches them.
    for ddl in (
        "CREATE SCHEMA IF NOT EXISTS identity;",
        "CREATE SCHEMA IF NOT EXISTS data;",
        "CREATE SCHEMA IF NOT EXISTS system;",
    ):
        connection.execute(text(ddl))

    context.configure(
        connection=connection,
        target_metadata=target_metadata,
        include_schemas=True,
        include_object=include_object,
        # Keep the alembic_version bookkeeping table in the public schema.
        version_table_schema='public',
    )

    with context.begin_transaction():
        context.run_migrations()
|
|
|
|
async def run_migrations_online() -> None:
    """Create an async engine from the Alembic config and run migrations.

    The engine is built from the [alembic] ini section (sqlalchemy.* keys)
    with NullPool, since a migration run needs exactly one short-lived
    connection.
    """
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    # Fix: dispose the engine even when a migration fails — the original
    # leaked the connection pool on any exception inside run_sync.
    try:
        async with connectable.connect() as connection:
            await connection.run_sync(do_run_migrations)
    finally:
        await connectable.dispose()
|
|
|
|
if context.is_offline_mode():
    # Offline mode: emit the migration SQL as a script, without connecting
    # to the database.
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        include_schemas=True,
        include_object=include_object,
    )
    with context.begin_transaction():
        context.run_migrations()
else:
    # Online mode. asyncio.run() creates, runs and closes a fresh event
    # loop — replacing the deprecated get_event_loop()/run_until_complete
    # pattern, which also leaked the loop created by new_event_loop().
    asyncio.run(run_migrations_online())