import asyncio
import os
from typing import Any, Dict, List, Optional, Tuple, Union

from mysql.connector.aio import connect
from mysql.connector import errors as mysql_errors

# 🔑 Connection config (credentials come from the environment; never hard-code secrets)
DB_CONFIG = {
    "host": os.getenv("DB_HOST", "dev-rds-mysql.crs0e6k0ssxf.us-east-2.rds.amazonaws.com"),
    "port": int(os.getenv("DB_PORT", "3306")),
    "user": os.getenv("DB_USER", "agencycic"),
    "password": os.getenv("DB_PASSWORD", ""),  # required at runtime; keep out of source control
    "database": os.getenv("DB_NAME", "imparables"),
    "use_pure": True,
    "connection_timeout": 10,  # avoids indefinite network waits
}


# Parameter shapes accepted by execute():
#  - Tuple          -> plain execute
#  - List of tuples -> executemany (bulk, with chunking)
ParamsType = Optional[Union[Tuple[Any, ...], List[Tuple[Any, ...]]]]

# Recommended chunk size (reduces long-held locks)
_DEFAULT_CHUNK = 1000


async def get_connection():
    """
    Open an asynchronous MySQL connection with session settings
    that reduce blocking and lock-wait stalls.
    """
    cnx = await connect(**DB_CONFIG)
    cur = await cnx.cursor()
    try:
        await cur.execute("SET SESSION TRANSACTION ISOLATION LEVEL READ COMMITTED")
        await cur.execute("SET SESSION innodb_lock_wait_timeout = 5")
    finally:
        await cur.close()
    return cnx
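

# A minimal usage sketch for running several statements in one transaction over
# a connection from get_connection(). The "accounts" table and its columns are
# hypothetical, for illustration only.
async def _example_transaction(src_id: int, dst_id: int, amount: float) -> None:
    cnx = await get_connection()
    cur = await cnx.cursor()
    try:
        await cur.execute(
            "UPDATE accounts SET balance = balance - %s WHERE id = %s",
            (amount, src_id),
        )
        await cur.execute(
            "UPDATE accounts SET balance = balance + %s WHERE id = %s",
            (amount, dst_id),
        )
        await cnx.commit()  # both updates land together, or neither does
    except Exception:
        await cnx.rollback()
        raise
    finally:
        await cur.close()
        await cnx.close()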


async def queries(sql: str, params: Optional[Tuple[Any, ...]] = None) -> List[Dict[str, Any]]:
    """
    SELECT → list of dicts.
    """
    cnx = await get_connection()
    cur = await cnx.cursor(dictionary=True)
    try:
        await cur.execute(sql, params or ())
        rows = await cur.fetchall()
        return rows
    finally:
        await cur.close()
        await cnx.close()


async def execute(
    sql: str,
    params: ParamsType = None,
    *,
    chunk_size: int = _DEFAULT_CHUNK
) -> int:
    """
    INSERT/UPDATE/DELETE.
    - Si 'params' es tupla -> execute
    - Si 'params' es lista de tuplas -> executemany con chunking y reintento 1×
    Devuelve el número total de filas afectadas.
    """
    cnx = await get_connection()
    cur = await cnx.cursor()
    try:
        # Case 1: a single tuple of parameters
        if not isinstance(params, list):
            await cur.execute(sql, params or ())
            affected = cur.rowcount or 0
            await cnx.commit()
            return affected

        # Case 2: bulk → split into chunks and commit per chunk
        total_affected = 0
        data: List[Tuple[Any, ...]] = params or []  # type: ignore[assignment]
        if not data:
            return 0

        # Safety: enforce a chunk_size of at least 1
        chunk_size = max(1, int(chunk_size))

        for i in range(0, len(data), chunk_size):
            chunk = data[i:i + chunk_size]
            print(f"Processing chunk {i // chunk_size + 1} (rows {i} to {i + len(chunk) - 1})")

            # Retry once on lock wait timeout (1205) or deadlock (1213)
            for attempt in (1, 2):
                try:
                    await cur.executemany(sql, chunk)
                    total_affected += cur.rowcount or 0
                    await cnx.commit()
                    break
                except mysql_errors.DatabaseError as e:
                    code = getattr(e, "errno", None)
                    # 1205 = Lock wait timeout, 1213 = Deadlock
                    if code in (1205, 1213) and attempt == 1:
                        await cnx.rollback()
                        # Brief pause so another transaction can commit and release the lock
                        await asyncio.sleep(0.1)
                        continue
                    await cnx.rollback()
                    raise

            # Yield the event loop between chunks so other requests are not starved
            await asyncio.sleep(0)

        return total_affected

    finally:
        await cur.close()
        await cnx.close()
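

# A minimal usage sketch for execute(), covering both parameter shapes. The
# "events" table and its columns are hypothetical, for illustration only.
async def _example_write() -> None:
    # Single statement: pass one tuple of parameters.
    affected = await execute(
        "UPDATE events SET status = %s WHERE id = %s",
        ("done", 42),
    )
    print(f"updated {affected} row(s)")

    # Bulk insert: pass a list of tuples; rows are chunked and committed per chunk.
    rows = [("click", str(i)) for i in range(10_000)]
    inserted = await execute(
        "INSERT INTO events (kind, payload) VALUES (%s, %s)",
        rows,
        chunk_size=1000,
    )
    print(f"inserted {inserted} row(s)")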


async def queries_masivos(
    sql: str,
    params: Optional[Tuple[Any, ...]] = None,
    *,
    chunk_size: int = 15000,
) -> List[Dict[str, Any]]:
    """
    SELECT masivo optimizado para grandes volúmenes (reemplazo directo de queries).
    Procesa por chunks pero devuelve todas las filas como lista completa.
    
    Args:
        sql: Query SQL
        params: Parámetros de la query
        chunk_size: Tamaño del chunk para streaming
    
    Returns:
        Lista completa de todas las filas como dicts
    """
    cnx = await get_connection()
    # mysql.connector cursors are unbuffered by default, so rows stream from the
    # server as they are fetched (this driver has no 'SSCursor' cursor_class).
    cur = await cnx.cursor(dictionary=True)
    
    try:
        print(f"🚀 Ejecutando query masiva: {sql[:100]}...")
        start_time = asyncio.get_event_loop().time()
        
        # Optimizaciones de sesión
        await cur.execute("SET SESSION sql_buffer_result = 0")
        await cur.execute("SET SESSION read_buffer_size = 2097152")
        
        await cur.execute(sql, params or ())
        
        all_rows = []
        total_rows = 0
        chunk_count = 0
        
        while True:
            chunk = await cur.fetchmany(chunk_size)
            if not chunk:
                break
                
            chunk_count += 1
            total_rows += len(chunk)
            all_rows.extend(chunk)
            
            elapsed = asyncio.get_running_loop().time() - start_time
            rate = total_rows / elapsed if elapsed > 0 else 0
            
            print(
                f"📦 Chunk {chunk_count}: {len(chunk)} filas | "
                f"Total: {total_rows:,} | "
                f"Velocidad: {rate:,.0f} filas/seg"
            )
            
            await asyncio.sleep(0)
        
        total_time = asyncio.get_running_loop().time() - start_time
        final_rate = total_rows / total_time if total_time > 0 else 0
        
        print(
            f"✅ Query masiva completada: {total_rows:,} filas en {total_time:.2f}s | "
            f"Promedio: {final_rate:,.0f} filas/seg"
        )
        
        return all_rows
        
    except Exception as e:
        print(f"❌ Error en query masiva: {e}")
        raise
    finally:
        await cur.close()
        await cnx.close()
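

# A minimal usage sketch for queries_masivos(); the "orders" table and the date
# filter are hypothetical, for illustration only.
async def _example_bulk_select() -> None:
    rows = await queries_masivos(
        "SELECT id, total FROM orders WHERE created_at >= %s",
        ("2024-01-01",),
        chunk_size=15000,
    )
    print(f"fetched {len(rows):,} rows")


if __name__ == "__main__":
    asyncio.run(_example_bulk_select())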