File size: 3,243 Bytes
5b467cd
caaa41c
8ca0bfb
 
 
 
a5cd3be
52a061c
caaa41c
 
 
52a061c
 
 
caaa41c
 
 
 
 
 
 
 
a5cd3be
caaa41c
 
 
 
5450aad
caaa41c
 
 
 
 
 
 
 
 
 
5450aad
caaa41c
 
 
 
 
8ca0bfb
caaa41c
8ca0bfb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3523023
 
8ca0bfb
 
 
 
3523023
8ca0bfb
3523023
caaa41c
8ca0bfb
3523023
8ca0bfb
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
# FILE: utils/debug_utils.py
# DESCRIPTION: Utility for detailed function logging with persistent file output.

import os
import functools
import torch
import logging
import warnings
from huggingface_hub import logging as hf_logging

# --- INITIAL CONFIGURATION ---
# Silence noisy warning categories from dependencies.
warnings.filterwarnings("ignore", category=UserWarning)
warnings.filterwarnings("ignore", category=FutureWarning)
# NOTE(review): message=".*" suppresses EVERY warning, including ones that may
# signal real problems — confirm this blanket filter is intentional.
warnings.filterwarnings("ignore", message=".*")
hf_logging.set_verbosity_error()

# Log level comes from the environment; defaults to DEBUG.
# NOTE(review): the value is not validated — an unknown level name will make
# logger.setLevel() raise ValueError at import time.
LOG_LEVEL = os.environ.get("ADUC_LOG_LEVEL", "DEBUG").upper()
# --- STANDARDIZED PATH FOR THE CURRENT SESSION'S LOG ---
LOG_DIR = "/data/logs"
LOG_FILE_PATH = os.path.join(LOG_DIR, "session.log")

# --- LOGGER CONFIGURATION ---

# 1. Create the log directory if it does not already exist
os.makedirs(LOG_DIR, exist_ok=True)

# 2. Get the main logger
logger = logging.getLogger("AducDebug")
logger.setLevel(LOG_LEVEL)

# 3. Avoid attaching duplicate handlers (important in interactive environments
#    where this module may be imported more than once)
if not logger.handlers:
    # Persistent file handler. mode='w' starts a fresh log each session;
    # NOTE(review): assumes start.sh has already rotated the previous log —
    # confirm, otherwise the old session.log is overwritten here.
    file_handler = logging.FileHandler(LOG_FILE_PATH, mode='w')
    file_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    file_handler.setFormatter(file_formatter)
    logger.addHandler(file_handler)

    # Console (terminal) handler
    stream_handler = logging.StreamHandler()
    stream_formatter = logging.Formatter('%(message)s') # Clean, message-only format for the terminal
    stream_handler.setFormatter(stream_formatter)
    logger.addHandler(stream_handler)
# --- FUNÇÕES DO DECORADOR ---

def _format_value(value):
    if isinstance(value, torch.Tensor):
        return f"Tensor(shape={list(value.shape)}, device='{value.device}', dtype={value.dtype})"
    if isinstance(value, str) and len(value) > 70:
        return f"'{value[:70]}...'"
    if isinstance(value, list) and len(value) > 5:
        return f"List(len={len(value)})"
    if isinstance(value, dict) and len(value.keys()) > 5:
        return f"Dict(keys={list(value.keys())[:5]}...)"
    return repr(value)

def log_function_io(func):
    """Decorator that logs a function's inputs, output, and errors.

    At DEBUG level, the module logger records the fully-qualified function
    name, a compact rendering of its arguments (via ``_format_value``),
    and either the formatted return value or the raised exception (which
    is re-raised). When DEBUG is disabled, the wrapped function is invoked
    directly with no formatting overhead.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Fast path: skip all formatting work when DEBUG logging is off.
        if not logger.isEnabledFor(logging.DEBUG):
            return func(*args, **kwargs)

        func_name = f"{func.__module__}.{func.__name__}"
        pieces = [_format_value(a) for a in args]
        pieces.extend(f"{k}={_format_value(v)}" for k, v in kwargs.items())
        signature = ", ".join(pieces)

        logger.debug(f"\n\n== INÍCIO: {func_name} ==========")
        logger.debug(f"\n  -> ENTRADA: \n({signature})")

        try:
            result = func(*args, **kwargs)
            logger.debug(f"\n\n  <- SAÍDA: \n{_format_value(result)}")
        except Exception as e:
            logger.error(f"\n  <-- ERRO em \n{func_name}: {e}", exc_info=True)
            raise
        finally:
            # Closing separator is emitted on both success and failure.
            logger.debug(f"============================================\n\n")

        return result

    return wrapper