Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 37 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# Log files
logs/
*.log

# Python cache
__pycache__/
*.py[cod]
*$py.class
*.so
.Python

# Virtual environments
python_env/
venv/
env/
ENV/

# Build artifacts
build/
dist/
*.egg-info/

# Temporary files
tmp/
*.tmp
*.bak
*.swp
*~

# IDE
.vscode/
.idea/
*.iml

# OS files
.DS_Store
Thumbs.db
231 changes: 231 additions & 0 deletions lib/classes/run_logger.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,231 @@
import os
import sys
import time
from datetime import datetime, timedelta


class RunLogger:
    """
    Capture stdout and stderr to a log file while preserving normal output.

    The original streams keep receiving everything the user would normally
    see (terminal / Gradio UI); a copy of each write is appended to a log
    file opened with a large buffer to minimize disk I/O overhead.
    """

    # Buffer size passed to open() for log file writes (128 KB).
    BUFFER_SIZE = 128 * 1024

    def __init__(self, log_dir, session_id=None, enabled=True):
        """
        Initialize the RunLogger.

        Args:
            log_dir: Directory to store log files.
            session_id: Optional session ID to include in the log filename.
            enabled: Whether logging is enabled; when False, all methods no-op.
        """
        self.enabled = enabled
        self.log_dir = log_dir
        self.session_id = session_id or "unknown"
        self.log_file = None          # open file object while capturing
        self.log_file_path = None     # full path of the current log file
        self.original_stdout = None   # saved sys.stdout, restored by stop()
        self.original_stderr = None   # saved sys.stderr, restored by stop()
        self.tee_stdout = None
        self.tee_stderr = None
        # NOTE(review): these two attributes are never used — buffering is
        # delegated to the file object opened with BUFFER_SIZE. Kept so any
        # external code poking at them keeps working.
        self.buffer = []
        self.buffer_size = 0

    def start(self):
        """Start capturing stdout/stderr to a timestamped log file."""
        if not self.enabled:
            return

        try:
            # Create log directory if it doesn't exist
            os.makedirs(self.log_dir, exist_ok=True)

            # Generate log filename with timestamp
            timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
            log_filename = f"run_{timestamp}_{self.session_id}.log"
            self.log_file_path = os.path.join(self.log_dir, log_filename)

            # Open log file with larger buffer for better performance
            self.log_file = open(self.log_file_path, 'w', encoding='utf-8', buffering=self.BUFFER_SIZE)

            # Write header and flush so it is on disk even if the run crashes
            self.log_file.write(f"=== Log started at {datetime.now().isoformat()} ===\n")
            self.log_file.write(f"Session ID: {self.session_id}\n")
            self.log_file.write("=" * 60 + "\n\n")
            self.log_file.flush()

            # Save original streams so stop() can restore them
            self.original_stdout = sys.stdout
            self.original_stderr = sys.stderr

            # Create tee streams that write to both original and log file;
            # this RunLogger instance provides the buffered log-file side.
            self.tee_stdout = TeeStream(self.original_stdout, self)
            self.tee_stderr = TeeStream(self.original_stderr, self)

            # Replace sys streams
            sys.stdout = self.tee_stdout
            sys.stderr = self.tee_stderr

        except Exception as e:
            # If logging fails, just continue without logging
            print(f"Warning: Could not initialize log file: {e}", file=sys.stderr)
            self.enabled = False

    def write_to_log(self, message):
        """
        Write message to the log file; the actual disk I/O is deferred
        to the file object's own buffering.

        Args:
            message: String to write to the log file.
        """
        if not self.enabled or not self.log_file or self.log_file.closed:
            return

        try:
            self.log_file.write(message)
        except Exception:
            # A logging failure must never break the program's own output.
            pass

    def flush_log(self):
        """Flush the log file buffer to disk (best-effort)."""
        if self.enabled and self.log_file and not self.log_file.closed:
            try:
                self.log_file.flush()
            except Exception:
                pass

    def stop(self):
        """Stop capturing stdout/stderr and close the log file. Idempotent."""
        if not self.enabled or not self.log_file:
            return

        try:
            # Restore original streams
            if self.original_stdout:
                sys.stdout = self.original_stdout
            if self.original_stderr:
                sys.stderr = self.original_stderr

            # Flush any remaining buffered data
            self.flush_log()

            # Write footer and close log file
            if self.log_file and not self.log_file.closed:
                self.log_file.write(f"\n\n{'=' * 60}\n")
                self.log_file.write(f"=== Log ended at {datetime.now().isoformat()} ===\n")
                self.log_file.flush()
                self.log_file.close()

        except Exception as e:
            print(f"Warning: Error closing log file: {e}", file=sys.stderr)

    def __enter__(self):
        """Context manager entry: begin capture."""
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: record any exception, then stop capture."""
        # Log any exceptions before the footer is written
        if exc_type is not None and self.enabled and self.log_file and not self.log_file.closed:
            try:
                import traceback
                self.log_file.write("\n\n=== EXCEPTION OCCURRED ===\n")
                self.log_file.write(f"Exception type: {exc_type.__name__}\n")
                self.log_file.write(f"Exception value: {exc_val}\n")
                self.log_file.write("\nTraceback:\n")
                traceback.print_exception(exc_type, exc_val, exc_tb, file=self.log_file)
                self.log_file.flush()
            except Exception:
                pass

        self.stop()
        return False  # Don't suppress exceptions

    @staticmethod
    def cleanup_old_logs(log_dir, retention_days):
        """
        Delete run_*.log files older than retention_days.

        Args:
            log_dir: Directory containing log files.
            retention_days: Number of days to keep log files.
        """
        if not os.path.exists(log_dir):
            return

        try:
            cutoff_time = time.time() - (retention_days * 86400)  # 86400 seconds per day

            for filename in os.listdir(log_dir):
                # Only touch files this logger created (run_<ts>_<id>.log)
                if filename.startswith("run_") and filename.endswith(".log"):
                    filepath = os.path.join(log_dir, filename)
                    try:
                        if os.path.isfile(filepath):
                            file_mtime = os.path.getmtime(filepath)
                            if file_mtime < cutoff_time:
                                os.remove(filepath)
                                print(f"Deleted old log file: {filepath}")
                    except Exception as e:
                        print(f"Warning: Could not delete log file {filepath}: {e}")

        except Exception as e:
            print(f"Warning: Error during log cleanup: {e}")


class TeeStream:
    """
    A write-only stream that duplicates writes to two destinations.

    Installed in place of sys.stdout / sys.stderr so output reaches both
    the terminal (immediately, for real-time feedback) and the log file
    (buffered, via the RunLogger).
    """

    def __init__(self, terminal_stream, logger):
        """
        Initialize TeeStream.

        Args:
            terminal_stream: Output stream for terminal (usually
                sys.__stdout__ or sys.__stderr__).
            logger: RunLogger instance (anything exposing write_to_log()
                and flush_log()) used for buffered log file writing.
        """
        self.terminal_stream = terminal_stream
        self.logger = logger

    def write(self, message):
        """Write message to both terminal and log file."""
        # Terminal first, flushed immediately for real-time feedback.
        # Narrow except: a bare except here would swallow KeyboardInterrupt
        # raised while the user's own print() is in flight.
        try:
            self.terminal_stream.write(message)
            self.terminal_stream.flush()
        except Exception:
            pass

        # Log copy goes through the buffered path (no immediate flush)
        try:
            self.logger.write_to_log(message)
        except Exception:
            pass

    def flush(self):
        """Flush both terminal and log file (best-effort)."""
        try:
            self.terminal_stream.flush()
        except Exception:
            pass
        try:
            self.logger.flush_log()
        except Exception:
            pass

    def isatty(self):
        """Check if stream is a TTY (delegate to terminal_stream)."""
        try:
            return self.terminal_stream.isatty()
        except Exception:
            return False
7 changes: 6 additions & 1 deletion lib/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,4 +75,9 @@
default_audio_proc_format = 'flac' # or 'mp3', 'aac', 'm4a', 'm4b', 'amr', '3gp', 'alac'. 'wav' format is ok but limited to process files < 4GB
default_output_format = 'm4b'
default_output_split = False
default_output_split_hours = '6' # if the final output exceeds output_split_hours * 2 hours, the final file will be split into chunks of output_split_hours each, plus a shorter final chunk for the remainder, if any.
default_output_split_hours = '6' # if the final output exceeds output_split_hours * 2 hours, the final file will be split into chunks of output_split_hours each, plus a shorter final chunk for the remainder, if any.

# Logging configuration
enable_run_logging = True # Enable per-run log file capture
logs_dir = os.path.abspath('logs')
log_retention_days = 14 # days to keep log files
25 changes: 24 additions & 1 deletion lib/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
from lib import *
from lib.classes.voice_extractor import VoiceExtractor
from lib.classes.tts_manager import TTSManager
from lib.classes.run_logger import RunLogger
#from lib.classes.redirect_console import RedirectConsole
#from lib.classes.argos_translator import ArgosTranslator

Expand Down Expand Up @@ -1932,6 +1933,7 @@ def convert_ebook_batch(args, ctx=None):
sys.exit(1)

def convert_ebook(args, ctx=None):
run_logger = None
try:
global is_gui_process, context
error = None
Expand Down Expand Up @@ -2048,7 +2050,22 @@ def convert_ebook(args, ctx=None):
os.rename(old_session_dir, session['session_dir'])
session['process_dir'] = os.path.join(session['session_dir'], f"{hashlib.md5(session['ebook'].encode()).hexdigest()}")
session['chapters_dir'] = os.path.join(session['process_dir'], "chapters")
session['chapters_dir_sentences'] = os.path.join(session['chapters_dir'], 'sentences')
session['chapters_dir_sentences'] = os.path.join(session['chapters_dir'], 'sentences')

# Initialize run logging
if enable_run_logging:
# Prefer process_dir/logs if available, otherwise use top-level logs/
log_dir = os.path.join(session['process_dir'], 'logs') if session.get('process_dir') else logs_dir
run_logger = RunLogger(log_dir, session_id=id, enabled=enable_run_logging)
run_logger.start()
# Cleanup old logs from top-level logs directory
RunLogger.cleanup_old_logs(logs_dir, log_retention_days)
# Also cleanup old logs from process_dir if it exists
if session.get('process_dir') and os.path.exists(session['process_dir']):
process_log_dir = os.path.join(session['process_dir'], 'logs')
if os.path.exists(process_log_dir):
RunLogger.cleanup_old_logs(process_log_dir, log_retention_days)

if prepare_dirs(args['ebook'], session):
session['filename_noext'] = os.path.splitext(os.path.basename(session['ebook']))[0]
msg = ''
Expand Down Expand Up @@ -2148,6 +2165,8 @@ def convert_ebook(args, ctx=None):
progress_status = f'Audiobook(s) {", ".join(os.path.basename(f) for f in exported_files)} created!'
session['audiobook'] = exported_files[-1]
print(info_session)
if run_logger:
run_logger.stop()
return progress_status, True
else:
error = 'combine_audio_chapters() error: exported_files not created!'
Expand All @@ -2169,9 +2188,13 @@ def convert_ebook(args, ctx=None):
if not is_gui_process and id is not None:
error += info_session
print(error)
if run_logger:
run_logger.stop()
return error, False
except Exception as e:
print(f'convert_ebook() Exception: {e}')
if run_logger:
run_logger.stop()
return e, False

def restore_session_from_data(data, session):
Expand Down
Loading