Effective strategies for capturing and sending logs to SEER for better debugging
Logs provide crucial context when debugging failures. SEER's log capture feature lets you attach the output of a run to the success or error report you send, so failures arrive with the evidence needed to diagnose them. The most direct approach is to redirect `sys.stdout` and `sys.stderr` into in-memory buffers and forward their contents:
```python
import sys
from io import StringIO
from seerpy import SEER

# Initialize SEER
seer = SEER(api_key="your_api_key")

# Create string buffers to capture output
stdout_capture = StringIO()
stderr_capture = StringIO()

# Save the original stdout/stderr so they can be restored later
old_stdout = sys.stdout
old_stderr = sys.stderr

try:
    # Redirect output to the buffers
    sys.stdout = stdout_capture
    sys.stderr = stderr_capture

    # Your script logic here
    print("Processing data...")
    # ... your code ...
    print("Processing complete!")

    # Mark the run as a success and attach the captured output
    seer.success(logs=stdout_capture.getvalue())
except Exception as e:
    # On failure, send both stdout and stderr
    all_logs = stdout_capture.getvalue() + "\n" + stderr_capture.getvalue()
    seer.error(error_message=str(e), logs=all_logs)
finally:
    # Always restore the original stdout/stderr
    sys.stdout = old_stdout
    sys.stderr = old_stderr
```
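Manually swapping `sys.stdout` and `sys.stderr` works, but it is easy to get wrong. The standard library's `contextlib.redirect_stdout` and `redirect_stderr` restore the streams automatically, even when the body raises. A minimal sketch of the same pattern (the `seer.success`/`seer.error` calls are unchanged from above):

```python
from contextlib import redirect_stdout, redirect_stderr
from io import StringIO
from seerpy import SEER

seer = SEER(api_key="your_api_key")
stdout_capture = StringIO()
stderr_capture = StringIO()

try:
    # Both streams are restored automatically when the with-block exits
    with redirect_stdout(stdout_capture), redirect_stderr(stderr_capture):
        print("Processing data...")
        # ... your code ...
        print("Processing complete!")
    seer.success(logs=stdout_capture.getvalue())
except Exception as e:
    all_logs = stdout_capture.getvalue() + "\n" + stderr_capture.getvalue()
    seer.error(error_message=str(e), logs=all_logs)
```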
If your script already uses Python's `logging` module, you can route log records into an in-memory stream and send that instead:

```python
import logging
from io import StringIO
from seerpy import SEER

# Create a string stream to hold the log records
log_stream = StringIO()

# Configure logging to write to the string stream and to the console
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(log_stream),  # Capture to the string stream
        logging.StreamHandler(),            # Also print to the console
    ]
)

logger = logging.getLogger(__name__)
seer = SEER(api_key="your_api_key")

try:
    logger.info("Starting data processing")

    # Your logic here (load_data, process_data and save_results are placeholders)
    data = load_data()
    logger.info(f"Loaded {len(data)} records")

    results = process_data(data)
    logger.info(f"Processed {len(results)} results")

    save_results(results)
    logger.info("Results saved successfully")

    # Send success with the accumulated logs
    seer.success(logs=log_stream.getvalue())
except Exception as e:
    logger.error(f"Processing failed: {e}", exc_info=True)
    seer.error(
        error_message=str(e),
        logs=log_stream.getvalue()
    )
```
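One caveat: `logging.basicConfig` configures the root logger, and only on its first call, so in a larger application you may prefer to attach the capture handler to a dedicated, named logger. A minimal sketch of that variant (the logger name "my_job" is just an example):

```python
import logging
from io import StringIO

log_stream = StringIO()

# Attach the capture handler to one named logger instead of the root logger
logger = logging.getLogger("my_job")
logger.setLevel(logging.INFO)

handler = logging.StreamHandler(log_stream)
handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
logger.addHandler(handler)

logger.info("This record goes to log_stream")
print(log_stream.getvalue())
```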
SEER accepts logs up to 1 MB per job. For larger logs, consider keeping only the most recent lines:

```python
def truncate_logs(logs, max_lines=1000):
    """Keep only the last N lines of logs."""
    lines = logs.split('\n')
    if len(lines) > max_lines:
        return '\n'.join(lines[-max_lines:])
    return logs

# Usage
seer.error(
    error_message=str(e),
    logs=truncate_logs(log_stream.getvalue(), max_lines=500)
)
```
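Truncating by line count does not guarantee the result fits under the 1 MB byte limit if individual lines are long. As a sketch, assuming the logs are UTF-8 text, you can truncate by size instead:

```python
def truncate_logs_bytes(logs, max_bytes=1_000_000):
    """Keep only the trailing portion of the logs that fits in max_bytes."""
    encoded = logs.encode('utf-8')
    if len(encoded) <= max_bytes:
        return logs
    # Keep the tail; errors='ignore' drops a multi-byte character cut at the boundary
    return encoded[-max_bytes:].decode('utf-8', errors='ignore')

# Usage
seer.error(error_message=str(e), logs=truncate_logs_bytes(log_stream.getvalue()))
```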
Be careful not to log sensitive information. A few regular expressions can redact common secrets before the logs leave your machine:

```python
import re

def sanitize_logs(logs):
    """Remove sensitive information from logs."""
    # Redact API keys
    logs = re.sub(r'api[_-]?key[=:\s]+[\w-]+', 'api_key=***REDACTED***', logs, flags=re.IGNORECASE)
    # Redact passwords
    logs = re.sub(r'password[=:\s]+[\w-]+', 'password=***REDACTED***', logs, flags=re.IGNORECASE)
    # Redact email addresses if needed
    logs = re.sub(r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b', '***EMAIL***', logs)
    return logs

# Usage
seer.error(
    error_message=str(e),
    logs=sanitize_logs(log_stream.getvalue())
)
```
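In practice you will usually want both safeguards on every send. A small wrapper, assuming the `truncate_logs` and `sanitize_logs` helpers defined above (`prepare_logs` is a hypothetical name, not part of seerpy):

```python
def prepare_logs(raw_logs, max_lines=500):
    """Sanitize first, then truncate, so secrets anywhere in the logs are redacted."""
    return truncate_logs(sanitize_logs(raw_logs), max_lines=max_lines)

# Usage
seer.success(logs=prepare_logs(log_stream.getvalue()))
```

Sanitizing before truncating means redaction runs over the full log, so a secret is never preserved just because it sat outside the truncation window at the time of the scan.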