Integrate SEER monitoring into your scripts and applications with our official SDKs.
Official Python library for easy integration into scripts and data pipelines.
pip install seerpy
Use our REST API from any programming language or platform.
Base URL: https://api.ansrstudio.com
# Install the Python SDK
pip install seerpy

# Or use the REST API directly
curl -X POST https://api.ansrstudio.com/monitoring \
  -H "Authorization: your_api_key" \
  -H "Content-Type: application/json"
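The same call can be made from any HTTP client. As a minimal sketch, here it is from Python with the requests library; the JSON body (job_name and status fields) is an assumption for illustration, not the documented schema, so check the API reference for the actual payload:

import requests

# Hypothetical payload -- the real field names are defined by the API reference
payload = {"job_name": "my-pipeline", "status": "success"}

response = requests.post(
    "https://api.ansrstudio.com/monitoring",
    headers={"Authorization": "your_api_key"},  # same header as the curl example
    json=payload,                               # sends Content-Type: application/json
)
response.raise_for_status()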
from seerpy import Seer
# Initialize the SDK
seer = Seer(api_key="your_api_key")
# Start monitoring
seer.start(job_name="my-pipeline")
try:
    # Your code here
    process_data()
    seer.success()
except Exception as e:
    seer.failure(str(e))
    raise
# Add metadata and logging
seer.start(
    job_name="data-pipeline",
    metadata={
        "environment": "production",
        "version": "1.2.0"
    }
)

# Log progress
seer.log("Processing started")
seer.log("Checkpoint 1 complete", {"records": 1000})

# Send heartbeats for long-running jobs
seer.heartbeat()

# Complete with custom metadata
seer.success(metadata={"records_processed": 5000})
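For long-running loops you may want to throttle heartbeats rather than send one per iteration. A minimal sketch of time-based throttling, using only the calls shown above; the records iterable and process() function are hypothetical placeholders:

import time

from seerpy import Seer

seer = Seer(api_key="your_api_key")
seer.start(job_name="long-running-job")

last_beat = time.monotonic()
for record in records:  # hypothetical iterable of work items
    process(record)     # hypothetical per-item processing
    # Send a heartbeat at most once per minute
    if time.monotonic() - last_beat >= 60:
        seer.heartbeat()
        last_beat = time.monotonic()

seer.success()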
Configure the client with additional options:

from seerpy import Seer

seer = Seer(
    api_key="your_api_key",
    base_url="https://api.ansrstudio.com",
    timeout=30,            # API timeout in seconds
    max_retries=3,         # Retry failed requests
    enable_offline=True    # Queue calls when offline
)

Use Python's context manager for automatic cleanup:
with seer.monitor(job_name="data-pipeline"):
    process_data()
    # Automatically calls seer.success() on exit
    # Calls seer.failure() on exception

Monitor functions with a simple decorator:
@seer.monitor_function(job_name="etl-process")
def extract_transform_load():
    # Your ETL logic here
    return results
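Calling the decorated function then reports a run for that call; the exact success/failure semantics are assumed here to mirror the context manager above:

# Assumption: each call is reported as one monitored run
output = extract_transform_load()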
Monitor batch processing with progress updates:

seer.start(job_name="batch-processor")
for i, batch in enumerate(batches):
    process_batch(batch)
    seer.log(f"Processed batch {i+1}/{len(batches)}")
    seer.heartbeat()
seer.success(metadata={"total_batches": len(batches)})
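These patterns compose. As a sketch, the same batch loop can be wrapped in the context manager so failures are reported automatically; this assumes seer.log() and seer.heartbeat() can be used inside a seer.monitor() block, which this page does not confirm, and batches and process_batch() are hypothetical placeholders:

with seer.monitor(job_name="batch-processor"):
    for i, batch in enumerate(batches):
        process_batch(batch)
        seer.log(f"Processed batch {i+1}/{len(batches)}")
        seer.heartbeat()
    # The block reports success on a clean exit and failure on an exception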