#!/usr/bin/env python3
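"""One-off maintenance script: finds SummarizationJob rows stuck in the
'processing' state for more than 10 minutes and marks them as 'failed',
attaching a user-facing timeout message before committing the change."""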
from app import app
from models import db, SummarizationJob
from datetime import datetime, timedelta

with app.app_context():
    # Find jobs stuck in processing for more than 10 minutes
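    # Assumes created_at is stored as a naive UTC timestamp, so comparing it
    # directly against datetime.utcnow() is valid.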
    stuck_jobs = SummarizationJob.query.filter(
        SummarizationJob.status == 'processing',
        SummarizationJob.created_at < datetime.utcnow() - timedelta(minutes=10)
    ).all()
    
    print(f"Found {len(stuck_jobs)} stuck jobs")
    
    for job in stuck_jobs:
        print(f"\nJob: {job.job_uuid}")
        print(f"  File: {job.original_filename}")
        print(f"  Created: {job.created_at}")
        print(f"  Status: {job.status}")
        
        # Mark the job as failed with an actionable message for the user
        job.status = 'failed'
        job.error_message = (
            "Processing timeout - document may be too large or complex. For large documents like "
            "Supreme Court decisions, try using 'Short' summary length or splitting the document into sections."
        )
        
        print(f"  -> Updated to 'failed' status")
    
    if stuck_jobs:
        db.session.commit()
        print("\nChanges committed to database")