import pika
import boto3
import os
import json
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from dotenv import load_dotenv

# Load .env so DATABASE_URL / S3_BUCKET_NAME are available via os.getenv.
load_dotenv()

# NOTE(review): removed the loop that printed every environment variable —
# it leaked secrets (database credentials, AWS keys) to stdout/log files.
    
# Define the SQLAlchemy model
Base = declarative_base()

class ImageUpload(Base):
    """ORM mapping for one row of the image-upload table.

    Each row pairs an image file name with its companion JSON file name;
    the consumer callback passes both names directly to S3 lookups, so
    they presumably double as the S3 object keys — confirm against the
    uploader that writes this table.
    """

    __tablename__ = 'image_uploads'  # Adjust this based on your actual table name

    id = Column(Integer, primary_key=True)  # referenced by queue messages as message['id']
    image_name = Column(String(251))  # Adjusted to match your Laravel schema
    json_file_name = Column(String(251))  # Adjusted to match your Laravel schema
    # s3_image_key = Column(String)  # Ensure this is defined in the database
    # s3_json_key = Column(String)  # Ensure this is defined in the database

# Database connection
DATABASE_URL = os.getenv('DATABASE_URL')  # Use environment variable for security
if not DATABASE_URL:
    # Fail fast with a clear message instead of an opaque SQLAlchemy error
    # (create_engine(None) would crash anyway, just less readably).
    raise RuntimeError('DATABASE_URL environment variable is not set')

engine = create_engine(DATABASE_URL)
Session = sessionmaker(bind=engine)

# AWS S3 configuration
S3_BUCKET_NAME = os.getenv('S3_BUCKET_NAME')  # Use environment variable for security
if not S3_BUCKET_NAME:
    # Without a bucket every S3 call would fail per-message; abort at startup.
    raise RuntimeError('S3_BUCKET_NAME environment variable is not set')

# Credentials/region come from the standard boto3 lookup chain (env vars,
# ~/.aws, instance profile).
s3 = boto3.client('s3')

def _download_if_exists(key, label, done_label):
    """Probe S3 for *key* with head_object and download it to the CWD if present.

    Args:
        key: S3 object key (also used as the local file name via basename).
        label: display name used in the "exists"/"does not exist" messages.
        done_label: display name used in the "Downloaded" message.

    Any exception is caught and reported so a failure on one object never
    aborts the caller's processing of the other.
    """
    try:
        s3.head_object(Bucket=S3_BUCKET_NAME, Key=key)
        print(f'{label} {key} exists in S3. Downloading...')
        s3.download_file(S3_BUCKET_NAME, key, os.path.basename(key))
        print(f'Downloaded {done_label}: {os.path.basename(key)}')
    except Exception as e:
        # NOTE(review): this broad catch also masks auth/network/download
        # failures as "does not exist" — narrow to botocore ClientError with
        # a 404 status check when botocore can be imported here.
        print(f'{label} {key} does not exist in S3: {e}')


def check_s3_objects(image_key, json_key):
    """Check if the specified keys exist in S3 and download them if they exist."""
    # Removed the leftover debug print of S3_BUCKET_NAME and collapsed the
    # duplicated image/JSON branches into one helper.
    _download_if_exists(image_key, 'Image', 'image')
    _download_if_exists(json_key, 'JSON file', 'JSON file')

def callback(ch, method, properties, body):
    """Process one queue message: look up the row by id, then fetch its files.

    Args:
        ch, method, properties: standard pika delivery arguments (unused).
        body: UTF-8 JSON bytes; expected shape {"id": <int>} — the primary
            key of an image_uploads row.
    """
    message = json.loads(body.decode('utf-8'))
    image_id = message['id']  # Extract ID from the message

    # Query the database to fetch image name and JSON file name using the ID
    session = Session()
    try:
        image_upload = session.query(ImageUpload).filter_by(id=image_id).first()

        if image_upload:
            print(f'Found record for ID {image_id}: {image_upload.image_name}, {image_upload.json_file_name}')
            # Check if the files exist in S3 and download them
            check_s3_objects(image_upload.image_name, image_upload.json_file_name)
        else:
            print(f'No record found for ID {image_id}')
    finally:
        # Always release the session — the original leaked it whenever the
        # query or the S3 download raised.
        session.close()

def main():
    """Connect to RabbitMQ on localhost and consume image-processing messages."""
    # Connect to RabbitMQ
    connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
    channel = connection.channel()

    # Declare the queue (durable so it survives broker restarts)
    channel.queue_declare(queue='image_processing_queue', durable=True)

    # NOTE(review): auto_ack=True acknowledges before the callback runs, so a
    # crash mid-processing loses the message. Consider auto_ack=False with an
    # explicit basic_ack for at-least-once delivery.
    channel.basic_consume(queue='image_processing_queue', on_message_callback=callback, auto_ack=True)

    print('Waiting for messages. To exit press CTRL+C')
    try:
        channel.start_consuming()
    except KeyboardInterrupt:
        # The prompt advertises CTRL+C as the exit path — shut down cleanly
        # instead of dumping a traceback.
        channel.stop_consuming()
    finally:
        # Always release the broker connection on exit.
        connection.close()

if __name__ == '__main__':
    main()
