import { 
  PutObjectCommand, 
  DeleteObjectCommand, 
  GetObjectCommand
} from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { s3Client, getS3BucketName, getS3Region } from "./s3-client";
import crypto from "crypto";

/** Categories of upload this module supports; each maps to a distinct S3 key prefix. */
export type FileType = "avatar" | "tour-image";

/** Input to {@link uploadToS3}. */
interface UploadOptions {
  /** Raw file contents to store. */
  file: Buffer;
  /** Original client-side filename; used for the key's extension and stored as metadata. */
  filename: string;
  /** MIME type to set as the object's Content-Type. */
  contentType: string;
  /** Determines the key prefix (`avatars/` or `tours/`). */
  fileType: FileType;
  /** Required when fileType is "avatar". */
  userId?: string;
  /** Required when fileType is "tour-image". */
  tourId?: string;
}

/** Result of {@link uploadToS3}. */
interface UploadResult {
  /** S3 object key the file was stored under. */
  key: string;
  /** Pre-signed GET URL (expires after 1 hour). */
  url: string;
  /** Direct virtual-hosted-style S3 URL (only usable if the object/bucket is publicly readable). */
  publicUrl: string;
}

/**
 * Generate a secure, unique S3 key for the file.
 *
 * Keys embed a timestamp plus 8 random bytes so they never collide, which is
 * what allows the long immutable Cache-Control set at upload time.
 *
 * @param options - Upload configuration (fileType decides the prefix).
 * @returns Key of the form `avatars/{userId}/avatar-{ts}-{hash}.{ext}` or
 *          `tours/{tourId}/{ts}-{hash}.{ext}`.
 * @throws Error when the id required for the file type is missing.
 */
function generateS3Key(options: UploadOptions): string {
  const { fileType, userId, tourId, filename } = options;
  const timestamp = Date.now();
  const randomHash = crypto.randomBytes(8).toString("hex");

  // Extract the extension safely. The previous `split(".").pop()` returned the
  // whole filename when it contained no dot (and "" for a trailing dot),
  // producing keys like `avatar-...-hash.myfile`. Normalize to lowercase and
  // fall back to "bin" for extensionless uploads.
  const dotIndex = filename.lastIndexOf(".");
  const extension =
    dotIndex > 0 && dotIndex < filename.length - 1
      ? filename.slice(dotIndex + 1).toLowerCase()
      : "bin";

  switch (fileType) {
    case "avatar":
      if (!userId) throw new Error("userId required for avatar uploads");
      return `avatars/${userId}/avatar-${timestamp}-${randomHash}.${extension}`;

    case "tour-image":
      if (!tourId) throw new Error("tourId required for tour image uploads");
      return `tours/${tourId}/${timestamp}-${randomHash}.${extension}`;

    default:
      throw new Error(`Unknown file type: ${fileType}`);
  }
}

/**
 * Upload a file buffer to S3 under a freshly generated unique key.
 *
 * The object is stored server-side encrypted (AES256) with a 1-year immutable
 * Cache-Control header — safe because every upload gets a brand-new key.
 *
 * @param options - Upload configuration including file buffer, type, and metadata.
 * @returns The S3 key, a 1-hour signed GET URL, and the direct public URL.
 * @throws Error from generateS3Key when a required id is missing, or from the
 *         SDK when the PUT fails.
 */
export async function uploadToS3(options: UploadOptions): Promise<UploadResult> {
  const bucketName = getS3BucketName();
  const key = generateS3Key(options);

  // Only attach the ids that were actually supplied.
  const metadata: Record<string, string> = {
    originalName: options.filename,
    uploadedAt: new Date().toISOString(),
  };
  if (options.userId) metadata.userId = options.userId;
  if (options.tourId) metadata.tourId = options.tourId;

  await s3Client.send(
    new PutObjectCommand({
      Bucket: bucketName,
      Key: key,
      Body: options.file,
      ContentType: options.contentType,
      // Cache for 1 year (images don't change due to unique keys)
      CacheControl: "public, max-age=31536000, immutable",
      Metadata: metadata,
      ServerSideEncryption: "AES256",
    })
  );

  // Signed GET URL, valid for 1 hour.
  const signedUrl = await getSignedUrl(
    s3Client,
    new GetObjectCommand({
      Bucket: bucketName,
      Key: key,
      // Request original quality, no additional compression
      ResponseCacheControl: "public, max-age=31536000, immutable",
    }),
    { expiresIn: 3600 }
  );

  // Direct virtual-hosted-style S3 URL.
  const publicUrl = `https://${bucketName}.s3.${getS3Region()}.amazonaws.com/${key}`;

  return { key, url: signedUrl, publicUrl };
}

/**
 * Delete file from S3
 * @param key - S3 object key to delete
 */
export async function deleteFromS3(key: string): Promise<void> {
  try {
    const bucketName = getS3BucketName();
    const command = new DeleteObjectCommand({
      Bucket: bucketName,
      Key: key,
    });
    
    await s3Client.send(command);
    console.log(`Successfully deleted S3 object: ${key}`);
  } catch (error) {
    console.error("Error deleting from S3:", error);
    // Don't throw - file might already be deleted
  }
}

/**
 * Generate a signed URL for an existing S3 object
 * Note: This requires s3:GetObject permission to generate the signed URL
 * The URL allows temporary access to the object without additional permissions
 * @param key - S3 object key
 * @param expiresIn - URL expiration time in seconds (default: 1 hour)
 * @returns Signed URL that expires after the specified time
 */
export async function getSignedUrlForKey(
  key: string, 
  expiresIn: number = 3600
): Promise<string> {
  const bucketName = getS3BucketName();
  const command = new GetObjectCommand({
    Bucket: bucketName,
    Key: key,
    // Preserve original quality
    ResponseCacheControl: 'public, max-age=31536000, immutable',
  });

  return getSignedUrl(s3Client, command, { expiresIn });
}

/**
 * Extract S3 key from stored URL or path.
 *
 * Handles s3:// URIs, legacy/path-style HTTPS URLs (s3.amazonaws.com/...),
 * regional virtual-hosted-style URLs (bucket.s3.<region>.amazonaws.com/key —
 * the format uploadToS3 emits as publicUrl), stored keys, and local paths.
 *
 * BUG FIX: the old guard required the literal substring "s3.amazonaws.com",
 * which never matches regional virtual-hosted URLs like
 * "https://bucket.s3.us-east-1.amazonaws.com/key", so URLs produced by
 * uploadToS3 itself were unextractable.
 *
 * @param urlOrPath - URL, path, or key to extract from.
 * @returns S3 key or null if not extractable.
 */
export function extractS3Key(urlOrPath: string): string | null {
  // Handle s3:// URIs: strip scheme and bucket.
  if (urlOrPath.startsWith("s3://")) {
    const match = urlOrPath.match(/^s3:\/\/[^\/]+\/(.+?)(?:\?|$)/);
    return match ? match[1] : null;
  }

  // Handle any HTTPS S3 URL (with or without a region component). Query
  // strings (e.g. presigned-URL signatures) are cut off by the (?:\?|$) stop.
  if (urlOrPath.includes("amazonaws.com")) {
    const match = urlOrPath.match(/amazonaws\.com\/(.+?)(?:\?|$)/);
    return match ? match[1] : null;
  }

  // Handle stored keys directly (avatars/*, tours/*)
  if (urlOrPath.startsWith("avatars/") || urlOrPath.startsWith("tours/")) {
    return urlOrPath;
  }

  // Handle local paths (for backward compatibility during migration)
  if (urlOrPath.startsWith("/uploads/")) {
    return null; // This is a local path, not S3
  }

  return null;
}

/**
 * Validate file before upload.
 *
 * Size is checked first, then MIME type; the first failing check wins.
 *
 * @param file - File to validate.
 * @param options - Validation options (maxSize in bytes, allowedTypes as MIME strings).
 * @returns `{ valid: true }` or `{ valid: false, error }` with a user-facing message.
 */
export function validateFile(
  file: File,
  options: {
    maxSize: number;
    allowedTypes: string[];
  }
): { valid: boolean; error?: string } {
  const { maxSize, allowedTypes } = options;

  if (file.size > maxSize) {
    const maxMegabytes = maxSize / (1024 * 1024);
    return {
      valid: false,
      error: `File too large. Maximum size is ${maxMegabytes}MB.`,
    };
  }

  if (!allowedTypes.includes(file.type)) {
    return {
      valid: false,
      error: `Invalid file type. Allowed types: ${allowedTypes.join(", ")}`,
    };
  }

  return { valid: true };
}

/**
 * Check if a path is an S3 key (vs local path).
 *
 * A string counts as an S3 key when it starts with one of the prefixes this
 * module writes under (`avatars/` or `tours/`).
 *
 * @param path - Path to check.
 * @returns true if it's an S3 key, false if it's a local path.
 */
export function isS3Key(path: string): boolean {
  const s3KeyPrefixes = ["avatars/", "tours/"];
  return s3KeyPrefixes.some((prefix) => path.startsWith(prefix));
}
