#!/usr/bin/env bash
#
# Export a sanitized seed bundle (MySQL dump + public assets) for the sandbox.
set -euo pipefail

# Absolute directory containing this script; expected to be the repo root.
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Preflight: a Laravel checkout plus the tools we shell out to below.
if [[ ! -f "${ROOT_DIR}/artisan" ]]; then
  echo "Run this script from the repository root." >&2
  exit 1
fi

command -v php >/dev/null 2>&1 || { echo "php is required to read .env" >&2; exit 1; }

command -v mysqldump >/dev/null 2>&1 || { echo "mysqldump is required to export the database." >&2; exit 1; }

# Pull DB + storage settings out of .env through the app's own Dotenv, so we
# read exactly what Laravel sees (including quoted/escaped values).
#
# BUG FIX: in `php -r`, __DIR__ resolves relative to the *current working
# directory*, not this script's directory — so the previous version broke
# whenever the script was invoked from outside the repo root, even though the
# artisan check above (which uses ROOT_DIR) passed. Pass ROOT_DIR explicitly.
eval "$(
  ROOT_DIR="${ROOT_DIR}" php -r '
    $root = getenv("ROOT_DIR");
    require $root . "/vendor/autoload.php";
    $dotenv = Dotenv\Dotenv::createImmutable($root);
    $dotenv->load();
    $keys = [
      "DB_CONNECTION","DB_HOST","DB_PORT","DB_DATABASE","DB_USERNAME","DB_PASSWORD",
      "FILESYSTEM_DISK","MEDIA_DISK","FILAMENT_FILESYSTEM_DRIVER","MEDIA_PREFIX",
      "AWS_ACCESS_KEY_ID","AWS_SECRET_ACCESS_KEY","AWS_SESSION_TOKEN",
      "AWS_DEFAULT_REGION","AWS_REGION","AWS_BUCKET","AWS_ENDPOINT","AWS_USE_PATH_STYLE_ENDPOINT",
      "S3_ASSETS_PREFIX",
    ];
    foreach ($keys as $key) {
      $value = $_ENV[$key] ?? "";
      if ($value !== "") {
        echo "export {$key}=" . escapeshellarg($value) . PHP_EOL;
      }
    }
  '
)"

# Only MySQL is supported (we shell out to mysqldump); bail out early on
# any other driver.
case "${DB_CONNECTION:-mysql}" in
  mysql) ;;
  *)
    echo "This script only supports MySQL connections. Current: ${DB_CONNECTION:-unknown}" >&2
    exit 1
    ;;
esac

# Sensible local defaults when .env omits host/port.
DB_HOST="${DB_HOST:-127.0.0.1}"
DB_PORT="${DB_PORT:-3306}"

if [[ -z "${DB_DATABASE:-}" || -z "${DB_USERNAME:-}" ]]; then
  echo "Missing DB_DATABASE or DB_USERNAME in .env." >&2
  exit 1
fi

# Dump destination: $1 if given, else a timestamped file under storage/.
timestamp="$(date +%Y%m%d-%H%M%S)"
OUT_FILE="${1:-"${ROOT_DIR}/storage/sandbox-seed-${timestamp}.sql"}"

# supports_flag FLAG [TOOL] — true when TOOL's --help output mentions FLAG.
# TOOL defaults to mysqldump (backward compatible); used to probe for
# version-specific dump options.
#
# BUG FIX: the old `tool --help | grep -q` form ran under `set -o pipefail`;
# grep -q exits as soon as it matches, which can kill the help command with
# SIGPIPE and make the whole pipeline — and therefore the probe — fail for
# flags that ARE supported. Capture the help text first, then grep it.
supports_flag() {
  local flag="$1"
  local tool="${2:-mysqldump}"
  local help_text
  help_text="$("${tool}" --help 2>/dev/null)" || true
  grep -q -- "${flag}" <<<"${help_text}"
}

# Baseline mysqldump flags: consistent snapshot, streaming output, full
# schema objects, binary-safe blobs, utf8mb4 throughout.
DUMP_FLAGS=(
  --single-transaction
  --quick
  --routines
  --triggers
  --events
  --hex-blob
  --default-character-set=utf8mb4
)

# Version-dependent flags: add each one only when this mysqldump knows it.
for optional_flag in --set-gtid-purged --no-tablespaces; do
  if supports_flag "${optional_flag}"; then
    case "${optional_flag}" in
      --set-gtid-purged) DUMP_FLAGS+=(--set-gtid-purged=OFF) ;;
      --no-tablespaces)  DUMP_FLAGS+=(--no-tablespaces) ;;
    esac
  fi
done

# Run the dump. MYSQL_PWD keeps the password out of argv (visible in `ps`)
# and is scoped to this single command.
MYSQLDUMP_ARGS=(
  -h "${DB_HOST}"
  -P "${DB_PORT}"
  -u "${DB_USERNAME}"
  "${DUMP_FLAGS[@]}"
  "${DB_DATABASE}"
)
MYSQL_PWD="${DB_PASSWORD:-}" mysqldump "${MYSQLDUMP_ARGS[@]}" > "${OUT_FILE}"

# Append the sanitization SQL to the dump. The quoted 'SQL' delimiter keeps
# the heredoc literal (no shell expansion), so the statements below are
# written to the file byte-for-byte and executed on the sandbox at import
# time, after the data has been loaded.
cat <<'SQL' >> "${OUT_FILE}"

-- --------------------------------------------------------------------
-- Sandbox sanitization (applied on import)
-- --------------------------------------------------------------------
-- Users: customers
SET @rownum := 0;
UPDATE users u
JOIN (
  SELECT id, (@rownum := @rownum + 1) AS rn
  FROM users
  WHERE type = 'customer'
  ORDER BY id
) t ON u.id = t.id
SET u.email = CONCAT('staging_user_', t.rn, '@championspirit.com'),
    u.email_2 = NULL,
    u.firstname = 'Staging',
    u.lastname = CONCAT('User ', t.rn),
    u.phone = NULL;

-- Users: coaches
SET @rownum := 0;
UPDATE users u
JOIN (
  SELECT id, (@rownum := @rownum + 1) AS rn
  FROM users
  WHERE type = 'coach'
  ORDER BY id
) t ON u.id = t.id
SET u.email = CONCAT('staging_coach_', t.rn, '@championspirit.com'),
    u.email_2 = NULL,
    u.firstname = 'Staging',
    u.lastname = CONCAT('Coach ', t.rn),
    u.phone = NULL;

-- Users: employees
SET @rownum := 0;
UPDATE users u
JOIN (
  SELECT id, (@rownum := @rownum + 1) AS rn
  FROM users
  WHERE type = 'employee'
  ORDER BY id
) t ON u.id = t.id
SET u.email = CONCAT('staging_employee_', t.rn, '@championspirit.com'),
    u.email_2 = NULL,
    u.firstname = 'Staging',
    u.lastname = CONCAT('Employee ', t.rn),
    u.phone = NULL;

-- Users: clear external identifiers/tokens
UPDATE users
SET remember_token = NULL,
    stripe_customer_id = NULL,
    member_number = NULL;

-- Employees: scrub HR fields
UPDATE employees
SET employee_number = NULL,
    employment_start = NULL,
    employment_end = NULL,
    contract_details = NULL,
    comments = NULL;

-- Coaches: scrub payment + external IDs
UPDATE coaches
SET iban = NULL,
    stripe_id = NULL,
    stripe_access_token = NULL,
    stripe_on_boarding_completed_at = NULL;

-- Places: planning and physical assessment recipients
UPDATE places
SET planning_recipients = CONCAT('["staging_place_', id, '_planning@championspirit.com"]')
WHERE planning_recipients IS NOT NULL AND planning_recipients LIKE '%@%';

UPDATE places
SET physical_assessment_recipients = CONCAT('["staging_place_', id, '_assessment@championspirit.com"]')
WHERE physical_assessment_recipients IS NOT NULL AND physical_assessment_recipients LIKE '%@%';

-- Places: clear encrypted credentials (avoid APP_KEY mismatch errors)
UPDATE places
SET door_system_credentials = NULL;

-- Places: address emails
UPDATE addresses
SET email = CONCAT('staging_place_', addressable_id, '@championspirit.com')
WHERE addressable_type = 'App\\Models\\Place' AND email IS NOT NULL;

-- Guests: anonymize guests + related data
SET @rownum := 0;
UPDATE guests g
JOIN (
  SELECT id, (@rownum := @rownum + 1) AS rn
  FROM guests
  ORDER BY id
) t ON g.id = t.id
SET g.email = CONCAT('staging_guest_', t.rn, '@championspirit.com');

UPDATE guest_user
SET firstname = 'Staging',
    lastname = 'Guest',
    phone = NULL,
    comments = NULL;

UPDATE guest_invitations
SET metadata = NULL,
    sent_at = NULL;

SET @rownum := 0;
UPDATE pre_arrival_forms paf
JOIN (
  SELECT id, (@rownum := @rownum + 1) AS rn
  FROM pre_arrival_forms
  ORDER BY id
) t ON paf.id = t.id
SET paf.email = CONCAT('staging_prearrival_', t.rn, '@championspirit.com'),
    paf.firstname = 'Staging',
    paf.lastname = CONCAT('Guest ', t.rn),
    paf.phone = NULL,
    paf.address_line1 = 'Redacted',
    paf.address_line2 = NULL,
    paf.city = 'Redacted',
    paf.postal_code = '00000',
    paf.country = 'XX',
    paf.notes = NULL;

-- Kids + families: anonymize
SET @rownum := 0;
UPDATE kids k
JOIN (
  SELECT id, (@rownum := @rownum + 1) AS rn
  FROM kids
  ORDER BY id
) t ON k.id = t.id
SET k.name = CONCAT('Kid ', t.rn),
    k.dob = '2010-01-01',
    k.email = NULL,
    k.phone = NULL,
    k.comments = NULL;

UPDATE families
SET name = CONCAT('Family ', id),
    member_number = NULL,
    member_type = NULL,
    comments = NULL;

-- Access logs: remove personal details
UPDATE place_access_logs
SET firstname = NULL,
    lastname = NULL,
    id_number = NULL,
    email = NULL,
    comments = NULL;

-- Bookings + payments: remove free-text comments
UPDATE bookings
SET comments = NULL;

UPDATE payments
SET comments = NULL,
    metadata = NULL;

UPDATE flex_memberships
SET comments = NULL;

-- Chat + AI history
DELETE FROM message_attachments;
DELETE FROM message_gps_shares;
DELETE FROM message_receipts;
DELETE FROM ai_messages;
DELETE FROM ai_sessions;
DELETE FROM messages;
DELETE FROM conversation_settings;
DELETE FROM conversation_participants;
DELETE FROM conversations;
DELETE FROM chats;

-- Notifications + tokens
DELETE FROM notifications;
DELETE FROM expo_notifications;
DELETE FROM expo_tickets;
DELETE FROM expo_tokens;
DELETE FROM personal_access_tokens;
DELETE FROM password_resets;
DELETE FROM social_accounts;
DELETE FROM failed_jobs;
DELETE FROM activity_log;

-- QuickBooks: remove all integration data
DELETE FROM quickbooks_transactions;
DELETE FROM quickbooks_customers;
DELETE FROM quickbooks_products;
DELETE FROM quickbooks_sync_mappings;
DELETE FROM quickbooks_accounts;
DELETE FROM quickbooks_account_types;
DELETE FROM quickbooks_tax_codes;
DELETE FROM quickbooks_connections;

SQL

# zip is needed to produce the final bundle.
command -v zip >/dev/null 2>&1 || {
  echo "zip is required to bundle the dump + assets." >&2
  exit 1
}

# Stage the bundle in a throwaway directory, removed on every exit path.
BUNDLE_DIR="$(mktemp -d)"
cleanup_bundle() {
  rm -rf "${BUNDLE_DIR}"
}
trap cleanup_bundle EXIT

# Layout mirrors the app: the dump under storage/, assets under
# storage/app/public, plus the import script at the top level.
SQL_BASENAME="$(basename "${OUT_FILE}")"
mkdir -p "${BUNDLE_DIR}/storage/app/public"
cp "${OUT_FILE}" "${BUNDLE_DIR}/storage/${SQL_BASENAME}"
cp "${ROOT_DIR}/importSandboxSeed.sh" "${BUNDLE_DIR}/importSandboxSeed.sh"

# Decide where media assets live: if any of these .env disks is "s3" we pull
# assets from the bucket, otherwise we mirror the local public disk.
USE_S3=0
if [[ "${MEDIA_DISK:-}" == "s3" || "${FILESYSTEM_DISK:-}" == "s3" || "${FILAMENT_FILESYSTEM_DRIVER:-}" == "s3" ]]; then
  USE_S3=1
fi

if [[ "${USE_S3}" -eq 1 ]]; then
  if ! command -v aws >/dev/null 2>&1; then
    echo "aws CLI is required to export S3 assets." >&2
    exit 1
  fi
  if [[ -z "${AWS_BUCKET:-}" ]]; then
    echo "AWS_BUCKET is required to export S3 assets." >&2
    exit 1
  fi
  # Optional key prefix inside the bucket; strip one leading slash so we do
  # not build "s3://bucket//prefix".
  S3_PREFIX="${S3_ASSETS_PREFIX:-${MEDIA_PREFIX:-}}"
  S3_PREFIX="${S3_PREFIX#/}"
  S3_SOURCE="s3://${AWS_BUCKET}"
  if [[ -n "${S3_PREFIX}" ]]; then
    S3_SOURCE="${S3_SOURCE}/${S3_PREFIX}"
  fi
  AWS_ARGS=()
  if [[ -n "${AWS_ENDPOINT:-}" ]]; then
    AWS_ARGS+=(--endpoint-url "${AWS_ENDPOINT}")
  fi
  if [[ "${AWS_USE_PATH_STYLE_ENDPOINT:-false}" == "true" ]]; then
    # NOTE(review): AWS_S3_FORCE_PATH_STYLE is not a documented AWS CLI
    # environment variable (the CLI uses the `s3.addressing_style = path`
    # config setting) — confirm this actually takes effect with the CLI
    # version in use.
    export AWS_S3_FORCE_PATH_STYLE=1
  fi
  # BUG FIX: "${AWS_ARGS[@]}" on an *empty* array aborts under `set -u` on
  # bash < 4.4 (e.g. macOS system bash). The ${arr[@]+...} form expands to
  # nothing when the array is empty and to the quoted elements otherwise.
  aws s3 sync ${AWS_ARGS[@]+"${AWS_ARGS[@]}"} "${S3_SOURCE}" "${BUNDLE_DIR}/storage/app/public" >/dev/null
else
  # Local disk: mirror storage/app/public into the bundle (rsync when
  # available, plain cp -a otherwise).
  if [[ -d "${ROOT_DIR}/storage/app/public" ]]; then
    if command -v rsync >/dev/null 2>&1; then
      rsync -a "${ROOT_DIR}/storage/app/public/" "${BUNDLE_DIR}/storage/app/public/"
    else
      cp -a "${ROOT_DIR}/storage/app/public/." "${BUNDLE_DIR}/storage/app/public/"
    fi
  fi
fi

# Resolve the bundle path ($2 or a timestamped default next to the dump) and
# make it absolute, since we zip from inside the staging directory.
ZIP_FILE="${2:-"${ROOT_DIR}/storage/sandbox-seed-${timestamp}.zip"}"
if [[ "${ZIP_FILE}" != /* ]]; then
  ZIP_FILE="${ROOT_DIR}/${ZIP_FILE}"
fi
# BUG FIX: `zip -r` *updates* an existing archive, so a re-run against the
# same path would keep stale entries from a previous bundle. Start fresh.
rm -f -- "${ZIP_FILE}"
(
  cd "${BUNDLE_DIR}"
  zip -r "${ZIP_FILE}" "storage/${SQL_BASENAME}" "storage/app/public" "importSandboxSeed.sh" >/dev/null
)

echo "Sanitized dump written to: ${OUT_FILE}"
echo "Bundle written to: ${ZIP_FILE}"
echo "On sandbox, run:"
echo "  ./importSandboxSeed.sh ${ZIP_FILE}"
