Transferring Data from Supabase to PlanetScale

There seems to be a lack of online guides for this, so I am posting some code snippets to help out future googlers. This is done with the Next.js pages structure, which makes it easy to set up the API routes needed.

pages/index.js - Simple buttons to initiate the transfer, and then the verification

import { useState } from "react"

export default function Home() {
  const [transferStatus, setTransferStatus] = useState("")
  const [verifyStatus, setVerifyStatus] = useState("")

  const handleTransfer = async () => {
    setTransferStatus("Transferring...")
    const response = await fetch("/api/transferData")
    const result = await response.json()
    setTransferStatus(JSON.stringify(result, null, 2))
  }

  const handleVerify = async () => {
    setVerifyStatus("Verifying...")
    const response = await fetch("/api/verifySync")
    const result = await response.json()
    setVerifyStatus(JSON.stringify(result, null, 2))
  }

  return (
    <div>
      <button onClick={handleTransfer}>Start Data Transfer</button>
      <pre>{transferStatus}</pre>
      <button onClick={handleVerify}>Verify Data Synchronization</button>
      <pre>{verifyStatus}</pre>
    </div>
  )
}
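
Both API routes below are plain GET handlers with no method check, so you can also trigger them by visiting /api/transferData and /api/verifySync directly in the browser.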

API - transferData.js - For my use case, I am transferring three tables: plan, deductible, and quote. Start with the table that doesn't have any foreign keys and then work backwards from there. In my case the chain is quote → deductible → plan, so plan goes first. I also had to map a few column names that had been changed along the way by our CTO.

Note: for some reason that I did not set up, our main Supabase client was getting rejected, so I just used a user that had access and connected to Supabase via the pg library, similar to how we are accessing PlanetScale. You can very easily replace the data lookup with the Supabase client.
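
For reference, swapping in the Supabase client would look roughly like this (a minimal sketch; the env var names and the getTableRows helper are placeholders of mine, not part of our actual setup):

import { createClient } from "@supabase/supabase-js"

const supabase = createClient(
  process.env.SUPABASE_URL,
  process.env.SUPABASE_SERVICE_ROLE_KEY
)

// Hypothetical replacement for the pg-based SELECT * queries below
async function getTableRows(table) {
  // Note: the Supabase API returns at most 1,000 rows per request by default
  const { data, error } = await supabase.from(table).select("*")
  if (error) throw error
  return data
}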

import { Pool } from "pg"
import mysql from "mysql2/promise"

async function getPostgresConnection() {
  const pool = new Pool({
    connectionString: process.env.DIRECT_DATABASE_URL,
    ssl: {
      rejectUnauthorized: false,
    },
  })
  return pool
}

async function getPlanetscaleConnection() {
  const DATABASE_URL = process.env.DATABASE_URL

  const matches = DATABASE_URL.match(
    /mysql:\/\/([^:]+):([^@]+)@([^/]+)\/([^?]+)\?ssl=(.+)/
  )

  if (!matches) {
    throw new Error("Invalid DATABASE_URL format.")
  }

  const [, user, password, host, database, sslOptions] = matches

  const connection = await mysql.createConnection({
    host,
    user,
    database,
    password,
    ssl: JSON.parse(decodeURIComponent(sslOptions)),
  })

  return connection
}
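
// For reference, the regex above expects a PlanetScale-style URL shaped like
// (placeholder values, not a real credential):
// mysql://user:pscale_pw_xxx@aws.connect.psdb.cloud/dbname?ssl={"rejectUnauthorized":true}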

export default async function handler(req, res) {
  console.log(
    "API route triggered. Starting connection tests and data transfer..."
  )

  let postgresResult = "Failed"
  let planetscaleResult = "Failed"
  let transferResult = "Failed"

  // Test PostgreSQL connection
  let postgresPool
  console.log("Testing PostgreSQL connection...")
  try {
    postgresPool = await getPostgresConnection()
    const client = await postgresPool.connect()
    await client.query("SELECT 1")
    client.release()
    postgresResult = "Successful"
    console.log("PostgreSQL connection successful!")
  } catch (error) {
    console.error("Error connecting to PostgreSQL:", error.message)
    return res
      .status(500)
      .json({ error: "Error connecting to PostgreSQL: " + error.message })
  }

  // Test Planetscale connection
  let planetscaleConnection
  console.log("Testing Planetscale connection...")
  try {
    planetscaleConnection = await getPlanetscaleConnection()
    await planetscaleConnection.query("SELECT 1")
    planetscaleResult = "Successful"
    console.log("Planetscale connection successful!")
  } catch (error) {
    console.error("Error connecting to Planetscale:", error.message)
    return res
      .status(500)
      .json({ error: "Error connecting to Planetscale: " + error.message })
  }

  // Data transfer logic
  const client = await postgresPool.connect()

  // For Plan
  console.log("Starting data transfer for 'Plan' table...")
  const { rows: planData } = await client.query("SELECT * FROM plan")
  for (let item of planData) {
    const { id, name, displayName, order, created_at, updated_at } = item
    console.log(item)
    await planetscaleConnection.execute(
      "INSERT INTO plan (id, name, display_name, `order`, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)",
      [id, name, displayName, order, created_at, updated_at]
    )
  }
  console.log("Data transfer for 'Plan' table successful!")

  // For Deductible
  console.log("Starting data transfer for 'Deductible' table...")
  const { rows: deductibleData } = await client.query(
    "SELECT * FROM deductible"
  )
  for (let item of deductibleData) {
    const {
      id,
      in: inValue,
      out: outValue,
      created_at,
      plan_id,
      updated_at,
    } = item

    console.log(item)
    await planetscaleConnection.execute(
      "INSERT INTO deductible (id, `in`, out, plan_id, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)",
      [id, inValue, outValue, plan_id, created_at, updated_at]
    )
  }
  console.log("Data transfer for 'Deductible' table successful!")

  // For Quote
  console.log("Starting data transfer for 'Quote' table...")
  const { rows: quoteData } = await client.query("SELECT * FROM quote")
  for (let item of quoteData) {
    console.log(item)
    const { id, age, price, deductible_id, created_at, updated_at } = item
    await planetscaleConnection.execute(
      "INSERT INTO quote (id, age, price, deductible_id, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)",
      [id, age, price, deductible_id, created_at, updated_at]
    )
  }
  console.log("Data transfer for 'Quote' table successful!")

  client.release()

  // Close the connections
  postgresPool.end()
  await planetscaleConnection.end()

  // Send the final response
  console.log("Sending final response...")
  res.status(200).json({
    postgresConnection: postgresResult,
    planetscaleConnection: planetscaleResult,
    dataTransfer: transferResult,
  })
}
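
One caveat: the inserts above are not idempotent, so re-running the route will fail on duplicate primary keys. If you want it safely re-runnable, one option is MySQL's ON DUPLICATE KEY UPDATE (a sketch using the plan columns from above):

await planetscaleConnection.execute(
  "INSERT INTO plan (id, name, display_name, `order`, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?) ON DUPLICATE KEY UPDATE name = VALUES(name), display_name = VALUES(display_name), `order` = VALUES(`order`), updated_at = VALUES(updated_at)",
  [id, name, displayName, order, created_at, updated_at]
)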

Lastly, a verify function that surfaces an example record for anything out of sync

API - verifySync.js

import { Pool } from "pg"
import mysql from "mysql2/promise"
import crypto from "crypto"

async function getPostgresConnection() {
  const pool = new Pool({
    connectionString: process.env.DIRECT_DATABASE_URL,
    ssl: {
      rejectUnauthorized: false,
    },
  })
  return pool
}

async function getPlanetscaleConnection() {
  const DATABASE_URL = process.env.DATABASE_URL
  const matches = DATABASE_URL.match(
    /mysql:\/\/([^:]+):([^@]+)@([^/]+)\/([^?]+)\?ssl=(.+)/
  )

  if (!matches) {
    throw new Error("Invalid DATABASE_URL format.")
  }

  const [, user, password, host, database, sslOptions] = matches
  const connection = await mysql.createConnection({
    host,
    user,
    database,
    password,
    ssl: JSON.parse(decodeURIComponent(sslOptions)),
  })

  return connection
}

async function getChecksum(data) {
  const hasher = crypto.createHash("md5")
  hasher.update(JSON.stringify(data))
  return hasher.digest("hex")
}
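
// Note: this checksum is order- and key-name-sensitive, so any table with a
// renamed column (plan's displayName vs display_name) will always fail the
// checksum and fall through to the row-by-row isEqual comparison below.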

const columnMapping = {
  plan: {
    displayName: "display_name",
  },
  deductible: {}, // No changes needed for the deductible table
  quote: {}, // No changes needed for the quote table
}

function isEqual(data1, data2, table) {
  // A missing row on either side is an automatic mismatch
  if (!data1 || !data2) {
    return false
  }
  for (let key in data1) {
    const columnKey = columnMapping[table][key] || key
    if (key.endsWith("_at")) {
      // Timestamp columns: compare as dates so formatting differences don't matter
      const date1 = new Date(data1[key])
      const date2 = new Date(data2[columnKey])
      if (date1.getTime() !== date2.getTime()) {
        return false
      }
    } else if (data1[key] !== data2[columnKey]) {
      return false
    }
  }
  return true
}

export default async function verifySync(req, res) {
  const postgresPool = await getPostgresConnection()
  const planetscaleConnection = await getPlanetscaleConnection()

  const tables = ["plan", "deductible", "quote"]
  const results = {}

  for (let table of tables) {
    const postgresQuery = `SELECT * FROM ${table} ORDER BY id`
    const planetscaleQuery = `SELECT * FROM ${table} ORDER BY id`

    const { rows: postgresData } = await postgresPool.query(postgresQuery)
    const [planetscaleData] = await planetscaleConnection.query(
      planetscaleQuery
    )

    const postgresChecksum = await getChecksum(postgresData)
    const planetscaleChecksum = await getChecksum(planetscaleData)

    const recordCountMatch = postgresData.length === planetscaleData.length

    if (postgresChecksum !== planetscaleChecksum) {
      // Record the mismatch, then hunt for the first differing row as an example
      results[table] = { recordCountMatch, checksumMatch: false }
      for (let i = 0; i < postgresData.length; i++) {
        if (!isEqual(postgresData[i], planetscaleData[i], table)) {
          results[table].exampleMismatch = {
            postgres: postgresData[i],
            planetscale: planetscaleData[i],
          }
          break
        }
      }
    } else {
      results[table] = { recordCountMatch, checksumMatch: true }
    }
  }

  await postgresPool.end()
  await planetscaleConnection.end()

  res.json(results)
}

Sample output:

It appears that timestamps are missing some millisecond data in PlanetScale, but other than that, the data looks good enough for our use case!

{
  "plan": {
    "recordCountMatch": true,
    "checksumMatch": false,
    "exampleMismatch": {
      "postgres": {
        "id": "05006ac5-417d-49c8-81eb-6f72e03b3aa0",
        "name": "Serie 3000",
        "created_at": "2023-09-14T11:19:52.210Z",
        "updated_at": "2023-09-14T11:19:52.210Z",
        "displayName": "Elite",
        "order": 2
      },
      "planetscale": {
        "id": "05006ac5-417d-49c8-81eb-6f72e03b3aa0",
        "name": "Serie 3000",
        "display_name": "Elite",
        "order": 2,
        "created_at": "2023-09-14T11:19:52.000Z",
        "updated_at": "2023-09-14T11:19:52.000Z"
      }
    }
  },
  "deductible": {
    "recordCountMatch": true,
    "checksumMatch": false,
    "exampleMismatch": {
      "postgres": {
        "id": "00b2684f-aa23-480c-b7cf-7b1dda3adeb3",
        "in": 2500,
        "out": null,
        "created_at": "2023-09-14T11:07:13.292Z",
        "plan_id": "32d13400-9aeb-449d-a3d7-69f4bdf588d1",
        "updated_at": "2023-09-14T11:07:13.292Z"
      },
      "planetscale": {
        "id": "00b2684f-aa23-480c-b7cf-7b1dda3adeb3",
        "in": 2500,
        "out": null,
        "plan_id": "32d13400-9aeb-449d-a3d7-69f4bdf588d1",
        "created_at": "2023-09-14T11:07:13.000Z",
        "updated_at": "2023-09-14T11:07:13.000Z"
      }
    }
  },
  "quote": {
    "recordCountMatch": true,
    "checksumMatch": false,
    "exampleMismatch": {
      "postgres": {
        "id": "000fc400-222b-4a4c-85c6-27449929954d",
        "age": 60,
        "price": 16876,
        "created_at": "2023-09-14T12:15:53.172Z",
        "deductible_id": "4dcc3f4c-3061-4729-86df-3d2f39599193",
        "updated_at": "2023-09-14T12:15:53.172Z"
      },
      "planetscale": {
        "id": "000fc400-222b-4a4c-85c6-27449929954d",
        "age": 60,
        "price": 16876,
        "deductible_id": "4dcc3f4c-3061-4729-86df-3d2f39599193",
        "created_at": "2023-09-14T12:15:53.000Z",
        "updated_at": "2023-09-14T12:15:53.000Z"
      }
    }
  }
}
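
The millisecond loss is almost certainly a schema detail rather than a transfer bug: MySQL DATETIME and TIMESTAMP columns default to whole-second precision and silently truncate fractional seconds on insert. If you need them preserved, declare the columns with a fractional precision, e.g. (a sketch; repeat any NOT NULL/default attributes your schema uses, and note that PlanetScale may require schema changes to go through a branch and deploy request):

ALTER TABLE plan
  MODIFY created_at DATETIME(3),
  MODIFY updated_at DATETIME(3);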
