import { test, equal, ok, notOk } from "zora"

import chalk from "chalk"

import { connect } from "../src/room.js"
import WasmCRDT from "../src/wasm-crdt.js"
import MockConnection, {
  userID,
  sendListener,
  broadcastListener,
  getEventListener,
} from "../src/connection/MockConnection.js"

import {
  createMessageReceivedEvent as _createMessageReceivedEvent,
  handshake,
  dotDraw,
  dotErase,
  pathDraw,
  pathErase,
} from "./data.js"

const remoteUserID = "392bf960-1d18-4482-bbbf-1c85e0132c9a"

const createMessageReceivedEvent = (message, channel = "crdt") =>
  _createMessageReceivedEvent(message, channel, remoteUserID)
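
// Every remotely-received packet in these benchmarks is attributed to the
// fixed remoteUserID above. syncStep1 and syncDone are MessagePack-encoded
// "crdt"-channel payloads: syncStep1 decodes to { type: "sync step 1",
// message: <3-byte binary payload> } and syncDone to { type: "sync done" },
// each wrapped in a single uncompressed slice.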

const syncStep1 = {
  uuid: "6e20b20d-e1d8-405d-8a61-d56cb1c47a24",
  message: Uint8Array.of(
    130,
    164,
    116,
    121,
    112,
    101,
    171,
    115,
    121,
    110,
    99,
    32,
    115,
    116,
    101,
    112,
    32,
    49,
    167,
    109,
    101,
    115,
    115,
    97,
    103,
    101,
    196,
    3,
    0,
    0,
    0,
  ),
  slice: 0,
  length: 1,
  compressed: false,
}
const syncDone = {
  message: Uint8Array.of(
    129,
    164,
    116,
    121,
    112,
    101,
    169,
    115,
    121,
    110,
    99,
    32,
    100,
    111,
    110,
    101,
  ),
  slice: 0,
  length: 1,
  compressed: false,
}

// Start: Adapted from https://github.com/jprichardson/buffer-json (MIT license)
function stringify(value, space) {
  return JSON.stringify(value, replacer, space)
}

function parse(text) {
  return JSON.parse(text, reviver)
}

function replacer(key, value) {
  if (value instanceof Uint8Array) {
    return "base64:" + Buffer.from(value).toString("base64")
  }
  return value
}

function reviver(key, value) {
  if (typeof value == "string" && value.startsWith("base64:")) {
    return Uint8Array.from(Buffer.from(value.slice("base64:".length), "base64"))
  }
  return value
}
// End: Adapted from https://github.com/jprichardson/buffer-json (MIT license)
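
// The helpers below persist intermediate packet/event dumps between benchmark
// phases. They rely on the non-standard, Chrome-only webkitRequestFileSystem
// API, so these tests are expected to run in a Chromium browser. Despite the
// "BSON" names, the dumps are plain JSON text with Uint8Arrays base64-encoded
// via the replacer/reviver above; Buffer is assumed to be supplied by the
// bundler's browser polyfill.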

let fs

async function initFileSystem() {
  fs = await new Promise((resolve, reject) =>
    window.webkitRequestFileSystem(
      window.TEMPORARY,
      1024 * 1024 * 100,
      resolve,
      reject,
    ),
  )
}

async function dumpBSON(filename, data) {
  await new Promise((resolve) => {
    fs.root.getFile(
      filename,
      { create: false },
      (fileEntry) => {
        fileEntry.remove(resolve, console.error)
      },
      resolve,
    )
  })

  await new Promise((resolve) => {
    fs.root.getFile(
      filename,
      { create: true, exclusive: true },
      (fileEntry) => {
        fileEntry.createWriter((fileWriter) => {
          fileWriter.onwriteend = resolve
          fileWriter.onerror = console.error

          const blob = new Blob([stringify(data)], { type: "text/plain" })

          fileWriter.write(blob)
        }, console.error)
      },
      console.error,
    )
  })
}

async function loadBSON(filename) {
  return await new Promise((resolve) => {
    fs.root.getFile(
      filename,
      {},
      (fileEntry) => {
        fileEntry.file((file) => {
          const reader = new FileReader()

          reader.onloadend = () => resolve(parse(reader.result))

          reader.readAsText(file)
        }, console.error)
      },
      console.error,
    )
  })
}

function printBenchmark(title, iterations, results) {
  console.debug(`\n  ${title} (${iterations} iterations):\n`)

  for (const name in results) {
    const {
      timeLoc,
      encodeRAM,
      packets,
      size,
      timeRem,
      decodeRAM,
      events,
    } = results[name]
    const synchronisation = name == "synchronisation"

    console.debug(
      chalk`    {yellow ⧗} {dim ${name}:} {yellow.inverse ${(
        timeLoc /
        (1e3 * (synchronisation ? 1 : iterations))
      ).toFixed(3)}ms ${synchronisation ? "total" : "/ it"}} + {red.inverse ${(
        encodeRAM /
        (1024 * 1024)
      ).toFixed(
        3,
      )}MB} => {dim ${packets} packet(s)} => {magenta.inverse ${size}B} => {yellow.inverse ${(
        timeRem /
        (1e3 * (synchronisation ? 1 : iterations))
      ).toFixed(3)}ms ${synchronisation ? "total" : "/ it"}} + {red.inverse ${(
        decodeRAM /
        (1024 * 1024)
      ).toFixed(3)}MB} => {dim ${events} event(s)}\n`,
    )
  }

  console.debug(`\n`)
}
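
// Benchmark results are emitted as JSON lines on stdout via console.info;
// collecting them into FILENAME (e.g. the plots/*.tsv files referenced in the
// tests below) is assumed to be handled by an external harness.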

function writeBenchmarkHeader(filename, title, results) {
  console.info(JSON.stringify({ filename, title, results }))
}

function appendBenchmark(filename, iterations, results) {
  console.info(JSON.stringify({ filename, iterations, results }))
}
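
// Samples the used JS heap after repeatedly triggering garbage collection.
// window.gc() is only available when Chrome is launched with
// --js-flags=--expose-gc, and performance.memory is likewise a non-standard
// Chrome-only API.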

function captureHeapUsage() {
  for (let i = 0; i < 10; i++) {
    window.gc()
  }

  return performance.memory.usedJSHeapSize
}
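
// Runs one bidirectional benchmark per entry in ITERATIONSLIST, strictly in
// sequence. Each run:
//   1. connects a local frontend (`room`), draws and erases ITERATIONS paths,
//      and records the broadcast packets plus the packets sent in response to
//      a remote "sync step 1";
//   2. connects a second client (`updateRoom`) that replays the recorded add
//      and erase packets and collects the resulting events;
//   3. connects a third client (`syncRoom`) that replays the recorded sync
//      packets;
//   4. aggregates timings, packet counts/sizes, event counts and heap
//      snapshots into the addPath / extendErasureIntervals / synchronisation
//      results.
// Intermediate dumps are persisted to the sandboxed filesystem and nulled out
// before each heap snapshot so they do not count towards the measured RAM.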

function runBidirectionalBenchmark(
  BENCHMARK,
  FILENAME,
  ITERATIONSLIST,
  BLOCKSIZE,

  addData,
  eraseData,

  addOnInitFrontend,
  addBroadcastGroupTimeout,
  addOnBroadcastGroup,
  addPacketsFilename,

  eraseOnInitFrontend,
  eraseBroadcastGroupTimeout,
  eraseOnBroadcastGroup,
  erasePacketsFilename,

  syncSendGroupTimeout,
  syncOnSendGroup,
  syncPacketsFilename,

  addOnInitBackend,
  addEventGroupTimeout,
  addOnEventGroup,
  addEventsCache,
  addEventsFilename,

  eraseOnInitBackend,
  eraseEventGroupTimeout,
  eraseOnEventGroup,
  eraseEventsCache,
  eraseEventsFilename,

  syncEventGroupTimeout,
  syncOnEventGroup,
  syncEventsCache,
  syncEventsFilename,
) {
  if (BENCHMARK && FILENAME) {
    writeBenchmarkHeader(FILENAME, BENCHMARK, [
      "addPath",
      "extendErasureIntervals",
      "synchronisation",
    ])
  }

  return ITERATIONSLIST.reduce(
    (promise, ITERATIONS) =>
      promise.then(() => {
        const pathIDs = []
        let prevTime
        let currTime

        let connectRAM // eslint-disable-line no-unused-vars
        let addLocTime = 0
        let addPackets = []
        let addSize = 0
        let addRAM
        let eraseLocTime = 0
        let erasePackets = []
        let eraseSize = 0
        let eraseRAM
        let syncLocTime
        let syncPackets = []
        let syncSize = 0
        let syncRAM
        let disconnectRAM // eslint-disable-line no-unused-vars

        let connectUpdRAM // eslint-disable-line no-unused-vars
        let addRemTime = 0
        let addEvents = []
        let addRemRAM
        let eraseRemTime = 0
        let eraseEvents = []
        let eraseRemRAM
        let disconnectUpdRAM // eslint-disable-line no-unused-vars

        let connectSyncRAM // eslint-disable-line no-unused-vars
        let syncRemTime = 0
        let syncEvents = []
        let syncRemRAM
        let disconnectSyncRAM // eslint-disable-line no-unused-vars

        let timeout

        let addEventListener = null
        let eraseEventListener = null

        let room = null
        let updateRoom = null
        let syncRoom = null

        return (
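          // Connect the local frontend room and feed it the "tw-ml" handshake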
          // eslint-disable-next-line no-async-promise-executor
          new Promise(async (resolve) => {
            userID.uuid = "61a540d6-4522-48c7-a660-1ed501503cb7"

            room = await connect("room", WasmCRDT, MockConnection)
            getEventListener(
              "room",
              "messageReceived",
            )(createMessageReceivedEvent(handshake, "tw-ml"))

            connectRAM = captureHeapUsage()

            return resolve()
          })
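            // Draw phase: create ITERATIONS paths locally, timing each
            // broadcast group and recording every broadcast "crdt" packet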
            .then(
              () =>
                new Promise((resolve) => {
                  let broadcasts = 0

                  broadcastListener.callback = (channel, message) => {
                    currTime = window.performance.now()

                    equal(channel, "crdt")
                    ok(message.message instanceof Uint8Array)

                    addPackets[addPackets.length - 1].push(message)
                    addSize += message.message.length

                    clearTimeout(timeout)
                    timeout = setTimeout(() => {
                      broadcasts += 1

                      addLocTime += (currTime - prevTime) * 1e3

                      prevTime = window.performance.now()

                      addOnBroadcastGroup(
                        room,
                        addPackets,
                        pathIDs,
                        addData,
                        ITERATIONS,
                        broadcasts,
                        resolve,
                      )
                    }, addBroadcastGroupTimeout)
                  }

                  prevTime = window.performance.now()

                  addOnInitFrontend(
                    room,
                    addPackets,
                    pathIDs,
                    addData,
                    ITERATIONS,
                    BLOCKSIZE,
                  )
                }),
            )
            .then(async () => {
              broadcastListener.callback = null

              await dumpBSON(addPacketsFilename, addPackets)
              addPackets = null

              addRAM = captureHeapUsage()
            })
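            // Erase phase: extend erasure intervals on the drawn paths, again
            // timing broadcast groups and recording the broadcast packets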
            .then(
              () =>
                new Promise((resolve) => {
                  let broadcasts = 0

                  broadcastListener.callback = (channel, message) => {
                    currTime = window.performance.now()

                    equal(channel, "crdt")
                    ok(message.message instanceof Uint8Array)

                    erasePackets[erasePackets.length - 1].push(message)
                    eraseSize += message.message.length

                    clearTimeout(timeout)
                    timeout = setTimeout(() => {
                      broadcasts += 1

                      eraseLocTime += (currTime - prevTime) * 1e3

                      prevTime = window.performance.now()

                      eraseOnBroadcastGroup(
                        room,
                        erasePackets,
                        pathIDs,
                        eraseData,
                        ITERATIONS,
                        broadcasts,
                        resolve,
                      )
                    }, eraseBroadcastGroupTimeout)
                  }

                  prevTime = window.performance.now()

                  eraseOnInitFrontend(
                    room,
                    erasePackets,
                    pathIDs,
                    eraseData,
                    ITERATIONS,
                    BLOCKSIZE,
                  )
                }),
            )
            .then(async () => {
              broadcastListener.callback = null

              await dumpBSON(erasePacketsFilename, erasePackets)
              erasePackets = null

              eraseRAM = captureHeapUsage()
            })
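            // Synchronisation phase: feed a remote "sync step 1" request and
            // record every packet sent back to the requesting peer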
            .then(
              () =>
                new Promise((resolve) => {
                  sendListener.callback = (uid, channel, message) => {
                    const currTime = window.performance.now()

                    equal(uid, remoteUserID)
                    equal(channel, "crdt")
                    ok(message.message instanceof Uint8Array)

                    syncLocTime = (currTime - prevTime) * 1e3

                    syncPackets.push(message)
                    syncSize += message.message.length

                    clearTimeout(timeout)
                    timeout = setTimeout(
                      () => syncOnSendGroup(syncPackets, resolve),
                      syncSendGroupTimeout,
                    )
                  }

                  prevTime = window.performance.now()

                  getEventListener(
                    "room",
                    "messageReceived",
                  )(createMessageReceivedEvent(syncStep1))
                }),
            )
            .then(async () => {
              sendListener.callback = null

              await dumpBSON(syncPacketsFilename, syncPackets)
              syncPackets = null

              syncRAM = captureHeapUsage()

              room.disconnect()
              room = null

              disconnectRAM = captureHeapUsage()
            })
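            // Remote replay: a second client (updateRoom) applies the recorded
            // add packets and then the erase packets, timing each event group
            // and collecting the resulting path/interval events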
            .then(
              () =>
                // eslint-disable-next-line no-async-promise-executor
                new Promise(async (resolve) => {
                  userID.uuid = "5c9e550b-3de8-4a32-80e1-80c08c19891a"

                  updateRoom = await connect("update", WasmCRDT, MockConnection)
                  getEventListener(
                    "update",
                    "messageReceived",
                  )(createMessageReceivedEvent(handshake, "tw-ml"))

                  connectUpdRAM = captureHeapUsage()

                  addPackets = await loadBSON(addPacketsFilename)

                  return resolve()
                }),
            )
            .then(
              () =>
                new Promise((resolve) => {
                  let broadcasts = 0

                  let currTime

                  const timeoutCallback = () => {
                    broadcasts += 1

                    addRemTime += (currTime - prevTime) * 1e3

                    prevTime = window.performance.now()

                    addOnEventGroup(
                      addPackets,
                      ITERATIONS,
                      broadcasts,
                      resolve,
                      addEvents,
                      pathIDs,
                      addData,
                    )
                  }

                  addEventListener = (event) => {
                    currTime = window.performance.now()

                    addEvents.push(addEventsCache ? { add: event } : null)

                    clearTimeout(timeout)
                    timeout = setTimeout(timeoutCallback, addEventGroupTimeout)
                  }
                  eraseEventListener = (event) => {
                    currTime = window.performance.now()

                    addEvents.push(addEventsCache ? { erase: event } : null)

                    clearTimeout(timeout)
                    timeout = setTimeout(timeoutCallback, addEventGroupTimeout)
                  }

                  updateRoom.addEventListener(
                    "addOrUpdatePath",
                    addEventListener,
                  )
                  updateRoom.addEventListener(
                    "removedIntervalsChange",
                    eraseEventListener,
                  )

                  prevTime = window.performance.now()

                  addOnInitBackend(addPackets, BLOCKSIZE)
                }),
            )
            .then(async () => {
              updateRoom.removeEventListener(
                "addOrUpdatePath",
                addEventListener,
              )
              addEventListener = null

              updateRoom.removeEventListener(
                "removedIntervalsChange",
                eraseEventListener,
              )
              eraseEventListener = null

              addPackets = null

              await dumpBSON(addEventsFilename, addEvents)
              addEvents = null

              addRemRAM = captureHeapUsage()

              erasePackets = await loadBSON(erasePacketsFilename)
            })
            .then(
              () =>
                new Promise((resolve) => {
                  let broadcasts = 0

                  let currTime

                  const timeoutCallback = () => {
                    broadcasts += 1

                    eraseRemTime += (currTime - prevTime) * 1e3

                    prevTime = window.performance.now()

                    eraseOnEventGroup(
                      erasePackets,
                      ITERATIONS,
                      broadcasts,
                      resolve,
                      eraseEvents,
                      pathIDs,
                      addData,
                      eraseData,
                    )
                  }

                  eraseEventListener = (event) => {
                    currTime = window.performance.now()

                    eraseEvents.push(eraseEventsCache ? { erase: event } : null)

                    clearTimeout(timeout)
                    timeout = setTimeout(
                      timeoutCallback,
                      eraseEventGroupTimeout,
                    )
                  }

                  updateRoom.addEventListener(
                    "removedIntervalsChange",
                    eraseEventListener,
                  )

                  prevTime = window.performance.now()

                  eraseOnInitBackend(erasePackets, BLOCKSIZE)
                }),
            )
            .then(async () => {
              updateRoom.removeEventListener(
                "removedIntervalsChange",
                eraseEventListener,
              )
              eraseEventListener = null

              erasePackets = null

              await dumpBSON(eraseEventsFilename, eraseEvents)
              eraseEvents = null

              eraseRemRAM = captureHeapUsage()

              updateRoom.disconnect()
              updateRoom = null

              disconnectUpdRAM = captureHeapUsage()
            })
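            // Synchronisation replay: a third client (syncRoom) applies the
            // recorded sync packets and its resulting events are collected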
            .then(
              () =>
                // eslint-disable-next-line no-async-promise-executor
                new Promise(async (resolve) => {
                  userID.uuid = "a2108f84-3785-4696-8dd5-fb89b38d4f7f"

                  syncRoom = await connect("sync", WasmCRDT, MockConnection)
                  getEventListener(
                    "sync",
                    "messageReceived",
                  )(createMessageReceivedEvent(handshake, "tw-ml"))

                  connectSyncRAM = captureHeapUsage()

                  syncPackets = await loadBSON(syncPacketsFilename)

                  return resolve()
                }),
            )
            .then(
              () =>
                new Promise((resolve) => {
                  addEventListener = (event) => {
                    const currTime = window.performance.now()

                    syncRemTime = (currTime - prevTime) * 1e3

                    syncEvents.push(syncEventsCache ? { add: event } : null)

                    clearTimeout(timeout)
                    timeout = setTimeout(
                      () =>
                        syncOnEventGroup(
                          resolve,
                          syncEvents,
                          pathIDs,
                          addData,
                          eraseData,
                        ),
                      syncEventGroupTimeout,
                    )
                  }
                  eraseEventListener = (event) => {
                    const currTime = window.performance.now()

                    syncRemTime = (currTime - prevTime) * 1e3

                    syncEvents.push(syncEventsCache ? { erase: event } : null)

                    clearTimeout(timeout)
                    timeout = setTimeout(
                      () =>
                        syncOnEventGroup(
                          resolve,
                          syncEvents,
                          pathIDs,
                          addData,
                          eraseData,
                        ),
                      syncEventGroupTimeout,
                    )
                  }

                  syncRoom.addEventListener("addOrUpdatePath", addEventListener)
                  syncRoom.addEventListener(
                    "removedIntervalsChange",
                    eraseEventListener,
                  )

                  prevTime = window.performance.now()

                  for (const syncPacket of syncPackets) {
                    getEventListener(
                      "sync",
                      "messageReceived",
                    )(createMessageReceivedEvent(syncPacket))
                  }
                }),
            )
            .then(async () => {
              syncRoom.removeEventListener("addOrUpdatePath", addEventListener)
              addEventListener = null

              syncRoom.removeEventListener(
                "removedIntervalsChange",
                eraseEventListener,
              )
              eraseEventListener = null

              syncPackets = null

              await dumpBSON(syncEventsFilename, syncEvents)
              syncEvents = null

              syncRemRAM = captureHeapUsage()

              syncRoom.disconnect()
              syncRoom = null

              disconnectSyncRAM = captureHeapUsage()
            })
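            // Reload the dumps to count packets and events, then report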
            .then(async () => {
              if (!BENCHMARK) {
                return
              }

              addPackets = (await loadBSON(addPacketsFilename)).reduce(
                (sum, packets) => sum + packets.length,
                0,
              )
              erasePackets = (await loadBSON(erasePacketsFilename)).reduce(
                (sum, packets) => sum + packets.length,
                0,
              )
              syncPackets = (await loadBSON(syncPacketsFilename)).length

              addEvents = (await loadBSON(addEventsFilename)).length
              eraseEvents = (await loadBSON(eraseEventsFilename)).length
              syncEvents = (await loadBSON(syncEventsFilename)).length

              const results = {
                addPath: {
                  timeLoc: addLocTime,
                  encodeRAM: addRAM,
                  packets: addPackets,
                  size: addSize,
                  timeRem: addRemTime,
                  decodeRAM: addRemRAM,
                  events: addEvents,
                },
                extendErasureIntervals: {
                  timeLoc: eraseLocTime,
                  encodeRAM: eraseRAM,
                  packets: erasePackets,
                  size: eraseSize,
                  timeRem: eraseRemTime,
                  decodeRAM: eraseRemRAM,
                  events: eraseEvents,
                },
                synchronisation: {
                  timeLoc: syncLocTime,
                  encodeRAM: syncRAM,
                  packets: syncPackets,
                  size: syncSize,
                  timeRem: syncRemTime,
                  decodeRAM: syncRemRAM,
                  events: syncEvents,
                },
              }

              printBenchmark(BENCHMARK, ITERATIONS, results)
              appendBenchmark(FILENAME, ITERATIONS, results)
            })
        )
      }),
    Promise.resolve(),
  )
}
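
// Sequential strategy: one path is drawn (or erased) per broadcast/event
// group, so each of the ITERATIONS iterations is timed individually.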

function addOnInitFrontendSequential(
  room,
  addPackets,
  pathIDs,
  addData,
  ITERATIONS, // eslint-disable-line no-unused-vars
  BLOCKSIZE, // eslint-disable-line no-unused-vars
) {
  addPackets.push([])

  const drawPathID = room.addPath(addData[0])
  pathIDs.push(drawPathID)

  for (let i = 1; i < addData.length; i++) {
    room.extendPath(drawPathID, addData[i])
  }

  room.endPath(drawPathID)
}

function addOnBroadcastGroupSequential(
  room,
  addPackets,
  pathIDs,
  addData,
  ITERATIONS,
  broadcasts,
  resolve,
) {
  if (broadcasts < ITERATIONS) {
    addPackets.push([])

    const drawPathID = room.addPath(addData[0])
    pathIDs.push(drawPathID)

    for (let i = 1; i < addData.length; i++) {
      room.extendPath(drawPathID, addData[i])
    }

    room.endPath(drawPathID)
  } else {
    resolve()
  }
}

function eraseOnInitFrontendSequential(
  room,
  erasePackets,
  pathIDs,
  eraseData,
  ITERATIONS, // eslint-disable-line no-unused-vars
  BLOCKSIZE, // eslint-disable-line no-unused-vars
) {
  erasePackets.push([])

  const erasePathID = pathIDs[0]

  for (let i = 0; i < eraseData.length; i++) {
    room.extendErasureIntervals(erasePathID, eraseData[i][0], eraseData[i][1])
  }
}

function eraseOnBroadcastGroupSequential(
  room,
  erasePackets,
  pathIDs,
  eraseData,
  ITERATIONS,
  broadcasts,
  resolve,
) {
  if (broadcasts < ITERATIONS) {
    erasePackets.push([])

    const erasePathID = pathIDs[broadcasts]

    for (let i = 0; i < eraseData.length; i++) {
      room.extendErasureIntervals(erasePathID, eraseData[i][0], eraseData[i][1])
    }
  } else {
    resolve()
  }
}

function syncOnSendGroup(syncPackets, resolve) {
  resolve()
}

function addOnInitBackendSequential(
  addPackets,
  BLOCKSIZE, // eslint-disable-line no-unused-vars
) {
  for (const packet of addPackets[0]) {
    getEventListener(
      "update",
      "messageReceived",
    )(createMessageReceivedEvent(packet))
  }
}

function addOnEventGroupSequential(
  addPackets,
  ITERATIONS,
  broadcasts,
  resolve,
  addEvents, // eslint-disable-line no-unused-vars
  pathIDs, // eslint-disable-line no-unused-vars
  addData, // eslint-disable-line no-unused-vars
) {
  if (broadcasts >= ITERATIONS) {
    return resolve()
  }

  for (const packet of addPackets[broadcasts]) {
    getEventListener(
      "update",
      "messageReceived",
    )(createMessageReceivedEvent(packet))
  }
}

function eraseOnInitBackendSequential(
  erasePackets,
  BLOCKSIZE, // eslint-disable-line no-unused-vars
) {
  for (const packet of erasePackets[0]) {
    getEventListener(
      "update",
      "messageReceived",
    )(createMessageReceivedEvent(packet))
  }
}

function eraseOnEventGroupSequential(
  erasePackets,
  ITERATIONS,
  broadcasts,
  resolve,
  eraseEvents, // eslint-disable-line no-unused-vars
  pathIDs, // eslint-disable-line no-unused-vars
  addData, // eslint-disable-line no-unused-vars
  eraseData, // eslint-disable-line no-unused-vars
) {
  if (broadcasts >= ITERATIONS) {
    return resolve()
  }

  for (const packet of erasePackets[broadcasts]) {
    getEventListener(
      "update",
      "messageReceived",
    )(createMessageReceivedEvent(packet))
  }
}

function syncOnEventGroup(
  resolve,
  syncEvents, // eslint-disable-line no-unused-vars
  pathIDs, // eslint-disable-line no-unused-vars
  addData, // eslint-disable-line no-unused-vars
  eraseData, // eslint-disable-line no-unused-vars
) {
  resolve()
}
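
// Parallel strategy: all ITERATIONS iterations are issued up front in
// BLOCKSIZE-sized chunks (yielding to the event loop between chunks via
// setTimeout), and the broadcasts/events are collected as a single
// timeout-delimited group.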

function addOnInitFrontendParallel(
  room,
  addPackets,
  pathIDs,
  addData,
  ITERATIONS,
  BLOCKSIZE,
) {
  addPackets.push([])

  // Chunk via setTimeout so that yjs can execute its transactions (the majority of the processing time) between blocks
  function addPath(sj) {
    if (sj >= ITERATIONS) return

    for (let j = sj; j < Math.min(sj + BLOCKSIZE, ITERATIONS); j++) {
      const drawPathID = room.addPath(addData[0])
      pathIDs.push(drawPathID)

      for (let i = 1; i < addData.length; i++) {
        room.extendPath(drawPathID, addData[i])
      }

      room.endPath(drawPathID)
    }

    setTimeout(addPath, 0, sj + BLOCKSIZE)
  }

  addPath(0)
}

function addOnBroadcastGroupParallel(
  room,
  addPackets,
  pathIDs,
  addData,
  ITERATIONS,
  broadcasts,
  resolve,
) {
  resolve()
}

function eraseOnInitFrontendParallel(
  room,
  erasePackets,
  pathIDs,
  eraseData,
  ITERATIONS,
  BLOCKSIZE,
) {
  erasePackets.push([])

  // Chunk via setTimeout so that yjs can execute its transactions (the majority of the processing time) between blocks
  function erasePath(sj) {
    if (sj >= ITERATIONS) return

    for (let j = sj; j < Math.min(sj + BLOCKSIZE, ITERATIONS); j++) {
      const erasePathID = pathIDs[j]

      for (let i = 0; i < eraseData.length; i++) {
        room.extendErasureIntervals(
          erasePathID,
          eraseData[i][0],
          eraseData[i][1],
        )
      }
    }

    setTimeout(erasePath, 0, sj + BLOCKSIZE)
  }

  erasePath(0)
}

function eraseOnBroadcastGroupParallel(
  room,
  erasePackets,
  pathIDs,
  eraseData,
  ITERATIONS,
  broadcasts,
  resolve,
) {
  resolve()
}

function addOnInitBackendParallel(addPackets, BLOCKSIZE) {
  const packets = addPackets[0]

  function addPath(sj) {
    if (sj >= packets.length) return

    for (let j = sj; j < Math.min(sj + BLOCKSIZE, packets.length); j++) {
      getEventListener(
        "update",
        "messageReceived",
      )(createMessageReceivedEvent(packets[j]))
    }

    setTimeout(addPath, 0, sj + BLOCKSIZE)
  }

  addPath(0)
}

function addOnEventGroupParallel(
  addPackets,
  ITERATIONS,
  broadcasts,
  resolve,
  addEvents, // eslint-disable-line no-unused-vars
  pathIDs, // eslint-disable-line no-unused-vars
  addData, // eslint-disable-line no-unused-vars
) {
  resolve()
}

function eraseOnInitBackendParallel(erasePackets, BLOCKSIZE) {
  const packets = erasePackets[0]

  function erasePath(sj) {
    if (sj >= packets.length) return

    for (let j = sj; j < Math.min(sj + BLOCKSIZE, packets.length); j++) {
      getEventListener(
        "update",
        "messageReceived",
      )(createMessageReceivedEvent(packets[j]))
    }

    setTimeout(erasePath, 0, sj + BLOCKSIZE)
  }

  erasePath(0)
}

function eraseOnEventGroupParallel(
  erasePackets,
  ITERATIONS,
  broadcasts,
  resolve,
  eraseEvents, // eslint-disable-line no-unused-vars
  pathIDs, // eslint-disable-line no-unused-vars
  addData, // eslint-disable-line no-unused-vars
  eraseData, // eslint-disable-line no-unused-vars
) {
  resolve()
}
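
// Verification callbacks: used by the single-iteration "communicates ..."
// tests to check the cached packets and events against the expected paths
// and erasure intervals.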

function syncOnSendGroupVerify(syncPackets, resolve) {
  let syncDonePacket = -1

  syncPackets.forEach((packet, i) => {
    if (
      packet.message.length == syncDone.message.length &&
      JSON.stringify(Object.assign({}, packet, { uuid: undefined })) ==
        JSON.stringify(syncDone)
    ) {
      equal(syncDonePacket, -1)

      syncDonePacket = i
    }
  })

  equal(syncDonePacket, syncPackets.length - 1)

  resolve()
}

function addOnEventGroupVerify(
  addPackets,
  ITERATIONS,
  broadcasts,
  resolve,
  addEvents,
  pathIDs,
  addData,
) {
  const updatePaths = {}
  const updateIntervals = {}

  for (const event of addEvents) {
    // Exactly one of `add` / `erase` must be set (booleans coerce to 0 / 1)
    equal(!event.add + !event.erase, 1)

    if (event.add) {
      const {
        detail: { id, points },
      } = event.add

      updatePaths[id] = points
    } else if (event.erase) {
      const {
        detail: { id, intervals },
      } = event.erase

      updateIntervals[id] = intervals
    }
  }

  equal(updatePaths, { [pathIDs[0]]: addData })
  equal(updateIntervals, { [pathIDs[0]]: {} })

  resolve()
}

function eraseOnEventGroupVerify(
  erasePackets,
  ITERATIONS,
  broadcasts,
  resolve,
  eraseEvents,
  pathIDs,
  addData,
  eraseData, // eslint-disable-line no-unused-vars
) {
  const updateIntervals = {}

  for (const event of eraseEvents) {
    notOk(event.add)
    ok(event.erase)

    const {
      detail: { id, intervals },
    } = event.erase

    updateIntervals[id] = intervals
  }

  // Expected: keys 0..addData.length-1, each mapping to [[0, 0]] for a
  // single-point dot or [[0, 1]] for a longer path
  equal(updateIntervals, {
    [pathIDs[0]]: Object.assign(
      {},
      new Array(addData.length).fill([[0, 0 + (addData.length > 1)]]),
    ),
  })

  resolve()
}

function syncOnEventGroupVerify(
  resolve,
  syncEvents,
  pathIDs,
  addData,
  eraseData, // eslint-disable-line no-unused-vars
) {
  const syncPaths = {}
  const syncIntervals = {}

  for (const event of syncEvents) {
    equal(!event.add + !event.erase, 1)

    if (event.add) {
      const {
        detail: { id, points },
      } = event.add

      syncPaths[id] = points
    } else if (event.erase) {
      const {
        detail: { id, intervals },
      } = event.erase

      syncIntervals[id] = intervals
    }
  }

  equal(syncPaths, { [pathIDs[0]]: addData })
  equal(syncIntervals, {
    [pathIDs[0]]: Object.assign(
      {},
      new Array(addData.length).fill([[0, 0 + (addData.length > 1)]]),
    ),
  })

  resolve()
}
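
// Each "benchmarks ..." test sweeps the full ITERATIONSLIST; the two
// "communicates ..." tests run a single iteration with event caching enabled
// so the verification callbacks can inspect the data.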

test("benchmark", async (t) => {
  await initFileSystem()

  const ITERATIONSLIST = [10, 25, 50, 75, 100, 250, 500]
  const BLOCKSIZE = 1000 // 10

  await t.test(
    "benchmarks a dot draw and erase update sequentially",
    async (/*t*/) => {
      return runBidirectionalBenchmark(
        "dot draw and erase [sequential]" /* BENCHMARK */,
        "plots/dot-seq-benchmark.tsv" /* FILENAME */,
        ITERATIONSLIST /* ITERATIONSLIST */,
        BLOCKSIZE /* BLOCKSIZE */,
        dotDraw /* addData */,
        dotErase /* eraseData */,
        addOnInitFrontendSequential /* addOnInitFrontend */,
        100 /* addBroadcastGroupTimeout */,
        addOnBroadcastGroupSequential /* addOnBroadcastGroup */,
        ".dot-seq-add-packets.json" /* addPacketsFilename */,
        eraseOnInitFrontendSequential /* eraseOnInitFrontend */,
        100 /* eraseBroadcastGroupTimeout */,
        eraseOnBroadcastGroupSequential /* eraseOnBroadcastGroup */,
        ".dot-seq-erase-packets.json" /* erasePacketsFilename */,
        1000 /* syncSendGroupTimeout */,
        syncOnSendGroup /* syncOnSendGroup */,
        ".dot-seq-sync-packets.json" /* syncPacketsFilename */,
        addOnInitBackendSequential /* addOnInitBackend */,
        100 /* addEventGroupTimeout */,
        addOnEventGroupSequential /* addOnEventGroup */,
        false /* addEventsCache */,
        ".dot-seq-add-events.json" /* addEventsFilename */,
        eraseOnInitBackendSequential /* eraseOnInitBackend */,
        100 /* eraseEventGroupTimeout */,
        eraseOnEventGroupSequential /* eraseOnEventGroup */,
        false /* eraseEventsCache */,
        ".dot-seq-erase-events.json" /* eraseEventsFilename */,
        1000 /* syncEventGroupTimeout */,
        syncOnEventGroup /* syncOnEventGroup */,
        false /* syncEventsCache */,
        ".dot-seq-sync-events.json" /* syncEventsFilename */,
      )
    },
  )

  await t.test(
    "benchmarks a dot draw and erase update in parallel",
    async (/*t*/) => {
      return runBidirectionalBenchmark(
        "dot draw and erase [parallel]" /* BENCHMARK */,
        "plots/dot-par-benchmark.tsv" /* FILENAME */,
        ITERATIONSLIST /* ITERATIONSLIST */,
        BLOCKSIZE /* BLOCKSIZE */,
        dotDraw /* addData */,
        dotErase /* eraseData */,
        addOnInitFrontendParallel /* addOnInitFrontend */,
        1000 /* addBroadcastGroupTimeout */,
        addOnBroadcastGroupParallel /* addOnBroadcastGroup */,
        ".dot-par-add-packets.json" /* addPacketsFilename */,
        eraseOnInitFrontendParallel /* eraseOnInitFrontend */,
        1000 /* eraseBroadcastGroupTimeout */,
        eraseOnBroadcastGroupParallel /* eraseOnBroadcastGroup */,
        ".dot-par-erase-packets.json" /* erasePacketsFilename */,
        1000 /* syncSendGroupTimeout */,
        syncOnSendGroup /* syncOnSendGroup */,
        ".dot-par-sync-packets.json" /* syncPacketsFilename */,
        addOnInitBackendParallel /* addOnInitBackend */,
        1000 /* addEventGroupTimeout */,
        addOnEventGroupParallel /* addOnEventGroup */,
        false /* addEventsCache */,
        ".dot-par-add-events.json" /* addEventsFilename */,
        eraseOnInitBackendParallel /* eraseOnInitBackend */,
        1000 /* eraseEventGroupTimeout */,
        eraseOnEventGroupParallel /* eraseOnEventGroup */,
        false /* eraseEventsCache */,
        ".dot-par-erase-events.json" /* eraseEventsFilename */,
        1000 /* syncEventGroupTimeout */,
        syncOnEventGroup /* syncOnEventGroup */,
        false /* syncEventsCache */,
        ".dot-par-sync-events.json" /* syncEventsFilename */,
      )
    },
  )

  await t.test(
    "benchmarks a path draw and erase update sequentially",
    async (/*t*/) => {
      return runBidirectionalBenchmark(
        "path draw and erase [sequential]" /* BENCHMARK */,
        "plots/path-seq-benchmark.tsv" /* FILENAME */,
        ITERATIONSLIST /* ITERATIONSLIST */,
        BLOCKSIZE /* BLOCKSIZE */,
        pathDraw /* addData */,
        pathErase /* eraseData */,
        addOnInitFrontendSequential /* addOnInitFrontend */,
        100 /* addBroadcastGroupTimeout */,
        addOnBroadcastGroupSequential /* addOnBroadcastGroup */,
        ".path-seq-add-packets.json" /* addPacketsFilename */,
        eraseOnInitFrontendSequential /* eraseOnInitFrontend */,
        100 /* eraseBroadcastGroupTimeout */,
        eraseOnBroadcastGroupSequential /* eraseOnBroadcastGroup */,
        ".path-seq-erase-packets.json" /* erasePacketsFilename */,
        1000 /* syncSendGroupTimeout */,
        syncOnSendGroup /* syncOnSendGroup */,
        ".path-seq-sync-packets.json" /* syncPacketsFilename */,
        addOnInitBackendSequential /* addOnInitBackend */,
        100 /* addEventGroupTimeout */,
        addOnEventGroupSequential /* addOnEventGroup */,
        false /* addEventsCache */,
        ".path-seq-add-events.json" /* addEventsFilename */,
        eraseOnInitBackendSequential /* eraseOnInitBackend */,
        100 /* eraseEventGroupTimeout */,
        eraseOnEventGroupSequential /* eraseOnEventGroup */,
        false /* eraseEventsCache */,
        ".path-seq-erase-events.json" /* eraseEventsFilename */,
        1000 /* syncEventGroupTimeout */,
        syncOnEventGroup /* syncOnEventGroup */,
        false /* syncEventsCache */,
        ".path-seq-sync-events.json" /* syncEventsFilename */,
      )
    },
  )

  await t.test(
    "benchmarks a path draw and erase update in parallel",
    async (/*t*/) => {
      return runBidirectionalBenchmark(
        "path draw and erase [parallel]" /* BENCHMARK */,
        "plots/path-par-benchmark.tsv" /* FILENAME */,
        ITERATIONSLIST /* ITERATIONSLIST */,
        BLOCKSIZE /* BLOCKSIZE */,
        pathDraw /* addData */,
        pathErase /* eraseData */,
        addOnInitFrontendParallel /* addOnInitFrontend */,
        5000 /* addBroadcastGroupTimeout */,
        addOnBroadcastGroupParallel /* addOnBroadcastGroup */,
        ".path-par-add-packets.json" /* addPacketsFilename */,
        eraseOnInitFrontendParallel /* eraseOnInitFrontend */,
        5000 /* eraseBroadcastGroupTimeout */,
        eraseOnBroadcastGroupParallel /* eraseOnBroadcastGroup */,
        ".path-par-erase-packets.json" /* erasePacketsFilename */,
        5000 /* syncSendGroupTimeout */,
        syncOnSendGroup /* syncOnSendGroup */,
        ".path-par-sync-packets.json" /* syncPacketsFilename */,
        addOnInitBackendParallel /* addOnInitBackend */,
        5000 /* addEventGroupTimeout */,
        addOnEventGroupParallel /* addOnEventGroup */,
        false /* addEventsCache */,
        ".path-par-add-events.json" /* addEventsFilename */,
        eraseOnInitBackendParallel /* eraseOnInitBackend */,
        5000 /* eraseEventGroupTimeout */,
        eraseOnEventGroupParallel /* eraseOnEventGroup */,
        false /* eraseEventsCache */,
        ".path-par-erase-events.json" /* eraseEventsFilename */,
        5000 /* syncEventGroupTimeout */,
        syncOnEventGroup /* syncOnEventGroup */,
        false /* syncEventsCache */,
        ".path-par-sync-events.json" /* syncEventsFilename */,
      )
    },
  )

  await t.test("communicates a single draw and erase update", async (/*t*/) => {
    return runBidirectionalBenchmark(
      null /* BENCHMARK */,
      null /* FILENAME */,
      [1] /* ITERATIONSLIST */,
      BLOCKSIZE /* BLOCKSIZE */,
      dotDraw /* addData */,
      dotErase /* eraseData */,
      addOnInitFrontendSequential /* addOnInitFrontend */,
      1000 /* addBroadcastGroupTimeout */,
      addOnBroadcastGroupParallel /* addOnBroadcastGroup */,
      ".dot-ver-add-packets.json" /* addPacketsFilename */,
      eraseOnInitFrontendSequential /* eraseOnInitFrontend */,
      1000 /* eraseBroadcastGroupTimeout */,
      eraseOnBroadcastGroupParallel /* eraseOnBroadcastGroup */,
      ".dot-ver-erase-packets.json" /* erasePacketsFilename */,
      1000 /* syncSendGroupTimeout */,
      syncOnSendGroupVerify /* syncOnSendGroup */,
      ".dot-ver-sync-packets.json" /* syncPacketsFilename */,
      addOnInitBackendSequential /* addOnInitBackend */,
      1000 /* addEventGroupTimeout */,
      addOnEventGroupVerify /* addOnEventGroup */,
      true /* addEventsCache */,
      ".dot-ver-add-events.json" /* addEventsFilename */,
      eraseOnInitBackendSequential /* eraseOnInitBackend */,
      1000 /* eraseEventGroupTimeout */,
      eraseOnEventGroupVerify /* eraseOnEventGroup */,
      true /* eraseEventsCache */,
      ".dot-ver-erase-events.json" /* eraseEventsFilename */,
      1000 /* syncEventGroupTimeout */,
      syncOnEventGroupVerify /* syncOnEventGroup */,
      true /* syncEventsCache */,
      ".dot-ver-sync-events.json" /* syncEventsFilename */,
    )
  })

  await t.test("communicates a path draw and erase update", async (/*t*/) => {
    return runBidirectionalBenchmark(
      null /* BENCHMARK */,
      null /* FILENAME */,
      [1] /* ITERATIONSLIST */,
      BLOCKSIZE /* BLOCKSIZE */,
      pathDraw /* addData */,
      pathErase /* eraseData */,
      addOnInitFrontendSequential /* addOnInitFrontend */,
      1000 /* addBroadcastGroupTimeout */,
      addOnBroadcastGroupParallel /* addOnBroadcastGroup */,
      ".path-ver-add-packets.json" /* addPacketsFilename */,
      eraseOnInitFrontendSequential /* eraseOnInitFrontend */,
      1000 /* eraseBroadcastGroupTimeout */,
      eraseOnBroadcastGroupParallel /* eraseOnBroadcastGroup */,
      ".path-ver-erase-packets.json" /* erasePacketsFilename */,
      1000 /* syncSendGroupTimeout */,
      syncOnSendGroupVerify /* syncOnSendGroup */,
      ".path-ver-sync-packets.json" /* syncPacketsFilename */,
      addOnInitBackendSequential /* addOnInitBackend */,
      1000 /* addEventGroupTimeout */,
      addOnEventGroupVerify /* addOnEventGroup */,
      true /* addEventsCache */,
      ".path-ver-add-events.json" /* addEventsFilename */,
      eraseOnInitBackendSequential /* eraseOnInitBackend */,
      1000 /* eraseEventGroupTimeout */,
      eraseOnEventGroupVerify /* eraseOnEventGroup */,
      true /* eraseEventsCache */,
      ".path-ver-erase-events.json" /* eraseEventsFilename */,
      1000 /* syncEventGroupTimeout */,
      syncOnEventGroupVerify /* syncOnEventGroup */,
      true /* syncEventsCache */,
      ".path-ver-sync-events.json" /* syncEventsFilename */,
    )
  })
})