
PGlite hangs at around 11k rows

Vladlan opened this issue 7 months ago

To reproduce, add data to PGlite with the following example (save it as pglite/packages/pglite/examples/dump-restore-example.html):

<!doctype html>
<html>
  <head>
    <title>PGlite Live Changes Example</title>
    <link rel="stylesheet" href="./styles.css" />
    <script src="./utils.js"></script>
    <script type="importmap">
      {
        "imports": {
          "@electric-sql/pglite": "../dist/index.js",
          "@electric-sql/pglite/live": "../dist/live/index.js"
        }
      }
    </script>
    <style>
      #output {
        font-family: monospace;
        overflow: auto;
        max-height: 250px;
        border: 1px solid #eee;
        border-radius: 0.5rem;
        padding: 10px;
        margin: 1rem 0;
        font-size: 9px;
        line-height: 11px;
      }
      button {
        margin: 0 0.5rem 0 0;
      }
    </style>
  </head>
  <body>
    <h1>PGlite Live Changes Example</h1>
    <button id="start">start</button>
    <button id="add" disabled>Add</button>
    <button id="add1k" disabled>Add 1k</button>
    <button id="remove">Remove Random Row</button>
    <button id="dump">Create Dump</button>
    <button id="restore">Restore</button>
    <button id="check">Check</button>
    <a id="downloadLink">Download SQL Dump</a>
    <input type="file" id="fileInput" style="display: none" accept=".tar.gz" />

    <pre id="output"></pre>
    <div class="script-plus-log">
      <script type="module">
        import { PGlite } from '../dist/index.js'
        import { live } from '../dist/live/index.js'
        import { pgDump } from './pglite-tools/pg_dump.js'

        const output = document.getElementById('output')
        const startBtn = document.getElementById('start')
        const addBtn = document.getElementById('add')
        const add1kBtn = document.getElementById('add1k')
        const removeBtn = document.getElementById('remove')
        const dumpBtn = document.getElementById('dump')
        const restoreBtn = document.getElementById('restore')
        const checkBtn = document.getElementById('check')
        const downloadLink = document.getElementById('downloadLink')
        downloadLink.style.display = 'block'
        downloadLink.style.marginTop = '20px'
        let counter = 1_000
        const MAX_ELEMS_INSERT = 1_000
        let lastClicked = 0
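        // pad each inserted name to ~1 kB so the result set grows large quickly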
        const nameLength = 1_000
        const nameSuffix = '-'.repeat(nameLength)

        let pg = new PGlite({
          extensions: {
            live,
          },
        })
        window.pg = pg

        startBtn.addEventListener('click', async () => {
          lastClicked = performance.now()
          await pg.exec(`
            CREATE TABLE IF NOT EXISTS test (
              id SERIAL PRIMARY KEY,
              rand float,
              name TEXT
            );
            INSERT INTO test (name, rand)
            SELECT 'test' || i || '${nameSuffix}', random()
            FROM generate_series(1, ${counter}) AS i;
          `)

          startBtn.disabled = true
          addBtn.disabled = false
          add1kBtn.disabled = false
          pg.live.changes(
            'SELECT * FROM test ORDER BY rand;',
            null,
            'id',
            (changes) => {
              const time = performance.now() - lastClicked
              console.log(`Update took ${time}ms`)
              // output.textContent = JSON.stringify(changes, null, 2)
            },
          )
        })

        addBtn.addEventListener('click', async () => {
          lastClicked = performance.now()
          await pg.query(
            'INSERT INTO test (name, rand) VALUES ($1, random());',
            [`test${++counter}${nameSuffix}`],
          )
        })

        add1kBtn.addEventListener('click', async () => {
          lastClicked = performance.now()
          const [dbData] = await pg.exec('SELECT * FROM test;')
          const rowsAmount = dbData.rows.length;
          await pg.exec(`
            INSERT INTO test (name, rand)
            SELECT 'test' || i || '${nameSuffix}', random()
            FROM generate_series(${rowsAmount + 1}, ${rowsAmount + MAX_ELEMS_INSERT}) AS i;
          `)
        })

        removeBtn.addEventListener('click', async () => {
          lastClicked = performance.now()
          try {
            await pg.exec(`
              DELETE FROM test 
              WHERE id = (
                SELECT id 
                FROM test 
                ORDER BY random() 
                LIMIT 1
              );
            `)
          } catch (e) {
            console.error('Error removing random row:', e)
          }
        })

        checkBtn.addEventListener('click', async () => {
          lastClicked = performance.now()
          try {
            const [dbData] = await pg.exec('SELECT * FROM test;')
            console.log('dbData: ', dbData);
            console.log(`Rows total: ${dbData.rows.length}`)
          } catch (e) {
            console.error(e)
          }
          const time = performance.now() - lastClicked
          console.log(`Query took ${time}ms`)
        })

        dumpBtn.addEventListener('click', async () => {
          const file = await pg.dumpDataDir()
          const url = URL.createObjectURL(file)
          downloadLink.href = url
          downloadLink.download = file.name
        })

        const fileInput = document.getElementById('fileInput')

        restoreBtn.addEventListener('click', async () => {
          fileInput.click()
        })

        fileInput.addEventListener('change', async (event) => {
          try {
            const file = event.target.files[0]
            if (!file) return
            console.log('file: ', file)

            pg = new PGlite({
              loadDataDir: file,
              extensions: {
                live,
              },
            })

            // Reset the file input
            fileInput.value = ''

            // Refresh the table data display
            const [dbData] = await pg.exec('SELECT * FROM test;')
            console.log('dbData: ', dbData)
            console.log(`Rows after restore: ${dbData.rows.length}`)
            console.log('Database restored successfully')

            // Enable/disable buttons appropriately
            startBtn.disabled = true
            addBtn.disabled = false
          } catch (error) {
            console.error('Error restoring database:', error)
          }
        })
      </script>
      <div id="log"></div>
    </div>
  </body>
</html>

At around 11k rows the database hangs (tested on a MacBook Air M4 with 24 GB RAM).

(image attached)

Vladlan · May 05 '25

For now, results are arbitrarily limited to 12 MiB in current PGlite builds up to 0.3.0, not by the available free memory. Soon it should handle queries and results as large as half of the free JS memory, the upper limit being the WASM free memory size (depending on query/result types).

pmp-p · May 07 '25
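
A possible workaround on the affected builds (my own sketch, not something suggested in this thread): fetch the table in pages so that no single result approaches the ~12 MiB cap. With the example's ~1 kB names, 11k rows comes to roughly 11 MB in a single SELECT *, which is right at that limit. The snippet below can be pasted into the example page's devtools console (the example exposes window.pg); the batch size is an assumption.

// Hypothetical workaround for builds up to 0.3.0: keyset-paginate the table so
// each individual result stays well below the ~12 MiB cap.
const BATCH = 2_000 // assumption: ~2 MB per page with the example's ~1 kB rows
const allRows = []
let lastId = 0
while (true) {
  const { rows } = await window.pg.query(
    'SELECT * FROM test WHERE id > $1 ORDER BY id LIMIT $2;',
    [lastId, BATCH],
  )
  if (rows.length === 0) break
  allRows.push(...rows)
  lastId = rows[rows.length - 1].id
}
console.log(`Fetched ${allRows.length} rows without a single oversized result`)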

0.3.5 should support much more (a priori ~300 MiB queries and up to 1 GiB results).

pmp-p · May 21 '25
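
For context on these figures (again my own sketch, not from the thread): you can estimate how large a full SELECT * result would be before issuing it and compare that against the ~12 MiB and ~1 GiB ceilings. The query below assumes the test table from the example and can also be pasted into the example page's console.

// Rough estimate of the text payload a `SELECT * FROM test` would return,
// to compare against the ~12 MiB (builds up to 0.3.0) and ~1 GiB (0.3.5+) limits.
const { rows: [estimate] } = await window.pg.query(`
  SELECT count(*) AS row_count,
         pg_size_pretty(sum(octet_length(name))) AS name_payload
  FROM test;
`)
console.log(estimate)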

Hey @Vladlan, as @pmp-p said, this should be resolved in versions > 0.3.5.

Will close this now, but please let us know if you still encounter any issues.

tdrz · May 27 '25