EventStore-Client-NodeJS
Memory Leak on readStream
Library version: 3.3.1

Here is an example:
```js
const {
  EventStoreDBClient,
  jsonEvent,
  START,
  FORWARDS,
} = require("@eventstore/db-client");
// run with `node --expose-gc` so that global.gc() below is available
const heapdump = require("heapdump");

const client = new EventStoreDBClient(
  {
    endpoint: "localhost:2113",
  },
  {
    insecure: true,
  }
);

(async () => {
  const streamName = "es_supported_clients";

  const event = jsonEvent({
    type: "grpc-client",
    data: {
      languages: ["typescript", "javascript"],
      runtime: "NodeJS",
    },
  });

  await client.appendToStream(streamName, [event]);

  for (let index = 0; index < 10000; index++) {
    const events = client.readStream(streamName, {
      fromRevision: START,
      direction: FORWARDS,
      maxCount: 10,
    });

    // if the for await loop is removed, the memory growth is even bigger
    for await (const event of events) {
    }

    global.gc();
    console.log(
      // convert to MB
      Math.round((process.memoryUsage().heapUsed / 1024 / 1024) * 100) / 100
    );
  }

  heapdump.writeSnapshot(function (err, filename) {
    console.log("dump written to", filename);
  });
})();
```
Any idea where this comes from?
Hi @matchish,
I can't seem to reproduce this; beyond ~10,000 reads, memory levels off at around 13 MB.
nodejs: v16.13.0
@eventstore/db-client: v3.3.1

mem.js:
```js
const {
  EventStoreDBClient,
  jsonEvent,
  START,
  FORWARDS,
} = require("@eventstore/db-client");

const client = new EventStoreDBClient(
  {
    endpoint: "localhost:2113",
  },
  {
    insecure: true,
  }
);

const streamName = "es_supported_clients";

(async () => {
  const event = jsonEvent({
    type: "grpc-client",
    data: {
      languages: ["typescript", "javascript"],
      runtime: "NodeJS",
    },
  });

  await client.appendToStream(streamName, [event]);

  for (let index = 0; index < 100_000_000; index++) {
    const events = client.readStream(streamName, {
      fromRevision: START,
      direction: FORWARDS,
      maxCount: 10,
    });

    for await (const event of events) {
    }

    if (index % 100 === 0) {
      global.gc();
      console.log(
        // convert to MB
        Math.round((process.memoryUsage().heapUsed / 1024 / 1024) * 100) / 100
      );
    }
  }

  process.exit();
})();
```
> // if the for await loop is removed, the memory growth is even bigger

This is due to how Node.js streams work. The stream fills an internal buffer, then applies backpressure to the server until something has been pulled from that buffer. So if you don't consume the stream via the async iterator (or an event listener, etc.), you are filling that buffer and never disposing of the read events in it.
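As an illustration of that buffering, here is a minimal sketch using plain Node.js streams (not the EventStoreDB client; the `highWaterMark` of 16 is an arbitrary number for the demo). The `read()` implementation is only called while the internal buffer has room, so the producer stalls under backpressure until a consumer drains the buffer:

```js
const { Readable } = require("stream");

let produced = 0;
const stream = new Readable({
  objectMode: true,
  highWaterMark: 16, // internal buffer capacity, in objects
  read() {
    // only called while the internal buffer has room (backpressure)
    this.push({ n: produced++ });
  },
});

(async () => {
  // without this loop, read() stops once 16 objects are buffered,
  // and those objects sit on the heap until something consumes them
  for await (const item of stream) {
    if (item.n >= 100) break; // consume, then stop (destroys the stream)
  }
  console.log("produced:", produced); // slightly over 100, due to buffering
})();
```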
I'll check again
You are right, it was not a memory leak, sorry.
@George-Payne If I modify your example to read a stream that does not exist, we get a different picture.

(graph: the red line shows memory usage when reading the non-existent stream)
```js
const {
  EventStoreDBClient,
  START,
  FORWARDS,
} = require("@eventstore/db-client");

const client = new EventStoreDBClient(
  {
    endpoint: "localhost:2113",
  },
  {
    insecure: true,
  }
);

// note the semicolon: without it, ASI treats the following
// `(async () => ...)` as a call on the result of Math.random()
const streamName = "mem-debug-not-exists-" + Math.random();

(async () => {
  for (let index = 0; index < 100_000_000; index++) {
    const events = client.readStream(streamName, {
      fromRevision: START,
      direction: FORWARDS,
      maxCount: 10,
    });

    try {
      for await (const event of events) {
      }
    } catch (error) {
      events.cancel();
      events.destroy();
    }

    if (index % 100 === 0) {
      global.gc();
      console.log(
        // convert to MB
        Math.round((process.memoryUsage().heapUsed / 1024 / 1024) * 100) / 100
      );
    }
  }

  process.exit();
})();
```
How do I close the stream properly to free up memory?
Thanks @matchish, good find.
The fix / bit of explanation is here: #310
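For anyone who lands here before upgrading, a minimal sketch of consuming a stream that may not exist. The `readAllEvents` helper is purely illustrative: it assumes the `StreamNotFoundError` export from `@eventstore/db-client`, and the `destroy()` call (inherited from Node's Readable) is only defensive cleanup; with the fix from #310, catching the error should be enough on its own.

```js
const {
  EventStoreDBClient,
  StreamNotFoundError,
  START,
  FORWARDS,
} = require("@eventstore/db-client");

const client = new EventStoreDBClient(
  { endpoint: "localhost:2113" },
  { insecure: true }
);

// hypothetical helper: read a whole stream, treating "not found" as empty
async function readAllEvents(streamName) {
  const events = client.readStream(streamName, {
    fromRevision: START,
    direction: FORWARDS,
    maxCount: 10,
  });
  const collected = [];
  try {
    for await (const resolved of events) {
      collected.push(resolved);
    }
  } catch (error) {
    if (error instanceof StreamNotFoundError) {
      return collected; // missing stream: nothing to read
    }
    throw error;
  } finally {
    // defensive: readStream returns a Node.js Readable, so destroy()
    // releases anything still buffered if we exited early
    events.destroy();
  }
  return collected;
}
```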
@George-Payne Do you have an ETA for a release with the fix?
@matchish #312 is the last PR before the v4.0.0 release, so it should be early next week.
@matchish v4.0.0 has been released, including this fix.

https://www.npmjs.com/package/@eventstore/db-client/v/4.0.0
https://github.com/EventStore/EventStore-Client-NodeJS/releases/tag/v4.0.0
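To pick up the fix, upgrading via npm should be all that's needed: `npm install @eventstore/db-client@^4.0.0`.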