What about the speed of Bun.js?
Let’s take a look at the performance of Bun and Node.js through a few examples.
Writing and reading files
As an example, we’ll use a simple code for writing and reading files:
// Promise-based fs API so file operations can be awaited.
const fs = require('fs').promises;
const FILE_SIZES = [1024, 10 * 1024, 100 * 1024]; // 1KB, 10KB, 100KB
// '{size}' in the template is replaced with the byte count for each run.
const FILE_NAME_TEMPLATE = 'testfile_{size}.txt';
/**
 * Writes `size` bytes of the letter 'a' to a size-specific test file,
 * timing the write with console.time/console.timeEnd.
 * @param {number} size - Number of bytes to write.
 * @throws {Error} Wraps any fs failure with a descriptive message.
 */
async function writeFile(size) {
  const fileName = FILE_NAME_TEMPLATE.replace('{size}', size);
  const payload = Buffer.alloc(size, 'a'); // buffer filled with the letter 'a'
  console.time(`Writing ${size} bytes`);
  try {
    await fs.writeFile(fileName, payload);
    console.timeEnd(`Writing ${size} bytes`);
    console.log(`Written ${size} bytes to ${fileName}`);
  } catch (cause) {
    throw new Error(`Error writing file: ${cause.message}`);
  }
}
/**
 * Reads the size-specific test file back, timing the read.
 * @param {number} size - Expected file size in bytes (used to build the name).
 * @throws {Error} Wraps any fs failure with a descriptive message.
 */
async function readFile(size) {
  const fileName = FILE_NAME_TEMPLATE.replace('{size}', size);
  console.time(`Reading ${size} bytes`);
  try {
    const contents = await fs.readFile(fileName);
    console.timeEnd(`Reading ${size} bytes`);
    console.log(`Read ${contents.length} bytes from ${fileName}`);
  } catch (cause) {
    throw new Error(`Error reading file: ${cause.message}`);
  }
}
/**
 * Runs the write-then-read benchmark sequentially for each configured size.
 * Sizes are processed in order so the timings never interleave.
 */
async function performIOOperations() {
  for (let i = 0; i < FILE_SIZES.length; i += 1) {
    const size = FILE_SIZES[i];
    await writeFile(size);
    await readFile(size);
  }
}
// Kick off the benchmark; report completion or surface the first failure.
performIOOperations()
.then(() => console.log('I/O operations completed'))
.catch((error) => console.error(`An error occurred: ${error.message}`));
When running this code in Node.js, we get the following results:
The same code, when executed with Bun, will have these results:
Another thing that Bun runtime offers us out-of-the-box is the output formatting to the terminal.
The results show that Bun doesn’t always outperform Node.js. In this particular example, we made no changes to the written code. However, Bun offers its own API for working with files, including Bun.file(), Bun.write(), etc. You can read more about the API in Bun’s official documentation. Rewriting the example with Bun’s native API gives the following code:
// The Node.js fs import is no longer needed — Bun's global API replaces it.
//const fs = require('fs').promises;
const FILE_SIZES = [1024, 10 * 1024, 100 * 1024]; // 1KB, 10KB, 100KB
// '{size}' in the template is replaced with the byte count for each run.
const FILE_NAME_TEMPLATE = 'testfile_{size}.txt';
/**
 * Writes `size` bytes of the letter 'a' to a size-specific test file using
 * Bun's native write API, timing the operation.
 * @param {number} size - Number of bytes to write.
 * @throws {Error} Wraps any write failure with a descriptive message.
 */
async function writeFile(size) {
  const fileName = FILE_NAME_TEMPLATE.replace('{size}', size);
  const payload = Buffer.alloc(size, 'a'); // buffer filled with the letter 'a'
  console.time(`Writing ${size} bytes`);
  try {
    await Bun.write(fileName, payload);
    console.timeEnd(`Writing ${size} bytes`);
    console.log(`Written ${size} bytes to ${fileName}`);
  } catch (cause) {
    throw new Error(`Error writing file: ${cause.message}`);
  }
}
/**
 * Reads the size-specific test file using Bun's native file API, timing it.
 *
 * Note: `Bun.file()` only creates a lazy file reference and performs no I/O;
 * the actual read happens when `.text()` is awaited. Awaiting the BunFile
 * itself (as the original code did) times nothing, and `data.length` would
 * be undefined because BunFile exposes `.size`, not `.length`.
 * @param {number} size - Expected file size in bytes (used to build the name).
 * @throws {Error} Wraps any read failure with a descriptive message.
 */
async function readFile(size) {
  const fileName = FILE_NAME_TEMPLATE.replace('{size}', size);
  console.time(`Reading ${size} bytes`);
  try {
    const data = await Bun.file(fileName).text();
    console.timeEnd(`Reading ${size} bytes`);
    console.log(`Read ${data.length} bytes from ${fileName}`);
  } catch (err) {
    throw new Error(`Error reading file: ${err.message}`);
  }
}
/**
 * Drives the Bun write-then-read benchmark for every configured file size,
 * one size at a time so the timers do not overlap.
 */
async function performIOOperations() {
  for (const byteCount of FILE_SIZES) {
    await writeFile(byteCount);
    await readFile(byteCount);
  }
}
// Kick off the benchmark; report completion or surface the first failure.
performIOOperations()
.then(() => console.log('I/O operations completed'))
.catch((error) => console.error(`An error occurred: ${error.message}`));
And the following results:
Utilizing Bun’s native API significantly improved performance.
The server
Let’s assess how a simple test server with mocking data will work using Node.js and Bun.js. As an illustration, let’s take the following code:
// HTTP benchmark: a minimal Express CRUD API over an in-memory data store.
const express = require('express');
const app = express();
const PORT = 3000;
// Sample in-memory data store
let posts = [
{ id: 1, title: 'First Post', content: 'This is the content of the first post.' },
{ id: 2, title: 'Second Post', content: 'This is the content of the second post.' },
// Add more sample posts as needed...
];
// Middleware to parse JSON requests
app.use(express.json());
// GET /posts — return the whole collection as JSON.
app.get('/posts', (_req, res) => res.json(posts));
// GET /posts/:id — look up one post; respond 404 when the id is unknown.
app.get('/posts/:id', (req, res) => {
  const wantedId = parseInt(req.params.id);
  const match = posts.find((entry) => entry.id === wantedId);
  if (!match) {
    return res.status(404).send('Post not found.');
  }
  res.json(match);
});
// POST /posts — create a post with a unique id.
// Deriving the id from `posts.length + 1` produces duplicate ids as soon as
// a post has been deleted; using max(existing ids) + 1 keeps ids unique.
app.post('/posts', (req, res) => {
  const nextId = posts.reduce((max, p) => Math.max(max, p.id), 0) + 1;
  const post = {
    id: nextId,
    title: req.body.title,
    content: req.body.content,
  };
  posts.push(post);
  res.status(201).json(post);
});
// PUT /posts/:id — partially update a post; respond 404 when id is unknown.
// `??` (nullish coalescing) replaces `||` so that explicitly provided falsy
// values — e.g. an empty-string title — still update the field; `||` silently
// discarded them.
app.put('/posts/:id', (req, res) => {
  const post = posts.find(p => p.id === parseInt(req.params.id));
  if (!post) return res.status(404).send('Post not found.');
  post.title = req.body.title ?? post.title;
  post.content = req.body.content ?? post.content;
  res.json(post);
});
// DELETE /posts/:id — drop the matching post (if any); always responds 204.
app.delete('/posts/:id', (req, res) => {
  const targetId = parseInt(req.params.id);
  posts = posts.filter((entry) => entry.id !== targetId);
  res.status(204).send();
});
// Start the HTTP server. The original log string contained a markdown
// autolink artifact (`<http://localhost>:${PORT}/`) and printed a broken URL.
app.listen(PORT, () => {
  console.log(`Server running at http://localhost:${PORT}/`);
});
Here’s the result for Node.js:
And for Bun:
Even though this server lacks complex logic and operates locally, Bun still handles requests noticeably faster than Node.js in this test.
Working with data
Let’s take an example where we need to process a large object. For this purpose, I created a 45 MB JSON file containing an array of objects like this:
{
"userId": "b1ba31ac-25ce-4432-8e9f-b4cd89da167a",
"session": "a0dd197b-22bb-4934-8c60-2408912a2a16",
"timestamp": "2023-09-08T07:43:00.421Z",
"activity": "LOGOUT",
"ip": "214.76.47.2",
"userAgent": "Mozilla/5.0 (X11; Linux i686; rv:7.9) Gecko/20100101 Firefox/7.9.5",
"location": "Mohammadchester, Virginia, Andorra"
},
Here is the code for Node.js:
import fs from 'fs/promises';
// Load the ~45 MB log fixture and parse it in one step (top-level await).
const logs = JSON.parse(await fs.readFile('../userLogs.json', 'utf-8'));
/**
 * Returns the log entries whose userAgent string mentions 'Windows'.
 * @param {Array<{userAgent: string}>} logs - Parsed log entries.
 * @returns {Array<object>} Matching entries, in original order.
 */
function detectWindowsUsers(logs) {
  const isWindows = (entry) => entry.userAgent.includes('Windows');
  return logs.filter(isWindows);
}
// Time a single filtering pass over the full data set.
console.time('detectWindowsUsers');
const windowsUsers = detectWindowsUsers(logs);
console.timeEnd('detectWindowsUsers');
Running it, we got:
For Bun, we used the following code:
const logs = JSON.parse(await Bun.file('../userLogs.json', 'utf-8').text());
// Keep only the entries produced by Windows user agents.
function detectWindowsUsers(logs) {
  return logs.filter(({ userAgent }) => userAgent.includes('Windows'));
}
// Time the same filtering pass under Bun for comparison.
console.time('detectWindowsUsers');
const windowsUsers = detectWindowsUsers(logs);
console.timeEnd('detectWindowsUsers');
And we got the following result:
In this test, the results were nearly identical.
Regular expressions
On the same file, we ran this code:
import fs from 'fs/promises';
// Parse the same 45 MB userLogs.json fixture used in the previous benchmark.
const logs = JSON.parse(await fs.readFile('../userLogs.json', 'utf-8'));
/**
 * Extracts the unique IPv4 addresses found in the logs' `ip` fields.
 *
 * The original pattern used doubled backslashes (`\\b\\d{1,3}...`) inside a
 * regex LITERAL, which matches a literal backslash followed by 'b'/'d' and
 * therefore never matches an IP; `.match()` then returns null for every
 * entry and the result degenerated to `[null]`. The single-backslash form
 * below is the intended word-boundary/digit pattern, and `?? []` drops
 * non-matching entries instead of propagating null.
 * @param {Array<{ip: string}>} logs - Parsed log entries.
 * @returns {string[]} Unique IPv4 addresses, in first-seen order.
 */
function detectIPv4(logs) {
  const ipv4Pattern = /\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b/g;
  const allIPs = logs.flatMap(log => log.ip.match(ipv4Pattern) ?? []);
  return [...new Set(allIPs)]; // Unique IPs
}
/**
 * Returns the log entries whose userAgent matches the Chrome pattern.
 * @param {Array<{userAgent: string}>} logs - Parsed log entries.
 * @returns {Array<object>} Matching entries, in original order.
 */
function detectChromeUsers(logs) {
  const chromePattern = /Chrome/;
  return logs.filter((entry) => chromePattern.test(entry.userAgent));
}
// Time each detector independently over the full data set.
// Benchmark IPv4 Detection
console.time('IPv4 Detection');
const ipv4Addresses = detectIPv4(logs);
console.timeEnd('IPv4 Detection');
console.log(`Detected ${ipv4Addresses.length} unique IPv4 addresses.`);
// Benchmark Chrome User Detection
console.time('Chrome User Detection');
const chromeUsers = detectChromeUsers(logs);
console.timeEnd('Chrome User Detection');
console.log(`Detected ${chromeUsers.length} Chrome users.`);
Here’s the result for Node.js:
And for Bun (with corresponding changes in the file reading):
In this case, Bun showed a much better performance.
So, what’s the verdict?
Bun is indeed a young and highly ambitious technology. To capture the audience’s attention, its creators have chosen catchy slogans. The promise is a swift Node.js replacement with Bun, backward compatibility with most of the Node.js ecosystem, and a significant performance boost — isn’t that impressive?
While it might seem impressive in promotional videos, real-world project implementation reveals a different reality. In practice, we are witnessing the following: replacing Node.js with Bun isn’t always straightforward and requires a lot of effort; performance surpasses Node.js in many aspects, but not always; and some npm packages work while others don’t. At the time of writing this article, the Bun repository had more than 1800 open tickets.
These are common challenges for a young technology, and the developers behind Bun.js have put remarkable effort into it. But there’s still a lot more to accomplish. Besides simply fixing bugs, it is necessary to build a strong community that will provide ongoing support and further develop Bun. Despite its popularity, the list of significant contributors is relatively short, with the primary contributor being Bun’s creator.
Figure 2. The dynamics of Bun’s contributors community development
It’s also important that any large companies willing to take a chance on this technology actively contribute to developing pertinent marketing cases to promote its popularization further.
So for now, Bun is merely a modest local uprising with the potential to transform into a revolution. While you can experiment with Bun for specific utilities or small sub-projects today, it’s not quite time to completely migrate and use it for large codebases.
Fancy an in-depth discussion of why Node.js isn’t facing any hurdles in the future and might be one of the most stable tech options for your project? Schedule a one-on-one session with Oleksandr by filling out the contact form.