Node.js Complete Guide 2025
Advertisement
Node.js has evolved dramatically. In 2025, it's not just a platform for web servers — it's a complete ecosystem for building everything from CLI tools to microservices to IoT applications.
Node.js Core Concepts
Node.js uses a single-threaded, event-driven I/O model. Understanding this model is crucial:
// Event loop demonstration: synchronous code runs to completion first,
// then the microtask queue (resolved promises) is drained, and only
// then do macrotasks (timers) fire — even a 0ms timeout.
console.log("1. Start");
setTimeout(() => console.log("2. Timeout"), 0);
Promise.resolve().then(() => console.log("3. Promise"));
console.log("4. Sync code");
// Output:
// 1. Start
// 4. Sync code
// 3. Promise (microtask queue — drained before any timer callback)
// 2. Timeout (macrotask queue — runs after all microtasks)
- Node.js Core Concepts
- Async/Await and Promises
- File System Operations
- Working with Paths
- HTTP Server
- Environment Variables
- Streams
- Child Processes
- Error Handling
- Testing Node.js Applications
- Performance Best Practices
- Production Deployment
- FAQ
Async/Await and Promises
// Promise basics: simulate an async user lookup that can fail.
// Resolves with a user record for positive ids, rejects otherwise.
const fetchUser = (id) => {
  if (id > 0) {
    return Promise.resolve({ id, name: "Alice" });
  }
  return Promise.reject(new Error("Invalid ID"));
};
// Async/await (modern approach)
/**
 * Fetch a user by id and log their name.
 * @param {number} id - user id to look up
 * @returns {Promise<object|undefined>} the user, or undefined when the
 *   lookup failed (the error is logged, not rethrown)
 */
async function getUserInfo(id) {
  let user;
  try {
    user = await fetchUser(id);
  } catch (error) {
    console.error("Failed to fetch user:", error.message);
    return undefined;
  }
  console.log(`User: ${user.name}`);
  return user;
}
// Handle multiple promises
/**
 * Fetch several users concurrently.
 *
 * The previous version fetched every id twice — once in a sequential
 * loop and once via Promise.all — and also started a Promise.race,
 * discarding both extra results. That is duplicate I/O with unused
 * locals; a single Promise.all produces the identical return value
 * with exactly one fetch per id.
 *
 * @param {number[]} ids - user ids to fetch
 * @returns {Promise<object[]|undefined>} users in input order, or
 *   undefined if any fetch failed (the error is logged, not rethrown)
 */
async function getMultipleUsers(ids) {
  try {
    // Parallel; Promise.all fails fast on the first rejection.
    return await Promise.all(ids.map((id) => fetchUser(id)));
  } catch (error) {
    console.error(error);
  }
}
File System Operations
import { promises as fs } from "fs";
// Read file
/**
 * Load and parse config.json from the current working directory.
 * Any failure (missing file, unreadable file, invalid JSON) is logged
 * and the function resolves to undefined instead of throwing.
 */
async function readConfig() {
  try {
    return JSON.parse(await fs.readFile("config.json", "utf-8"));
  } catch (error) {
    console.error("Failed to read config:", error);
  }
}
// Write file
/** Serialize data as pretty-printed (2-space) JSON and write it to filename. */
async function saveData(filename, data) {
  const json = JSON.stringify(data, null, 2);
  await fs.writeFile(filename, json);
}
// Directory operations
/** Resolve to the names of the entries directly inside dir. */
async function listFiles(dir) {
  return fs.readdir(dir);
}
// Stats
/** Resolve to the size of file in bytes. */
async function getFileSize(file) {
  const { size } = await fs.stat(file);
  return size;
}
Working with Paths
import path from "path";
// Build a platform-aware path, then pull it apart piece by piece.
const filePath = path.join("/home", "user", "documents", "file.txt");
const baseName = path.basename(filePath); // "file.txt"
const extension = path.extname(filePath); // ".txt"
const dirName = path.dirname(filePath); // everything before the base name
const absolute = path.resolve("./relative/path"); // anchored at process.cwd()
// Parse path into all of its components at once
const parsed = path.parse("/home/user/documents/file.txt");
// {
//   root: '/',
//   dir: '/home/user/documents',
//   base: 'file.txt',
//   ext: '.txt',
//   name: 'file'
// }
HTTP Server
import http from "http";
// Bare-bones JSON API: two GET routes plus a JSON 404 fallback.
const server = http.createServer((req, res) => {
  // Write status + JSON body in one place so every route responds the same way.
  const sendJson = (status, payload) => {
    res.writeHead(status, { "Content-Type": "application/json" });
    res.end(JSON.stringify(payload));
  };
  if (req.method === "GET" && req.url === "/") {
    sendJson(200, { message: "Hello" });
  } else if (req.method === "GET" && req.url === "/api/users") {
    sendJson(200, [{ id: 1, name: "Alice" }]);
  } else {
    sendJson(404, { error: "Not found" });
  }
});
server.listen(3000, () => {
  console.log("Server running on http://localhost:3000");
});
Environment Variables
// .env file
// PORT=3000
// DATABASE_URL=postgresql://localhost/mydb
// NODE_ENV=development
// Load with dotenv
import dotenv from "dotenv";

// Populate process.env from the local .env file before reading it.
dotenv.config();

const { PORT, DATABASE_URL, NODE_ENV } = process.env;

// Centralized, typed-ish view of the environment; every consumer reads
// this object instead of process.env directly.
const config = {
  port: Number.parseInt(PORT || "3000", 10),
  databaseUrl: DATABASE_URL,
  nodeEnv: NODE_ENV || "development",
  isDevelopment: NODE_ENV === "development",
};
console.log(config);
Streams
import fs from "fs";
// Read stream
// Stream the source file in chunks instead of buffering it whole.
const readStream = fs.createReadStream("large-file.txt", {
  encoding: "utf-8",
  highWaterMark: 16 * 1024, // 16KB chunks
});
readStream
  .on("data", (chunk) => {
    console.log(`Received ${chunk.length} bytes`);
  })
  .on("error", (error) => {
    console.error("Read error:", error);
  });
// Pipe everything read into the destination file.
const writeStream = fs.createWriteStream("output.txt");
writeStream.on("finish", () => {
  console.log("Write complete");
});
readStream.pipe(writeStream);
Child Processes
import { exec, spawn } from "child_process";
import { promisify } from "util";
const execPromise = promisify(exec);
// Execute shell command
/**
 * Run `ls -la` through a shell and print its stdout.
 * Failures (spawn error, non-zero exit) are logged, not rethrown.
 */
async function runCommand() {
  try {
    const result = await execPromise("ls -la");
    console.log(result.stdout);
  } catch (error) {
    console.error(error);
  }
}
// Spawn process
/**
 * Launch `node script.js` as a child process, forward its stdout/stderr
 * to the parent's console, and log the exit code when it finishes.
 */
function spawnProcess() {
  const child = spawn("node", ["script.js"]);
  child.stdout.on("data", (data) => console.log(`stdout: ${data}`));
  child.stderr.on("data", (data) => console.error(`stderr: ${data}`));
  child.on("close", (code) => console.log(`Process exited with code ${code}`));
}
Error Handling
// Global error handlers — last resort: log the failure and exit with a
// non-zero code so a supervisor (PM2, systemd, ...) can restart cleanly.
function onUncaughtException(error) {
  console.error("Uncaught Exception:", error);
  process.exit(1);
}
function onUnhandledRejection(reason, promise) {
  console.error("Unhandled Rejection at:", promise, "reason:", reason);
  process.exit(1);
}
process.on("uncaughtException", onUncaughtException);
process.on("unhandledRejection", onUnhandledRejection);
// Application errors
/**
 * Operational error that carries the HTTP status code to respond with.
 * @param {string} message - human-readable description
 * @param {number} statusCode - HTTP status (e.g. 404)
 */
class AppError extends Error {
  constructor(message, statusCode) {
    super(message);
    this.name = "AppError"; // otherwise stack traces report plain "Error"
    this.statusCode = statusCode;
  }
}
// Usage — the original threw unconditionally at module load, which
// crashed the process via the uncaughtException handler above:
// throw new AppError("User not found", 404);
Testing Node.js Applications
// test.js with Node's built-in test runner (run with `node --test`, Node 18+)
import test from "node:test";
import assert from "node:assert";
// Synchronous test: passes if the callback returns, fails if it throws
test("addition", () => {
  assert.strictEqual(1 + 1, 2);
});
// Async test: the runner awaits the returned promise before reporting
// NOTE(review): someAsyncFunction is not defined in this snippet — it
// stands in for the code under test; substitute a real import.
test("async operation", async () => {
  const result = await someAsyncFunction();
  assert.strictEqual(result, "expected");
});
Performance Best Practices
- Use async/await - cleaner than callbacks
- Monitor memory - Node.js can process large data sets, but watch heap usage and prefer streaming over buffering entire payloads
- Use clustering - leverage multiple cores
- Enable compression - gzip responses
- Cache effectively - Redis for frequently accessed data
Production Deployment
// PM2 configuration
// ecosystem.config.js — started with `pm2 start ecosystem.config.js`.
// NOTE(review): CommonJS (module.exports) appears intentional here — PM2
// loads this file with require(); confirm if the project is ESM-only.
module.exports = {
  apps: [
    {
      name: "api", // process name shown in `pm2 ls`
      script: "./dist/index.js", // entry point (built output)
      instances: "max", // presumably one worker per CPU core — see PM2 docs
      exec_mode: "cluster", // workers share the listening port
      env: {
        NODE_ENV: "production",
        PORT: 3000,
      },
    },
  ],
};
FAQ
Q: What Node.js version should I use? A: Use the latest LTS version. In 2025, that's Node 22 LTS. It receives 3 years of support and is production-ready.
Q: How do I handle database connections in Node.js? A: Use connection pools (standard practice). Create a pool at startup and reuse connections. Never create new connections per request.
Q: What's the difference between npm and yarn? A: Both work fine. npm is built-in. yarn is faster and more reliable for monorepos. pnpm is newer and the most disk-efficient. Pick one and stick with it.
Node.js in 2025 is mature, fast, and production-ready. Master async patterns, understand the event loop, and build with streams and clustering for scalable backend systems.
Advertisement