Published on

Supply Chain Security for Node.js — Protecting Against Compromised Dependencies

Authors

Introduction

A single compromised dependency can expose your entire application. In 2023-2025 alone, malicious packages, subtle typosquatting attacks, and build-time code injection have compromised millions of installations. Node.js projects with hundreds of transitive dependencies are particularly vulnerable—you control maybe 30 packages directly but depend on 300+ indirectly.

This post covers npm audit configuration, automated dependency updates, lockfile verification, detection of typosquatting attacks, SBOM generation, private registries, and reproducible builds.

npm Audit in CI with Audit Levels

Enforce security scanning in every pull request:

# .github/workflows/security.yml
name: Security Audit

on: [push, pull_request]

jobs:
  audit:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: '20'
      - run: npm ci
      # Levels: low, moderate, high, critical
      # Fail CI if any moderate-level vulnerability found
      - run: npm audit --audit-level=moderate

  audit-production:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: '20'
      - run: npm ci --omit=dev
      # Production dependencies only, higher threshold.
      # --omit=dev replaces the deprecated --production flag (npm 8+).
      - run: npm audit --omit=dev --audit-level=high

Advanced npm audit configuration:

// audit-config.ts - Custom audit resolver
import { execSync } from 'child_process';
import * as fs from 'fs';

interface AuditReport {
  vulnerabilities: Record<string, { severity: string; fixAvailable: boolean }>;
  metadata: { vulnerabilities: number };
}

class AuditManager {
  // Package names whose known advisories were reviewed and accepted.
  private allowedVulnerabilities: string[] = [];

  /**
   * Runs `npm audit --json` and exits with status 1 when any
   * non-allowlisted vulnerability above severity "low" is present.
   *
   * NOTE: `npm audit` exits non-zero whenever vulnerabilities exist, so
   * execSync throws even on a successful scan; the JSON report must be
   * recovered from the thrown error's stdout or the filtering below
   * would never run.
   */
  runAudit(production: boolean = false): void {
    // --omit=dev replaces the deprecated --production flag (npm 8+).
    const command = `npm audit --json${production ? ' --omit=dev' : ''}`;

    let output = '';
    try {
      output = execSync(command, { encoding: 'utf-8' });
    } catch (error: unknown) {
      const stdout = (error as { stdout?: string | Buffer }).stdout;
      if (!stdout) {
        // npm itself failed to run (not a vulnerability result).
        console.error('Audit failed to run:', error);
        process.exit(1);
      }
      output = stdout.toString();
    }

    const report: AuditReport = JSON.parse(output);

    const vulnerabilities = Object.entries(report.vulnerabilities ?? {})
      .filter(([packageName]) => !this.allowedVulnerabilities.includes(packageName))
      .filter(([, data]) => data.severity !== 'low');

    if (vulnerabilities.length > 0) {
      console.error('Critical vulnerabilities found:');
      vulnerabilities.forEach(([pkg, data]) => {
        console.error(`  ${pkg}: ${data.severity}`);
      });
      process.exit(1);
    }

    console.log('Audit passed');
  }

  /**
   * Loads the allowlist (a JSON array of package names) from `path`.
   * Throws if the file does not contain a JSON array.
   */
  loadAllowlist(path: string): void {
    const content = fs.readFileSync(path, 'utf-8');
    const parsed: unknown = JSON.parse(content);
    if (!Array.isArray(parsed)) {
      throw new Error(`Allowlist ${path} must be a JSON array of package names`);
    }
    this.allowedVulnerabilities = parsed as string[];
  }
}

const manager = new AuditManager();
// The allowlist is optional — only load it when the file actually exists,
// otherwise readFileSync would crash the audit before it starts.
if (fs.existsSync('.auditignore.json')) {
  manager.loadAllowlist('.auditignore.json');
}
manager.runAudit();

Dependabot Configuration for Automated Updates

Automate dependency updates with Dependabot:

# .github/dependabot.yml  (YAML comments use '#', not '//')
version: 2
updates:
  # Dependabot rejects duplicate package-ecosystem + directory entries, so
  # production / development / major-version policies are expressed in a
  # single npm entry via update groups.
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
      time: "03:00"
    pull-request-branch-name:
      separator: "/"
    reviewers:
      - "security-team"
    labels:
      - "dependencies"
      - "npm"
    versioning-strategy: "auto"
    open-pull-requests-limit: 10
    groups:
      # Production dependencies, routine (minor/patch) bumps.
      production-dependencies:
        dependency-type: "production"
        update-types: ["minor", "patch"]
      # Development dependencies (less strict review).
      development-dependencies:
        dependency-type: "development"
        update-types: ["minor", "patch"]
      # Major version updates batched separately for careful review.
      major-updates:
        update-types: ["major"]

Renovate Alternative Configuration

Renovate offers more granular control:

// renovate.json
{
  "extends": ["config:recommended", "schedule:weekly"],
  "automerge": false,
  "major": {
    "enabled": true,
    "schedule": ["after 10pm on weekends"]
  },
  "minor": {
    "automerge": true,
    "automergeType": "pr",
    "automergeStrategy": "squash",
    "schedule": ["before 3am on weekdays"]
  },
  "patch": {
    "automerge": true,
    "automergeType": "pr",
    "automergeStrategy": "squash",
    "schedule": ["before 3am every weekday"]
  },
  "vulnerabilityAlerts": {
    "enabled": true,
    "labels": ["security"],
    "assignees": ["@security-team"],
    "reviewers": ["@security-team"],
    "automerge": true,
    "automergeType": "pr",
    "automergeStrategy": "squash"
  },
  "packageRules": [
    {
      "matchPackagePatterns": ["^@babel"],
      "groupName": "Babel packages"
    },
    {
      "matchPackagePatterns": ["^@types/"],
      "automerge": true
    }
  ]
}

Lockfile Integrity and npm ci

Enforce reproducible installs:

# package-lock.json usage
# Use npm ci (clean install) in production, never npm install:
# npm ci deletes node_modules and installs exactly what the lockfile pins.

# CI environment (production dependencies only)
npm ci --omit=dev

# Local development (can use npm install)
npm install

# Verify lockfile matches package.json
# (exits non-zero when the installed tree is missing or invalid)
npm ls --depth=0

# Update lockfile when modifying package.json
# (rewrites package-lock.json without touching node_modules)
npm install --package-lock-only
// Lockfile integrity checker
import * as fs from 'fs';
import * as crypto from 'crypto';

interface LockfileEntry {
  version: string;
  integrity: string;
  resolved: string;
}

class LockfileValidator {
  /**
   * True when every registry package entry in a v2/v3 package-lock.json
   * carries a well-formed SRI integrity string.
   */
  validateIntegrity(packageLockPath: string): boolean {
    const lockfile = JSON.parse(
      fs.readFileSync(packageLockPath, 'utf-8')
    );

    for (const [pkg, data] of Object.entries(lockfile.packages || {})) {
      const entry = data as LockfileEntry & { link?: boolean };

      // The root project ("" key) and workspace links never carry an
      // integrity field — without this skip, every real lockfile fails.
      if (pkg === '' || entry.link) {
        continue;
      }

      if (!entry.integrity) {
        console.warn(`Missing integrity for ${pkg}`);
        return false;
      }

      if (!this.isValidSRI(entry.integrity)) {
        console.error(`Invalid SRI for ${pkg}: ${entry.integrity}`);
        return false;
      }
    }

    return true;
  }

  /** SRI is one or more space-separated "algorithm-base64hash" tokens. */
  private isValidSRI(sri: string): boolean {
    const tokens = sri.split(/\s+/).filter(Boolean);
    return (
      tokens.length > 0 &&
      tokens.every((token) => /^(sha256|sha384|sha512)-(.+)$/.test(token))
    );
  }

  /**
   * True when every declared dependency has a lockfile entry. Matches the
   * exact trailing node_modules segment, so "ms" no longer false-matches
   * a package like "npms" (the original used a substring test).
   */
  compareWithPackageJson(packagePath: string, lockPath: string): boolean {
    const packageJson = JSON.parse(fs.readFileSync(packagePath, 'utf-8'));
    const lockfile = JSON.parse(fs.readFileSync(lockPath, 'utf-8'));
    const lockKeys = Object.keys(lockfile.packages || {});

    for (const dep of Object.keys(packageJson.dependencies || {})) {
      const found = lockKeys.some(
        (pkg) =>
          pkg === `node_modules/${dep}` ||
          pkg.endsWith(`/node_modules/${dep}`)
      );

      if (!found) {
        console.error(`Dependency ${dep} not in lockfile`);
        return false;
      }
    }

    return true;
  }
}

// Gate the pipeline on lockfile integrity: abort with a failing exit
// status when any entry is missing or carries a malformed SRI hash.
const validator = new LockfileValidator();
const lockfileOk = validator.validateIntegrity('./package-lock.json');
if (!lockfileOk) {
  process.exit(1);
}

Typosquatting Detection

Detect packages with names similar to legitimate ones:

// Edit distance is implemented inline below, so this supply-chain-security
// tool adds no third-party dependency of its own (the 'levenshtein' package
// is no longer required).

interface DependencyMap {
  [pkg: string]: string; // name -> version
}

class TyposquattingDetector {
  // High-download packages that attackers most often imitate.
  private commonPackages = [
    'express',
    'lodash',
    'react',
    'axios',
    'tslib',
    'uuid',
    'debug',
    'colors',
    'moment',
    'async',
  ];

  /** Classic single-row DP edit distance (insert / delete / substitute). */
  private static editDistance(a: string, b: string): number {
    const row: number[] = Array.from({ length: b.length + 1 }, (_, j) => j);
    for (let i = 1; i <= a.length; i++) {
      let diagonal = row[0];
      row[0] = i;
      for (let j = 1; j <= b.length; j++) {
        const previous = row[j];
        row[j] = Math.min(
          row[j] + 1,       // delete from a
          row[j - 1] + 1,   // insert into a
          diagonal + (a[i - 1] === b[j - 1] ? 0 : 1) // substitute
        );
        diagonal = previous;
      }
    }
    return row[b.length];
  }

  /**
   * Returns dependency names whose edit distance to a well-known package
   * is suspiciously small (similarity > 0.8) without being identical.
   * Each package appears at most once in the result.
   */
  detectSuspiciousPackages(dependencies: DependencyMap): string[] {
    const suspicious: string[] = [];

    for (const pkg of Object.keys(dependencies)) {
      for (const common of this.commonPackages) {
        const distance = TyposquattingDetector.editDistance(pkg, common);
        const similarity = 1 - distance / Math.max(pkg.length, common.length);

        // Flag if too similar (but not identical)
        if (similarity > 0.8 && pkg !== common) {
          if (!suspicious.includes(pkg)) {
            suspicious.push(pkg);
          }
          console.warn(
            `Typosquatting suspicion: "${pkg}" is similar to "${common}"`
          );
        }
      }
    }

    return suspicious;
  }

  /**
   * Heuristic reputation check: packages younger than six months with
   * fewer than 100 monthly downloads are treated as suspicious.
   */
  async checkPackageReputation(pkg: string): Promise<boolean> {
    const res = await fetch(
      `https://registry.npmjs.org/${encodeURIComponent(pkg)}`
    );
    if (!res.ok) {
      console.warn(`Registry lookup failed for ${pkg}: HTTP ${res.status}`);
      return false;
    }
    const data: any = await res.json();

    // Check publish date (typosquats are often brand new).
    const publishDate = new Date(data.time.created);
    const age = Date.now() - publishDate.getTime();
    const sixMonths = 180 * 24 * 60 * 60 * 1000;

    if (age < sixMonths) {
      // Download counts are NOT in the registry document; they come from
      // the separate npm downloads API.
      const dlRes = await fetch(
        `https://api.npmjs.org/downloads/point/last-month/${encodeURIComponent(pkg)}`
      );
      const downloads: number = dlRes.ok
        ? ((await dlRes.json()) as any).downloads ?? 0
        : 0;
      if (downloads < 100) {
        console.warn(`Low reputation: ${pkg} (${downloads} downloads/month)`);
        return false;
      }
    }

    return true;
  }
}

const detector = new TyposquattingDetector();
// NOTE(review): require() assumes a CommonJS build; in an ESM project read
// the manifest with fs.readFileSync + JSON.parse instead — confirm which
// module system the build targets.
const packageJson = require('./package.json');

async function auditDependencies(): Promise<void> {
  // Guard: a manifest without a "dependencies" field is valid JSON.
  const suspicious = detector.detectSuspiciousPackages(
    packageJson.dependencies ?? {}
  );

  for (const pkg of suspicious) {
    const isLegit = await detector.checkPackageReputation(pkg);
    if (!isLegit) {
      console.error(`BLOCK: Suspected typosquatting package: ${pkg}`);
      process.exit(1);
    }
  }
}

// Surface rejections (network failures, bad JSON) instead of leaving a
// floating promise whose errors vanish as unhandled rejections.
auditDependencies().catch((error: unknown) => {
  console.error('Dependency audit failed:', error);
  process.exit(1);
});

SBOM Generation with CycloneDX

Create Software Bill of Materials for compliance:

import * as fs from 'fs';
import * as crypto from 'crypto';

interface SBOMComponent {
  type: string;
  name: string;
  version: string;
  purl: string;
  hashes?: Array<{ alg: string; content: string }>;
}

interface SBOM {
  bomFormat: string;
  specVersion: string;
  version: number;
  components: SBOMComponent[];
  metadata: {
    timestamp: string;
    tools: Array<{ name: string; version: string }>;
  };
}

class SBOMGenerator {
  /**
   * Builds a CycloneDX 1.4 SBOM from a v2/v3 package-lock.json, whose
   * "packages" keys look like "node_modules/<name>" (no leading slash).
   */
  async generateFromLockfile(
    lockfilePath: string
  ): Promise<SBOM> {
    const lockfile = JSON.parse(
      fs.readFileSync(lockfilePath, 'utf-8')
    );

    const components: SBOMComponent[] = [];

    for (const [pkgPath, pkgData] of Object.entries(
      lockfile.packages || {}
    )) {
      const pkg = pkgData as any;

      // Skip the root project (the "" key). Lockfile keys have no leading
      // slash, so the original '/node_modules/' test silently dropped
      // every top-level package from the SBOM.
      if (!pkgPath.includes('node_modules/')) {
        continue;
      }

      // Take the last node_modules segment (handles nested installs).
      // Scoped names like "@types/node" are kept intact — splitting on
      // '@' mangled them into "types/node".
      const name = pkgPath.split('node_modules/').pop() || '';
      const version = pkg.version || '';

      components.push({
        type: 'library',
        name,
        version,
        // purl spec: the '@' of a scope is percent-encoded in the path.
        purl: `pkg:npm/${name.replace(/^@/, '%40')}@${version}`,
        hashes: this.parseHashes(pkg.integrity),
      });
    }

    return {
      bomFormat: 'CycloneDX',
      specVersion: '1.4',
      version: 1,
      components,
      metadata: {
        timestamp: new Date().toISOString(),
        tools: [
          {
            name: 'sbom-generator',
            version: '1.0.0',
          },
        ],
      },
    };
  }

  /**
   * Converts an SRI string (one or more space-separated "algo-base64"
   * tokens) into CycloneDX hash objects. CycloneDX algorithm names are
   * hyphenated ("SHA-512"), not the bare "SHA512" the original produced.
   */
  private parseHashes(
    integrity: string
  ): Array<{ alg: string; content: string }> {
    if (!integrity) return [];

    const algNames: Record<string, string> = {
      sha1: 'SHA-1',
      sha256: 'SHA-256',
      sha384: 'SHA-384',
      sha512: 'SHA-512',
    };

    return integrity
      .split(/\s+/)
      .filter(Boolean)
      .map((token) => {
        const sep = token.indexOf('-');
        const alg = token.slice(0, sep);
        const content = token.slice(sep + 1);
        return { alg: algNames[alg] ?? alg.toUpperCase(), content };
      });
  }

  /** Writes the SBOM as pretty-printed JSON to `outputPath`. */
  async writeSBOM(sbom: SBOM, outputPath: string): Promise<void> {
    fs.writeFileSync(outputPath, JSON.stringify(sbom, null, 2));
    console.log(`SBOM written to ${outputPath}`);
  }
}

// Generate the SBOM from the current lockfile and persist it to disk.
const generator = new SBOMGenerator();
const generatedSbom = await generator.generateFromLockfile('./package-lock.json');
await generator.writeSBOM(generatedSbom, './sbom.json');

Socket.dev Pre-Install Analysis

Scan packages before installation:

# Install socket.dev CLI
npm install -g @socketsecurity/cli

# Scan a single package (name@version) before adding it to the project
socket scan lodash@4.17.21

# Scan entire lockfile
socket scan package-lock.json

# Configure socket in CI
# .github/workflows/socket-scan.yml
name: Socket.dev Scan

on: [pull_request]

jobs:
  socket:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: socketsecurity/socket-security-action@v1
        with:
          # Store the token as an encrypted repository secret,
          # never inline in the workflow file.
          apiToken: ${{ secrets.SOCKET_API_TOKEN }}
          files: package-lock.json
          failure-level: 'high'

Private npm Registry with Verdaccio

Host internal packages securely:

# verdaccio/config.yaml
# Package tarballs and metadata are kept on local disk.
storage: ./storage
auth:
  htpasswd:
    file: ./htpasswd

# Upstream registries to proxy for packages not published here.
uplinks:
  npmjs:
    url: https://registry.npmjs.org/

packages:
  # Internal scope: login required for both install and publish.
  '@mycompany/*':
    access: $authenticated
    publish: $authenticated
    proxy: npmjs
  # Everything else: public read, authenticated publish, proxied upstream.
  '**':
    access: $all
    publish: $authenticated
    proxy: npmjs

server:
  keepAliveTimeout: 60

# JWT token lifetimes for API clients and the web UI.
security:
  api:
    jwt:
      sign:
        expiresIn: 7d
  web:
    sign:
      expiresIn: 7d

# Built-in audit middleware forwards `npm audit` requests upstream.
middlewares:
  audit:
    enabled: true

logs:
  - { type: stdout, format: pretty, level: http }
# .npmrc for CI (npm config files use '#' or ';' for comments, not '//')
registry=https://registry.company.com
@mycompany:registry=https://registry.company.com
//registry.company.com/:_authToken=${NPM_TOKEN}

Reproducible Builds

Ensure builds are deterministic:

// build-verification.ts
import { execSync } from 'child_process';
import * as crypto from 'crypto';
import * as fs from 'fs';

class ReproducibleBuildValidator {
  /**
   * Content hash of a build directory: a single SHA-256 over every file's
   * relative path and raw bytes, visited in sorted order. Including the
   * path detects renames; reading raw Buffers (not 'utf-8' text) keeps
   * binary artifacts such as images or wasm from being corrupted.
   */
  private hashBuild(directory: string): string {
    const digest = crypto.createHash('sha256');

    for (const file of this.getFilesRecursive(directory).sort()) {
      digest.update(file);                  // the path itself is part of the build
      digest.update(fs.readFileSync(file)); // raw bytes, not utf-8 text
    }

    return digest.digest('hex');
  }

  /**
   * Runs `buildCommand` twice from a clean slate and compares directory
   * hashes; identical hashes mean the build is byte-for-byte reproducible.
   */
  async verifyReproducibility(buildCommand: string): Promise<boolean> {
    // fs.rmSync is portable, unlike shelling out to `rm -rf`.
    fs.rmSync('dist', { recursive: true, force: true });
    execSync(buildCommand);
    const hash1 = this.hashBuild('./dist');

    fs.rmSync('dist', { recursive: true, force: true });
    execSync(buildCommand);
    const hash2 = this.hashBuild('./dist');

    const reproducible = hash1 === hash2;
    console.log(
      `Build reproducibility: ${reproducible ? 'PASS' : 'FAIL'}`
    );

    if (!reproducible) {
      console.error(`Hash 1: ${hash1}`);
      console.error(`Hash 2: ${hash2}`);
    }

    return reproducible;
  }

  /** Depth-first listing of all regular files under `directory`. */
  private getFilesRecursive(directory: string): string[] {
    const files: string[] = [];

    const traverse = (dir: string) => {
      for (const entry of fs.readdirSync(dir)) {
        const path = `${dir}/${entry}`;
        if (fs.statSync(path).isDirectory()) {
          traverse(path);
        } else {
          files.push(path);
        }
      }
    };

    traverse(directory);
    return files;
  }
}

// Entry point: run the build twice and surface the comparison as the
// process exit status (0 = reproducible, 1 = divergent output).
const validator = new ReproducibleBuildValidator();
const reproducible = await validator.verifyReproducibility('npm run build');
process.exit(reproducible ? 0 : 1);

Checklist

  • Run npm audit in CI with --audit-level=moderate
  • Automate updates with Dependabot or Renovate
  • Use npm ci in production (never npm install)
  • Validate lockfile integrity and SRI hashes
  • Check for typosquatting with Levenshtein distance
  • Generate SBOM with CycloneDX for compliance
  • Scan packages with Socket.dev before installation
  • Use private registry for internal packages
  • Verify reproducible builds (same input = same output)
  • Audit vendor licenses (GPL, MIT, etc.)

Conclusion

Supply chain security for Node.js requires defense in depth: automated scanning with npm audit, dependency updates with Dependabot, lockfile verification with npm ci, detection of typosquatting, SBOM generation for compliance, and private registries for internal code. Layered together, these controls block or detect the vast majority of supply chain attacks before they reach production.