Building Scalable Node.js Microservices: Architecture Patterns and Best Practices
Microservices architecture has become the de facto standard for building scalable applications. After architecting several Node.js microservices systems, I want to share the patterns, practices, and lessons learned that lead to successful implementations.
Core Architecture Principles
Successful microservices follow these fundamental principles:
Single Responsibility
Each service should own a specific business capability:
// User Service - handles user management
class UserService {
  async createUser(userData) {
    // Validate user data
    // Store in database
    // Publish user.created event
  }

  async getUserById(id) {
    // Retrieve user from database
  }
}

// Order Service - handles order processing
class OrderService {
  async createOrder(orderData) {
    // Validate order
    // Calculate pricing
    // Publish order.created event
  }
}
Database Per Service
Each microservice should have its own database:
// User Service Database Configuration
const userDbConfig = {
  host: process.env.USER_DB_HOST,
  database: 'users'
  // User-specific schema
}

// Order Service Database Configuration
const orderDbConfig = {
  host: process.env.ORDER_DB_HOST,
  database: 'orders'
  // Order-specific schema
}
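In practice each config feeds its own connection pool, and nothing stops you from mixing storage technologies per service. A minimal sketch, assuming PostgreSQL via the `pg` driver (the driver choice is mine, not a requirement):

// Each service owns its pool and talks only to its own database
const { Pool } = require('pg')
const userDb = new Pool(userDbConfig)   // users database only
const orderDb = new Pool(orderDbConfig) // orders database only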
Service Communication Patterns
Synchronous Communication with HTTP
// API Gateway pattern
const express = require('express')
const { createProxyMiddleware } = require('http-proxy-middleware')
const app = express()

// Route to User Service
app.use('/api/users', createProxyMiddleware({
  target: 'http://user-service:3001',
  changeOrigin: true,
  pathRewrite: { '^/api/users': '' }
}))

// Route to Order Service
app.use('/api/orders', createProxyMiddleware({
  target: 'http://order-service:3002',
  changeOrigin: true,
  pathRewrite: { '^/api/orders': '' }
}))
// Circuit breaker pattern
const CircuitBreaker = require('opossum')

async function callExternalService(serviceUrl, data) {
  const response = await fetch(serviceUrl, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(data)
  })
  // fetch does not reject on HTTP errors, so throw explicitly
  // to make failed calls count against the breaker
  if (!response.ok) {
    throw new Error(`Downstream call failed with status ${response.status}`)
  }
  return response.json()
}

const options = {
  timeout: 3000,                // consider a call failed after 3 seconds
  errorThresholdPercentage: 50, // open the circuit once half the calls fail
  resetTimeout: 30000           // try a test request after 30 seconds
}

const breaker = new CircuitBreaker(callExternalService, options)
breaker.fallback(() => ({ error: 'Service temporarily unavailable' }))
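Calls then go through the breaker rather than straight to fetch. A short usage sketch (the downstream URL and payload are illustrative):

// fire() forwards its arguments to callExternalService; when the circuit is
// open, the registered fallback is returned instead of hitting the network
async function notifyOrderService(payload) {
  return breaker.fire('http://order-service:3002/events', payload)
}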
Asynchronous Communication with Message Queues
// Event-driven architecture with RabbitMQ
const amqp = require('amqplib')
const { v4: uuidv4 } = require('uuid')

class EventBus {
  constructor() {
    this.connection = null
    this.channel = null
  }

  async connect() {
    this.connection = await amqp.connect(process.env.RABBITMQ_URL)
    this.channel = await this.connection.createChannel()
  }

  async publish(exchange, routingKey, message) {
    await this.channel.assertExchange(exchange, 'topic', { durable: true })
    const messageBuffer = Buffer.from(JSON.stringify(message))
    return this.channel.publish(exchange, routingKey, messageBuffer, {
      persistent: true,
      timestamp: Date.now(),
      messageId: uuidv4()
    })
  }

  async subscribe(exchange, queue, routingKey, handler) {
    await this.channel.assertExchange(exchange, 'topic', { durable: true })
    await this.channel.assertQueue(queue, { durable: true })
    await this.channel.bindQueue(queue, exchange, routingKey)
    return this.channel.consume(queue, async (message) => {
      if (!message) return // consumer was cancelled by the broker
      try {
        const content = JSON.parse(message.content.toString())
        await handler(content)
        this.channel.ack(message)
      } catch (error) {
        console.error('Message processing failed:', error)
        // Reject without requeueing; the message is dead-lettered
        // only if the queue has a dead-letter exchange configured
        this.channel.nack(message, false, false)
      }
    })
  }
}
// Usage in User Service
const eventBus = new EventBus()

class UserService {
  async createUser(userData) {
    const user = await this.userRepository.create(userData)
    // Publish event for other services
    await eventBus.publish('user.events', 'user.created', {
      userId: user.id,
      email: user.email,
      createdAt: user.createdAt
    })
    return user
  }
}
// Usage in Email Service
class EmailService {
  async init() {
    await eventBus.subscribe(
      'user.events',
      'email.user.created',
      'user.created',
      this.handleUserCreated.bind(this)
    )
  }

  async handleUserCreated(event) {
    await this.sendWelcomeEmail(event.email)
  }
}
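Both services assume a connected EventBus, so connect it once at startup before any handlers are registered. A minimal bootstrap sketch:

// Service bootstrap: connect the bus, then wire up subscribers
async function bootstrap() {
  await eventBus.connect()
  const emailService = new EmailService()
  await emailService.init()
  console.log('Email service is listening for user.created events')
}
bootstrap().catch((error) => {
  console.error('Startup failed:', error)
  process.exit(1)
})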
Data Management Patterns
Saga Pattern for Distributed Transactions
// Order Saga - coordinates order processing across services
class OrderSaga {
  constructor(eventBus, services) {
    this.eventBus = eventBus
    this.services = services
    this.setupEventHandlers()
  }

  setupEventHandlers() {
    this.eventBus.subscribe('order.events', 'saga.order', 'order.created',
      this.handleOrderCreated.bind(this))
    this.eventBus.subscribe('payment.events', 'saga.payment', 'payment.processed',
      this.handlePaymentProcessed.bind(this))
    this.eventBus.subscribe('inventory.events', 'saga.inventory', 'inventory.reserved',
      this.handleInventoryReserved.bind(this))
  }

  async handleOrderCreated(event) {
    try {
      // Step 1: Reserve inventory
      await this.services.inventory.reserve({
        orderId: event.orderId,
        items: event.items
      })
    } catch (error) {
      await this.compensateOrder(event.orderId)
    }
  }

  async handleInventoryReserved(event) {
    try {
      // Step 2: Process payment
      await this.services.payment.process({
        orderId: event.orderId,
        amount: event.amount
      })
    } catch (error) {
      await this.compensateInventory(event.orderId)
      await this.compensateOrder(event.orderId)
    }
  }

  async handlePaymentProcessed(event) {
    // Step 3: Confirm order
    await this.services.order.confirm(event.orderId)
    await this.services.shipping.schedule(event.orderId)
  }

  async compensateOrder(orderId) {
    await this.services.order.cancel(orderId)
  }

  async compensateInventory(orderId) {
    await this.services.inventory.release(orderId)
  }
}
CQRS (Command Query Responsibility Segregation)
// Command side - handles writes
class UserCommandHandler {
  constructor(eventStore, eventBus) {
    this.eventStore = eventStore
    this.eventBus = eventBus
  }

  async createUser(command) {
    const events = [
      {
        type: 'UserCreated',
        aggregateId: command.userId,
        data: {
          name: command.name,
          email: command.email
        },
        timestamp: new Date()
      }
    ]
    await this.eventStore.saveEvents(command.userId, events)
    for (const event of events) {
      await this.eventBus.publish('user.events', event.type, event)
    }
  }
}

// Query side - handles reads
class UserQueryHandler {
  constructor(readDatabase) {
    this.db = readDatabase
  }

  async getUserById(userId) {
    return this.db.users.findById(userId)
  }

  async getUsersByEmail(email) {
    return this.db.users.findByEmail(email)
  }
}

// Event projector - updates read models
class UserProjector {
  constructor(readDatabase, eventBus) {
    this.db = readDatabase
    this.setupEventHandlers(eventBus)
  }

  setupEventHandlers(eventBus) {
    eventBus.subscribe('user.events', 'projector.user', 'UserCreated',
      this.handleUserCreated.bind(this))
  }

  async handleUserCreated(event) {
    await this.db.users.create({
      id: event.aggregateId,
      name: event.data.name,
      email: event.data.email,
      createdAt: event.timestamp
    })
  }
}
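The command handler above assumes an event store exposing saveEvents. Here is a deliberately minimal in-memory sketch of that interface, purely for illustration; a real system would use an append-only store such as a database table or a dedicated event store:

// Minimal in-memory event store (illustration only, not durable)
class InMemoryEventStore {
  constructor() {
    this.streams = new Map() // aggregateId -> ordered list of events
  }
  async saveEvents(aggregateId, events) {
    const stream = this.streams.get(aggregateId) || []
    this.streams.set(aggregateId, stream.concat(events))
  }
  async getEvents(aggregateId) {
    return this.streams.get(aggregateId) || []
  }
}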
Service Discovery and Load Balancing
Consul-based Service Discovery
const Consul = require('consul')
const consul = new Consul() // promise-based API in consul v1+

class ServiceRegistry {
  constructor() {
    this.services = new Map()
  }

  async registerService(name, port, health) {
    const serviceId = `${name}-${process.env.HOSTNAME || 'local'}-${port}`
    await consul.agent.service.register({
      id: serviceId,
      name: name,
      port: port,
      check: {
        http: `http://localhost:${port}${health}`,
        interval: '10s'
      }
    })
    console.log(`Service ${name} registered with ID: ${serviceId}`)
  }

  async discoverService(serviceName) {
    const services = await consul.health.service({
      service: serviceName,
      passing: true
    })
    return services.map(service => ({
      host: service.Service.Address,
      port: service.Service.Port
    }))
  }

  async getServiceUrl(serviceName) {
    const instances = await this.discoverService(serviceName)
    if (instances.length === 0) {
      throw new Error(`No healthy instances found for service: ${serviceName}`)
    }
    // Pick a random healthy instance (simple client-side load balancing)
    const instance = instances[Math.floor(Math.random() * instances.length)]
    return `http://${instance.host}:${instance.port}`
  }
}

// Usage in service
const registry = new ServiceRegistry()

async function startService() {
  const port = process.env.PORT || 3000
  app.listen(port, async () => {
    await registry.registerService('user-service', port, '/health')
    console.log(`User service running on port ${port}`)
  })
}
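The Consul check above, and the Kubernetes probes later in this post, both call HTTP endpoints that the service has to expose itself. A minimal sketch of those endpoints (the readiness check shown is an assumption; test whatever dependencies your service actually needs before taking traffic):

// Health and readiness endpoints consumed by Consul checks and Kubernetes probes
app.get('/health', (req, res) => {
  // Liveness: the process is up and able to respond
  res.status(200).json({ status: 'ok' })
})
app.get('/ready', async (req, res) => {
  try {
    await userDb.query('SELECT 1') // assumption: the pg pool from the earlier sketch
    res.status(200).json({ status: 'ready' })
  } catch (error) {
    res.status(503).json({ status: 'not ready', error: error.message })
  }
})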
Monitoring and Observability
Distributed Tracing with OpenTelemetry
const { NodeSDK } = require('@opentelemetry/sdk-node')
const { getNodeAutoInstrumentations } = require('@opentelemetry/auto-instrumentations-node')
const { JaegerExporter } = require('@opentelemetry/exporter-jaeger')

// Initialize tracing
const sdk = new NodeSDK({
  traceExporter: new JaegerExporter({
    endpoint: process.env.JAEGER_ENDPOINT
  }),
  instrumentations: [getNodeAutoInstrumentations()]
})
sdk.start()

// Custom tracing in business logic
const { trace, SpanStatusCode } = require('@opentelemetry/api')

class UserService {
  async createUser(userData) {
    const span = trace.getActiveSpan()
    span?.setAttributes({
      'user.email': userData.email,
      'operation': 'createUser'
    })
    try {
      const user = await this.userRepository.create(userData)
      span?.setStatus({ code: SpanStatusCode.OK })
      return user
    } catch (error) {
      span?.setStatus({ code: SpanStatusCode.ERROR, message: error.message })
      throw error
    }
  }
}
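Auto-instrumentation covers inbound HTTP and most common libraries; for finer-grained timing you can open explicit child spans around individual steps. A short sketch using the tracer from @opentelemetry/api (span and function names are illustrative):

// Wrapping one step in its own span
const tracer = trace.getTracer('user-service')
async function createUserWithSpan(userRepository, userData) {
  return tracer.startActiveSpan('user-repository.create', async (span) => {
    try {
      return await userRepository.create(userData)
    } finally {
      span.end() // always close the span, even when the call throws
    }
  })
}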
Structured Logging
const winston = require('winston')
const { v4: uuidv4 } = require('uuid')

const logger = winston.createLogger({
  format: winston.format.combine(
    winston.format.timestamp(),
    winston.format.json()
  ),
  transports: [
    new winston.transports.Console(),
    new winston.transports.File({ filename: 'app.log' })
  ]
})

// Request correlation middleware
function correlationMiddleware(req, res, next) {
  req.correlationId = req.headers['x-correlation-id'] || uuidv4()
  res.setHeader('x-correlation-id', req.correlationId)
  req.logger = logger.child({
    correlationId: req.correlationId,
    service: 'user-service'
  })
  next()
}

// Usage in routes
app.use(correlationMiddleware)

app.post('/users', async (req, res) => {
  req.logger.info('Creating user', { email: req.body.email })
  try {
    const user = await userService.createUser(req.body)
    req.logger.info('User created successfully', { userId: user.id })
    res.json(user)
  } catch (error) {
    req.logger.error('Failed to create user', { error: error.message })
    res.status(500).json({ error: 'Internal server error' })
  }
})
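A correlation ID is only useful if it follows the request across service boundaries, so forward the header on every outbound call. A small sketch (the downstream URL is illustrative):

// Propagate the correlation ID to downstream services
async function fetchOrdersForUser(req, userId) {
  const response = await fetch(`http://order-service:3002/orders?userId=${userId}`, {
    headers: { 'x-correlation-id': req.correlationId }
  })
  return response.json()
}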
Deployment and DevOps
Docker Configuration
# Multi-stage build for Node.js microservice
FROM node:18-alpine AS builder
WORKDIR /app
COPY package*.json ./
RUN npm ci --omit=dev

FROM node:18-alpine AS runtime
RUN addgroup -g 1001 -S nodejs
RUN adduser -S nodejs -u 1001
WORKDIR /app
COPY --from=builder /app/node_modules ./node_modules
COPY . .
USER nodejs
EXPOSE 3000
# node:alpine ships BusyBox wget rather than curl
HEALTHCHECK \
  CMD wget -qO- http://localhost:3000/health || exit 1
CMD ["node", "server.js"]
Kubernetes Deployment
apiVersion: apps/v1
kind: Deployment
metadata:
  name: user-service
spec:
  replicas: 3
  selector:
    matchLabels:
      app: user-service
  template:
    metadata:
      labels:
        app: user-service
    spec:
      containers:
        - name: user-service
          image: user-service:latest
          ports:
            - containerPort: 3000
          env:
            - name: DATABASE_URL
              valueFrom:
                secretKeyRef:
                  name: user-service-secrets
                  key: database-url
          livenessProbe:
            httpGet:
              path: /health
              port: 3000
            initialDelaySeconds: 30
            periodSeconds: 10
          readinessProbe:
            httpGet:
              path: /ready
              port: 3000
            initialDelaySeconds: 5
            periodSeconds: 5
          resources:
            requests:
              memory: "128Mi"
              cpu: "100m"
            limits:
              memory: "256Mi"
              cpu: "200m"
---
apiVersion: v1
kind: Service
metadata:
  name: user-service
spec:
  selector:
    app: user-service
  ports:
    - port: 80
      targetPort: 3000
  type: ClusterIP
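Kubernetes sends SIGTERM before killing a pod, so the service should stop accepting new connections and drain in-flight requests rather than dying mid-request. A minimal shutdown sketch (assumes the app and port from the service bootstrap earlier; deregistering from Consul is optional and shown only as a comment):

// Graceful shutdown so rolling deployments do not drop requests
const server = app.listen(port)
process.on('SIGTERM', () => {
  console.log('SIGTERM received, shutting down')
  server.close(() => {
    // Optionally deregister from Consul here so no new traffic reaches this instance
    process.exit(0)
  })
})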
Testing Strategies
Contract Testing with Pact
// Consumer test (API Gateway)
const { Pact } = require('@pact-foundation/pact')
const { like } = require('@pact-foundation/pact').Matchers

const provider = new Pact({
  consumer: 'api-gateway',
  provider: 'user-service',
  port: 1234
})

describe('User Service Contract', () => {
  beforeAll(() => provider.setup())
  afterAll(() => provider.finalize())

  it('should get user by ID', async () => {
    await provider.addInteraction({
      state: 'user exists',
      uponReceiving: 'a request for user',
      withRequest: {
        method: 'GET',
        path: '/users/123'
      },
      willRespondWith: {
        status: 200,
        headers: { 'Content-Type': 'application/json' },
        body: like({
          id: '123',
          name: 'John Doe',
          email: 'john@example.com'
        })
      }
    })

    const response = await fetch('http://localhost:1234/users/123')
    const user = await response.json()
    expect(user.id).toBe('123')

    // Confirm the mock provider saw the expected interaction
    await provider.verify()
  })
})
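The consumer test produces a pact file; the other half of contract testing is verifying that contract against the real provider. A sketch of provider-side verification with Pact's Verifier (the base URL and pact file path are assumptions):

// Provider-side verification, run against a locally started user-service
const { Verifier } = require('@pact-foundation/pact')
new Verifier({
  provider: 'user-service',
  providerBaseUrl: 'http://localhost:3001',
  pactUrls: ['./pacts/api-gateway-user-service.json']
}).verifyProvider()
  .then(() => console.log('Pact verification complete'))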
Conclusion
Building scalable Node.js microservices requires careful consideration of architecture patterns, communication strategies, and operational concerns. The key is to start simple and evolve your architecture as your system grows.
Focus on:
- Clear service boundaries
- Robust communication patterns
- Comprehensive monitoring
- Automated testing and deployment
- Gradual migration strategies
Remember that microservices introduce complexity, so ensure the benefits outweigh the costs for your specific use case. Start with a modular monolith and extract services when you have clear business and technical justification.
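One concrete way to extract services gradually is to keep the monolith behind the same API gateway and peel off one route at a time. A sketch reusing the gateway from earlier (service names are illustrative):

// Newly extracted service handles its route; the monolith stays the default
app.use('/api/users', createProxyMiddleware({ target: 'http://user-service:3001', changeOrigin: true }))
app.use('/', createProxyMiddleware({ target: 'http://legacy-monolith:8080', changeOrigin: true }))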
