Building Robust Backend Systems with Node.js and MySQL
Design and implement scalable, secure backend APIs with Node.js, Express, and MySQL

Node.js and MySQL remain one of the most reliable and battle-tested combinations for building backend systems. Node.js provides an event-driven, non-blocking I/O model that handles concurrent requests efficiently, while MySQL delivers the relational data integrity that business applications demand. Together, they form a foundation that powers everything from startup MVPs to enterprise platforms handling millions of requests per day.
This guide walks through building a production-grade backend API, covering project structure, database design, authentication, error handling, and deployment.
Express.js Project Structure
A well-organized project structure is the foundation of a maintainable backend. Separate concerns clearly and establish conventions early.
project-root/
src/
config/
database.js
environment.js
logger.js
middleware/
auth.js
errorHandler.js
rateLimiter.js
validator.js
models/
User.js
Product.js
Order.js
index.js
routes/
auth.routes.js
users.routes.js
products.routes.js
orders.routes.js
index.js
services/
auth.service.js
user.service.js
product.service.js
email.service.js
utils/
ApiError.js
asyncHandler.js
pagination.js
app.js
server.js
migrations/
seeders/
tests/
.env
.env.example
package.json

The entry point sets up Express with essential middleware:
// src/app.js — Express application wiring (consumed by server.js and tests).
const express = require('express');
const cors = require('cors');
const helmet = require('helmet');
const morgan = require('morgan');
const { errorHandler } = require('./middleware/errorHandler');
const routes = require('./routes');

const app = express();

// Security headers first, then CORS restricted to the configured origins.
const allowedOrigins = process.env.ALLOWED_ORIGINS?.split(',') || 'http://localhost:3000';
app.use(helmet());
app.use(cors({ origin: allowedOrigins, credentials: true }));

// Body parsing: JSON (capped at 10mb) and form-encoded payloads.
app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true }));

// HTTP request logging: Apache-style lines in production, terse output in dev.
app.use(morgan(process.env.NODE_ENV === 'production' ? 'combined' : 'dev'));

// Liveness probe for load balancers and container orchestrators.
app.get('/health', (req, res) => {
  res.json({ status: 'ok', timestamp: new Date().toISOString() });
});

// Versioned API surface.
app.use('/api/v1', routes);

// Centralized error handler must be registered after all routes.
app.use(errorHandler);
module.exports = app;

RESTful API Design
Design your API endpoints following REST conventions. Use nouns for resources, HTTP methods for actions, and consistent response formats.
// src/routes/products.routes.js — product CRUD endpoints.
const router = require('express').Router();
const { authenticate, authorize } = require('../middleware/auth');
const { validate } = require('../middleware/validator');
const { createProductSchema, updateProductSchema } = require('../validators/product');
const productController = require('../controllers/product.controller');

// Write operations require an authenticated admin.
const adminOnly = [authenticate, authorize('admin')];

// Public reads.
router.get('/', productController.getAll);
router.get('/:id', productController.getById);

// Admin-only writes, validated before they reach the controller.
router.post('/', ...adminOnly, validate(createProductSchema), productController.create);
router.put('/:id', ...adminOnly, validate(updateProductSchema), productController.update);
router.delete('/:id', ...adminOnly, productController.delete);
module.exports = router;

Controllers should be thin, delegating business logic to service classes:
// src/controllers/product.controller.js
const productService = require('../services/product.service');
const { asyncHandler } = require('../utils/asyncHandler');
exports.getAll = asyncHandler(async (req, res) => {
const { page = 1, limit = 20, sort = 'created_at', order = 'DESC', search } = req.query;
const result = await productService.findAll({
page: parseInt(page),
limit: Math.min(parseInt(limit), 100),
sort,
order,
search,
});
res.json({
success: true,
data: result.products,
pagination: {
page: result.page,
limit: result.limit,
total: result.total,
totalPages: result.totalPages,
},
});
});
exports.create = asyncHandler(async (req, res) => {
const product = await productService.create(req.body);
res.status(201).json({
success: true,
data: product,
});
});MySQL Connection Pooling with mysql2
Connection pooling is critical for performance. The mysql2 package provides a Promise-based API with prepared statements and connection pooling out of the box.
// src/config/database.js — shared mysql2 connection pool (promise API).
const mysql = require('mysql2/promise');
const logger = require('./logger');

const pool = mysql.createPool({
  host: process.env.DB_HOST || 'localhost',
  port: parseInt(process.env.DB_PORT, 10) || 3306, // explicit radix; NaN falls back
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  database: process.env.DB_NAME,
  waitForConnections: true, // queue requests instead of erroring when saturated
  connectionLimit: parseInt(process.env.DB_POOL_SIZE, 10) || 10,
  queueLimit: 0, // unbounded wait queue
  enableKeepAlive: true,
  keepAliveInitialDelay: 30000,
  timezone: '+00:00', // read/write timestamps as UTC
  // Map TINYINT(1) columns to real booleans instead of 0/1 numbers.
  typeCast: function (field, next) {
    if (field.type === 'TINY' && field.length === 1) {
      return field.string() === '1';
    }
    return next();
  },
});

// Fail fast at startup if the database is unreachable.
pool.getConnection()
  .then(conn => {
    logger.info('MySQL connected successfully');
    conn.release();
  })
  .catch(err => {
    logger.error('MySQL connection failed:', err.message);
    process.exit(1);
  });
module.exports = pool;

Always use parameterized queries to prevent SQL injection:
// NEVER do this
const query = `SELECT * FROM users WHERE email = '${email}'`;
// ALWAYS use parameterized queries
const [rows] = await pool.execute(
'SELECT id, email, first_name, last_name FROM users WHERE email = ?',
[email]
);Sequelize ORM
For applications with complex data relationships, Sequelize provides a full-featured ORM with model definitions, associations, migrations, and query building.
// src/models/Product.js — Sequelize model for the `products` table.
const { DataTypes } = require('sequelize');
const sequelize = require('../config/sequelize');

const Product = sequelize.define('Product', {
  // Client-generated UUID v4 primary key.
  id: {
    type: DataTypes.UUID,
    defaultValue: DataTypes.UUIDV4,
    primaryKey: true,
  },
  name: {
    type: DataTypes.STRING(255),
    allowNull: false,
    validate: {
      notEmpty: true,
      len: [2, 255],
    },
  },
  description: {
    type: DataTypes.TEXT,
    allowNull: true,
  },
  // DECIMAL avoids floating-point rounding on money values.
  price: {
    type: DataTypes.DECIMAL(10, 2),
    allowNull: false,
    validate: {
      min: 0,
    },
  },
  sku: {
    type: DataTypes.STRING(100),
    unique: true,
    allowNull: false,
  },
  stock_quantity: {
    type: DataTypes.INTEGER,
    defaultValue: 0,
    validate: {
      min: 0,
    },
  },
  is_active: {
    type: DataTypes.BOOLEAN,
    defaultValue: true,
  },
}, {
  tableName: 'products',
  timestamps: true,  // created_at / updated_at maintained by Sequelize
  underscored: true, // snake_case column names
  paranoid: true,    // soft deletes via deleted_at
  indexes: [
    { fields: ['sku'], unique: true },
    { fields: ['is_active'] },
    { fields: ['price'] },
    { fields: ['created_at'] },
  ],
});

// Associations are wired up centrally by models/index.js.
Product.associate = (models) => {
  Product.belongsTo(models.Category, { foreignKey: 'category_id' });
  Product.hasMany(models.OrderItem, { foreignKey: 'product_id' });
  Product.belongsToMany(models.Tag, { through: 'product_tags' });
};
module.exports = Product;

Authentication with JWT
Implement stateless authentication using JSON Web Tokens. Use access tokens for API requests and refresh tokens for session management.
// src/services/auth.service.js
const bcrypt = require('bcrypt');
const jwt = require('jsonwebtoken');
const { User } = require('../models');
const ApiError = require('../utils/ApiError');
const SALT_ROUNDS = 12;
const ACCESS_TOKEN_EXPIRY = '15m';
const REFRESH_TOKEN_EXPIRY = '7d';
exports.register = async ({ email, password, firstName, lastName }) => {
const existingUser = await User.findOne({ where: { email } });
if (existingUser) {
throw new ApiError(409, 'Email already registered');
}
const hashedPassword = await bcrypt.hash(password, SALT_ROUNDS);
const user = await User.create({
email,
password: hashedPassword,
first_name: firstName,
last_name: lastName,
});
const tokens = generateTokens(user);
return { user: sanitizeUser(user), ...tokens };
};
exports.login = async ({ email, password }) => {
const user = await User.findOne({ where: { email } });
if (!user || !(await bcrypt.compare(password, user.password))) {
throw new ApiError(401, 'Invalid email or password');
}
const tokens = generateTokens(user);
return { user: sanitizeUser(user), ...tokens };
};
function generateTokens(user) {
const accessToken = jwt.sign(
{ userId: user.id, email: user.email, role: user.role },
process.env.JWT_SECRET,
{ expiresIn: ACCESS_TOKEN_EXPIRY }
);
const refreshToken = jwt.sign(
{ userId: user.id, tokenType: 'refresh' },
process.env.JWT_REFRESH_SECRET,
{ expiresIn: REFRESH_TOKEN_EXPIRY }
);
return { accessToken, refreshToken };
}
function sanitizeUser(user) {
const { password, ...userData } = user.toJSON();
return userData;
}The authentication middleware verifies tokens on protected routes:
// src/middleware/auth.js
const jwt = require('jsonwebtoken');
const ApiError = require('../utils/ApiError');
exports.authenticate = (req, res, next) => {
const authHeader = req.headers.authorization;
if (!authHeader?.startsWith('Bearer ')) {
throw new ApiError(401, 'Access token required');
}
const token = authHeader.split(' ')[1];
try {
const decoded = jwt.verify(token, process.env.JWT_SECRET);
req.user = decoded;
next();
} catch (error) {
if (error.name === 'TokenExpiredError') {
throw new ApiError(401, 'Access token expired');
}
throw new ApiError(401, 'Invalid access token');
}
};
exports.authorize = (...roles) => {
return (req, res, next) => {
if (!roles.includes(req.user.role)) {
throw new ApiError(403, 'Insufficient permissions');
}
next();
};
};Input Validation with Joi
Validate all incoming data before it reaches your business logic. Joi provides a powerful schema-based validation library.
// src/validators/product.js
const Joi = require('joi');
exports.createProductSchema = Joi.object({
name: Joi.string().min(2).max(255).required(),
description: Joi.string().max(5000).optional(),
price: Joi.number().positive().precision(2).required(),
sku: Joi.string().alphanum().max(100).required(),
stock_quantity: Joi.number().integer().min(0).default(0),
category_id: Joi.string().uuid().required(),
tags: Joi.array().items(Joi.string().uuid()).optional(),
is_active: Joi.boolean().default(true),
});
exports.updateProductSchema = Joi.object({
name: Joi.string().min(2).max(255),
description: Joi.string().max(5000).allow(null),
price: Joi.number().positive().precision(2),
stock_quantity: Joi.number().integer().min(0),
category_id: Joi.string().uuid(),
is_active: Joi.boolean(),
}).min(1);
// src/middleware/validator.js
exports.validate = (schema) => {
return (req, res, next) => {
const { error, value } = schema.validate(req.body, {
abortEarly: false,
stripUnknown: true,
});
if (error) {
const errors = error.details.map(detail => ({
field: detail.path.join('.'),
message: detail.message,
}));
return res.status(400).json({
success: false,
message: 'Validation failed',
errors,
});
}
req.body = value;
next();
};
};Error Handling Middleware
Centralized error handling ensures consistent error responses and prevents sensitive information from leaking to clients.
// src/utils/ApiError.js
/**
 * Operational HTTP error carrying a status code and optional field errors.
 * Thrown by services/middleware and translated to a response by errorHandler.
 */
class ApiError extends Error {
  /**
   * @param {number} statusCode - HTTP status to respond with.
   * @param {string} message - Human-readable error message.
   * @param {Array} [errors] - Optional per-field validation details.
   */
  constructor(statusCode, message, errors = []) {
    super(message);
    this.name = 'ApiError'; // Error subclasses otherwise report name "Error"
    this.statusCode = statusCode;
    this.errors = errors;
    this.isOperational = true; // marks expected errors vs programmer bugs
    Error.captureStackTrace(this, this.constructor);
  }
}
module.exports = ApiError;
// src/utils/asyncHandler.js
exports.asyncHandler = (fn) => (req, res, next) => {
Promise.resolve(fn(req, res, next)).catch(next);
};
// src/middleware/errorHandler.js
const logger = require('../config/logger');
exports.errorHandler = (err, req, res, next) => {
let statusCode = err.statusCode || 500;
let message = err.message || 'Internal Server Error';
// Sequelize validation errors
if (err.name === 'SequelizeValidationError') {
statusCode = 400;
message = 'Validation error';
}
// Sequelize unique constraint
if (err.name === 'SequelizeUniqueConstraintError') {
statusCode = 409;
message = 'Resource already exists';
}
// Log server errors
if (statusCode >= 500) {
logger.error({
message: err.message,
stack: err.stack,
url: req.originalUrl,
method: req.method,
ip: req.ip,
});
}
res.status(statusCode).json({
success: false,
message,
...(process.env.NODE_ENV === 'development' && { stack: err.stack }),
...(err.errors?.length && { errors: err.errors }),
});
};Query Optimization and Indexing
Efficient database queries are crucial for backend performance. Follow these strategies to keep your MySQL queries fast.
- Use indexes on frequently queried columns - Add indexes on columns used in WHERE clauses, JOIN conditions, and ORDER BY statements. Composite indexes should follow the leftmost prefix rule.
- Avoid SELECT * - Always specify the columns you need. This reduces data transfer and allows MySQL to use covering indexes.
- Use EXPLAIN to analyze queries - Run EXPLAIN before your queries to understand the execution plan. Look for full table scans, filesort operations, and temporary tables.
- Optimize pagination - For large datasets, use cursor-based pagination (keyset pagination) instead of OFFSET, which becomes slow at high page numbers.
// Inefficient OFFSET pagination
const [rows] = await pool.execute(
'SELECT * FROM products ORDER BY created_at DESC LIMIT ? OFFSET ?',
[limit, (page - 1) * limit]
);
// Efficient cursor-based pagination
const [rows] = await pool.execute(
`SELECT id, name, price, created_at FROM products
WHERE created_at < ?
ORDER BY created_at DESC
LIMIT ?`,
[cursor, limit]
);Database Migrations
Never modify production databases manually. Use Sequelize migrations for version-controlled schema changes.
// migrations/20250101000000-create-products-table.js
module.exports = {
up: async (queryInterface, Sequelize) => {
await queryInterface.createTable('products', {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true,
},
name: {
type: Sequelize.STRING(255),
allowNull: false,
},
price: {
type: Sequelize.DECIMAL(10, 2),
allowNull: false,
},
sku: {
type: Sequelize.STRING(100),
unique: true,
allowNull: false,
},
category_id: {
type: Sequelize.UUID,
references: {
model: 'categories',
key: 'id',
},
onUpdate: 'CASCADE',
onDelete: 'SET NULL',
},
created_at: {
type: Sequelize.DATE,
defaultValue: Sequelize.literal('CURRENT_TIMESTAMP'),
},
updated_at: {
type: Sequelize.DATE,
defaultValue: Sequelize.literal('CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP'),
},
});
await queryInterface.addIndex('products', ['sku']);
await queryInterface.addIndex('products', ['category_id']);
await queryInterface.addIndex('products', ['created_at']);
},
down: async (queryInterface) => {
await queryInterface.dropTable('products');
},
};Rate Limiting
Protect your API from abuse with rate limiting. Use express-rate-limit with a Redis store for distributed deployments.
// src/middleware/rateLimiter.js
const rateLimit = require('express-rate-limit');
const RedisStore = require('rate-limit-redis');
const redis = require('../config/redis');
exports.apiLimiter = rateLimit({
store: new RedisStore({ sendCommand: (...args) => redis.call(...args) }),
windowMs: 15 * 60 * 1000, // 15 minutes
max: 100,
message: {
success: false,
message: 'Too many requests, please try again later',
},
standardHeaders: true,
legacyHeaders: false,
});
exports.authLimiter = rateLimit({
store: new RedisStore({ sendCommand: (...args) => redis.call(...args) }),
windowMs: 15 * 60 * 1000,
max: 5,
message: {
success: false,
message: 'Too many login attempts, please try again later',
},
skipSuccessfulRequests: true,
});Logging with Winston
Production applications need structured logging with multiple transports and log levels.
// src/config/logger.js — structured Winston logger with rotating file transports.
const winston = require('winston');

// JSON logs with timestamps and full error stacks, tagged per service.
const logFormat = winston.format.combine(
  winston.format.timestamp(),
  winston.format.errors({ stack: true }),
  winston.format.json()
);

const logger = winston.createLogger({
  level: process.env.LOG_LEVEL || 'info',
  format: logFormat,
  defaultMeta: { service: 'api-server' },
  transports: [
    // Errors get their own file for quick triage.
    new winston.transports.File({
      filename: 'logs/error.log',
      level: 'error',
      maxsize: 5242880, // 5MB per file before rotation
      maxFiles: 5,
    }),
    new winston.transports.File({
      filename: 'logs/combined.log',
      maxsize: 5242880,
      maxFiles: 10,
    }),
  ],
});

// Human-readable colored console output outside production.
if (process.env.NODE_ENV !== 'production') {
  logger.add(new winston.transports.Console({
    format: winston.format.combine(
      winston.format.colorize(),
      winston.format.simple()
    ),
  }));
}
module.exports = logger;

Docker Deployment
Containerize your application for consistent deployments across environments.
# Dockerfile — two-stage build: install prod deps, then copy into a clean runtime image.
FROM node:20-alpine AS builder
WORKDIR /app
COPY package*.json ./
# --omit=dev is the current flag; --only=production is deprecated in npm 8+.
RUN npm ci --omit=dev

FROM node:20-alpine
WORKDIR /app
# Run as an unprivileged user instead of root.
RUN addgroup -g 1001 -S appgroup && \
    adduser -S appuser -u 1001 -G appgroup
COPY --from=builder /app/node_modules ./node_modules
COPY src/ ./src/
COPY migrations/ ./migrations/
COPY package.json ./
USER appuser
EXPOSE 3000
HEALTHCHECK --interval=30s --timeout=3s \
  CMD wget --no-verbose --tries=1 --spider http://localhost:3000/health || exit 1
CMD ["node", "src/server.js"]

# docker-compose.yml
version: '3.8'
services:
  api:
    build: .
    ports:
      - "3000:3000"
    environment:
      - NODE_ENV=production
      - DB_HOST=mysql
      - DB_USER=app_user
      - DB_PASSWORD_FILE=/run/secrets/db_password
      - DB_NAME=myapp
    # Secrets must be granted per service or /run/secrets/* is never mounted.
    secrets:
      - db_password
    depends_on:
      mysql:
        condition: service_healthy
    restart: unless-stopped
  mysql:
    image: mysql:8.0
    environment:
      MYSQL_ROOT_PASSWORD_FILE: /run/secrets/db_root_password
      MYSQL_DATABASE: myapp
      MYSQL_USER: app_user
      MYSQL_PASSWORD_FILE: /run/secrets/db_password
    secrets:
      - db_root_password
      - db_password
    volumes:
      - mysql_data:/var/lib/mysql
    healthcheck:
      test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
      interval: 10s
      timeout: 5s
      retries: 5

# Top-level secrets definition — required for the *_FILE references above.
secrets:
  db_password:
    file: ./secrets/db_password.txt
  db_root_password:
    file: ./secrets/db_root_password.txt

volumes:
  mysql_data:

Conclusion
Building a robust Node.js and MySQL backend requires attention to architecture, security, performance, and operational concerns. By establishing a clean project structure, implementing proper authentication and validation, optimizing database queries, and containerizing your deployment, you create a backend that is secure, performant, and maintainable. Start with the fundamentals outlined in this guide, measure your application's performance under realistic load, and iterate on the bottlenecks you discover. The patterns presented here have been proven across thousands of production applications and will serve as a solid foundation for your backend systems.