Database Testing Patterns
A practical guide to testing database interactions in Node.js covering repository pattern testing, migration testing, transaction testing, and test database management.
Database Testing Patterns
Database tests answer the question: does your application correctly read from and write to the database? This seems simple, but database interactions are where most production bugs live — wrong queries, missing indexes, broken migrations, transaction isolation issues, and data type mismatches.
The challenge is testing against a real database without tests being slow, flaky, or dependent on shared state. This guide covers patterns that make database testing reliable, fast, and maintainable.
Prerequisites
- Node.js installed (v16+)
- PostgreSQL or MongoDB installed locally (or via Docker)
- A Node.js project with database interactions
Test Database Setup
Dedicated Test Database
Never run tests against your development or production database. Create a separate database for testing.
# PostgreSQL
createdb myapp_test
# Or via psql
psql -c "CREATE DATABASE myapp_test"
// config/database.js
// Maps each NODE_ENV to its database settings and exports the active one.
// Tests get a dedicated database so they can never touch dev/prod data.
const env = process.env.NODE_ENV || "development";

// Builds a settings object: prefer the named env var, else the local default.
function settings(primaryVar, fallbackUrl) {
  return { connectionString: process.env[primaryVar] || fallbackUrl };
}

const configs = {
  development: settings("DATABASE_URL", "postgresql://localhost:5432/myapp_dev"),
  test: settings("TEST_DATABASE_URL", "postgresql://localhost:5432/myapp_test"),
  // Production has no local fallback; DATABASE_URL must be provided as-is.
  production: { connectionString: process.env.DATABASE_URL }
};

module.exports = configs[env];
Docker for Consistent Test Databases
# docker-compose.test.yml
# Disposable Postgres instance used only by the test suite.
version: "3.8"
services:
test-db:
image: postgres:15
environment:
POSTGRES_USER: test
POSTGRES_PASSWORD: test
POSTGRES_DB: myapp_test
ports:
# Mapped to 5433 so it cannot collide with a local Postgres on 5432.
- "5433:5432"
tmpfs:
- /var/lib/postgresql/data # In-memory for speed
# Start the container, then point the test run at it:
docker compose -f docker-compose.test.yml up -d
TEST_DATABASE_URL=postgresql://test:test@localhost:5433/myapp_test npm test
Using tmpfs keeps the database in memory, making tests significantly faster since no disk I/O is needed.
Pattern 1: Repository Pattern Testing
The repository pattern wraps database operations in a clean interface. Test the repository against a real database.
// userRepository.js
// Data-access layer for the users table. Every method returns a Promise
// backed by parameterized queries against the shared db module.
const db = require("./db");

class UserRepository {
  // Inserts a row; role defaults to "user". Resolves with the created row
  // (including the database-generated id and created_at).
  async create(userData) {
    const result = await db.query(
      "INSERT INTO users (name, email, role, created_at) VALUES ($1, $2, $3, NOW()) RETURNING *",
      [userData.name, userData.email, userData.role || "user"]
    );
    return result.rows[0];
  }

  // Resolves with the row for `id`, or null when no such user exists.
  async findById(id) {
    const result = await db.query("SELECT * FROM users WHERE id = $1", [id]);
    return result.rows[0] || null;
  }

  // Resolves with the row for `email`, or null when no such user exists.
  async findByEmail(email) {
    const result = await db.query("SELECT * FROM users WHERE email = $1", [email]);
    return result.rows[0] || null;
  }

  // Applies a whitelisted partial update. Keys outside the whitelist are
  // silently ignored; an update with no usable keys is a plain read.
  // Resolves with the updated row, or null when `id` does not exist.
  async update(id, updates) {
    const allowed = ["name", "email", "role"];
    const fields = [];
    const values = [];
    for (const key of Object.keys(updates)) {
      if (allowed.indexOf(key) !== -1) {
        values.push(updates[key]);
        // values.length is the 1-based position of the value just pushed.
        fields.push(key + " = $" + values.length);
      }
    }
    if (fields.length === 0) return this.findById(id);
    fields.push("updated_at = NOW()");
    values.push(id); // id becomes the final placeholder
    const sql = "UPDATE users SET " + fields.join(", ") + " WHERE id = $" + values.length + " RETURNING *";
    const result = await db.query(sql, values);
    return result.rows[0] || null;
  }

  // Resolves true when a row was deleted, false when `id` did not exist.
  async delete(id) {
    const result = await db.query("DELETE FROM users WHERE id = $1 RETURNING id", [id]);
    return result.rowCount > 0;
  }

  // Paginated listing, newest first. Resolves with
  // { data, total, page, totalPages }; defaults: page 1, 20 per page.
  async findAll(options) {
    const page = (options && options.page) || 1;
    const limit = (options && options.limit) || 20;
    const offset = (page - 1) * limit;
    // Page of rows and the total count are independent, so run in parallel.
    const [rowsResult, countResult] = await Promise.all([
      db.query("SELECT * FROM users ORDER BY created_at DESC LIMIT $1 OFFSET $2", [limit, offset]),
      db.query("SELECT COUNT(*) FROM users")
    ]);
    const total = parseInt(countResult.rows[0].count, 10);
    return {
      data: rowsResult.rows,
      total: total,
      page: page,
      totalPages: Math.ceil(total / limit)
    };
  }
}

module.exports = UserRepository;
Testing the Repository
// userRepository.test.js
const db = require("../db");
const UserRepository = require("../userRepository");

const repo = new UserRepository();

// One connection for the whole file, closed after the last test.
beforeAll(() => db.connect(process.env.TEST_DATABASE_URL));
afterAll(() => db.end());

// Every test starts from an empty users table with the id sequence reset.
beforeEach(() => db.query("TRUNCATE users RESTART IDENTITY CASCADE"));
describe("UserRepository", function() {
  describe("create", function() {
    test("inserts a user and returns it with an id", async function() {
      const user = await repo.create({ name: "Alice", email: "[email protected]" });
      expect(user.id).toBeDefined();
      expect(user.name).toBe("Alice");
      expect(user.email).toBe("[email protected]");
      expect(user.role).toBe("user"); // default applied by the repository
      expect(user.created_at).toBeDefined();
    });

    test("enforces unique email constraint", async function() {
      await repo.create({ name: "Alice", email: "[email protected]" });
      // Assert on the SECOND insert only. A then(throw sentinel).catch(assert)
      // chain swallows its own "Should have thrown" sentinel, and would have
      // passed falsely had the FIRST insert rejected with 23505.
      await expect(
        repo.create({ name: "Bob", email: "[email protected]" })
      ).rejects.toMatchObject({ code: "23505" }); // PostgreSQL unique violation
    });

    test("rejects null name", async function() {
      await expect(
        repo.create({ name: null, email: "[email protected]" })
      ).rejects.toMatchObject({ code: "23502" }); // PostgreSQL not null violation
    });
  });

  describe("findById", function() {
    test("returns the user when found", async function() {
      const created = await repo.create({ name: "Alice", email: "[email protected]" });
      const found = await repo.findById(created.id);
      expect(found.name).toBe("Alice");
    });

    test("returns null when not found", async function() {
      expect(await repo.findById(99999)).toBeNull();
    });
  });

  describe("update", function() {
    test("updates specified fields only", async function() {
      const user = await repo.create({ name: "Alice", email: "[email protected]", role: "user" });
      const updated = await repo.update(user.id, { name: "Alice Updated" });
      expect(updated.name).toBe("Alice Updated");
      expect(updated.email).toBe("[email protected]"); // Unchanged
      expect(updated.role).toBe("user"); // Unchanged
      expect(updated.updated_at).toBeDefined();
    });

    test("ignores unknown fields", async function() {
      const user = await repo.create({ name: "Alice", email: "[email protected]" });
      const updated = await repo.update(user.id, { name: "Updated", hackField: "malicious" });
      expect(updated.name).toBe("Updated");
    });
  });

  describe("delete", function() {
    test("removes the user", async function() {
      const user = await repo.create({ name: "Alice", email: "[email protected]" });
      expect(await repo.delete(user.id)).toBe(true);
    });

    test("returns false for non-existent user", async function() {
      expect(await repo.delete(99999)).toBe(false);
    });
  });

  describe("findAll", function() {
    beforeEach(function() {
      // 25 rows -> three pages at the limit of 10 used below.
      const users = [];
      for (let i = 1; i <= 25; i++) {
        users.push(repo.create({
          name: "User " + i,
          email: "user" + i + "@example.com"
        }));
      }
      return Promise.all(users);
    });

    test("paginates results", async function() {
      const result = await repo.findAll({ page: 1, limit: 10 });
      expect(result.data).toHaveLength(10);
      expect(result.total).toBe(25);
      expect(result.page).toBe(1);
      expect(result.totalPages).toBe(3);
    });

    test("returns last page with fewer results", async function() {
      const result = await repo.findAll({ page: 3, limit: 10 });
      expect(result.data).toHaveLength(5);
      expect(result.page).toBe(3);
    });

    test("defaults to page 1 with 20 results", async function() {
      const result = await repo.findAll({});
      expect(result.data).toHaveLength(20);
      expect(result.page).toBe(1);
    });
  });
});
Pattern 2: Transaction Rollback Isolation
Wrap each test in a transaction and roll it back afterward. This is the fastest cleanup method.
// test/helpers/transaction.js
var db = require("../../src/db");
var activeClient = null;
function beginTransaction() {
return db.pool.connect().then(function(client) {
activeClient = client;
return client.query("BEGIN");
});
}
function rollbackTransaction() {
if (!activeClient) return Promise.resolve();
return activeClient.query("ROLLBACK").then(function() {
activeClient.release();
activeClient = null;
});
}
function getClient() {
return activeClient;
}
module.exports = {
beginTransaction: beginTransaction,
rollbackTransaction: rollbackTransaction,
getClient: getClient
};
const tx = require("./helpers/transaction");

describe("UserRepository with transaction isolation", function() {
  // Open a fresh transaction per test; discard all of its writes afterward.
  beforeEach(() => tx.beginTransaction());
  afterEach(() => tx.rollbackTransaction());

  test("creates a user within a transaction", async function() {
    const client = tx.getClient();
    const result = await client.query(
      "INSERT INTO users (name, email) VALUES ($1, $2) RETURNING *",
      ["Alice", "[email protected]"]
    );
    expect(result.rows[0].name).toBe("Alice");
    // The rollback in afterEach means Alice never persists past this test.
  });
});
Pattern 3: Migration Testing
Verify that database migrations run correctly and are reversible.
// migrations.test.js
// Verifies the migration runner and the schema it produces.
const db = require("../db");
const migrate = require("../migrate");

describe("Database Migrations", function() {
  beforeAll(() => db.connect(process.env.TEST_DATABASE_URL));
  afterAll(() => db.end());

  test("all migrations run without error", async function() {
    const result = await migrate.up();
    expect(result.applied).toBeGreaterThan(0);
  });

  test("migrations are reversible", async function() {
    // up -> down -> up proves every migration can be rolled back and re-run.
    await migrate.up();
    await migrate.down();
    const result = await migrate.up();
    expect(result.applied).toBeGreaterThan(0);
  });

  test("users table has expected columns", async function() {
    await migrate.up();
    const result = await db.query(
      "SELECT column_name, data_type, is_nullable " +
      "FROM information_schema.columns " +
      "WHERE table_name = 'users' " +
      "ORDER BY ordinal_position"
    );
    // Index rows by column name for direct assertions below.
    const columns = {};
    for (const row of result.rows) {
      columns[row.column_name] = row;
    }
    expect(columns.id).toBeDefined();
    expect(columns.id.data_type).toBe("integer");
    expect(columns.name).toBeDefined();
    expect(columns.name.is_nullable).toBe("NO");
    expect(columns.email).toBeDefined();
    expect(columns.email.is_nullable).toBe("NO");
    expect(columns.created_at).toBeDefined();
  });

  test("unique index exists on email", async function() {
    await migrate.up();
    const result = await db.query(
      "SELECT indexname FROM pg_indexes " +
      "WHERE tablename = 'users' AND indexdef LIKE '%UNIQUE%email%'"
    );
    expect(result.rows.length).toBeGreaterThan(0);
  });
});
Pattern 4: Query Testing
Test complex queries in isolation to verify they return correct results.
// queries.test.js
// Exercises non-trivial SQL (aggregates, range filters, pattern matching)
// against a seeded dataset.
var db = require("../db");

describe("Complex Queries", function() {
  beforeAll(function() {
    return db.connect(process.env.TEST_DATABASE_URL);
  });

  beforeEach(async function() {
    await db.query("TRUNCATE users, orders, order_items RESTART IDENTITY CASCADE");
    // Seed users BEFORE orders. The previous Promise.all ran both INSERTs
    // concurrently, racing the foreign-key check on orders.user_id — the
    // orders insert could land first and fail intermittently with 23503.
    await db.query("INSERT INTO users (name, email) VALUES ('Alice', '[email protected]'), ('Bob', '[email protected]'), ('Carol', '[email protected]')");
    await db.query("INSERT INTO orders (user_id, total, status, created_at) VALUES (1, 100, 'completed', '2026-01-01'), (1, 200, 'completed', '2026-01-15'), (2, 50, 'pending', '2026-01-20'), (2, 150, 'completed', '2026-01-25')");
  });

  test("aggregates order totals by user", async function() {
    const result = await db.query(
      "SELECT u.name, COUNT(o.id) as order_count, SUM(o.total) as total_spent " +
      "FROM users u " +
      "LEFT JOIN orders o ON u.id = o.user_id AND o.status = 'completed' " +
      "GROUP BY u.id, u.name " +
      "ORDER BY total_spent DESC NULLS LAST"
    );
    expect(result.rows).toHaveLength(3);
    expect(result.rows[0].name).toBe("Alice");
    expect(parseFloat(result.rows[0].total_spent)).toBe(300);
    expect(parseInt(result.rows[0].order_count, 10)).toBe(2);
    expect(result.rows[1].name).toBe("Bob");
    // Bob's pending order is excluded by the join condition.
    expect(parseFloat(result.rows[1].total_spent)).toBe(150);
    expect(result.rows[2].name).toBe("Carol");
    expect(result.rows[2].total_spent).toBeNull(); // No orders
  });

  test("date range filtering works correctly", async function() {
    const result = await db.query(
      "SELECT * FROM orders WHERE created_at BETWEEN $1 AND $2 ORDER BY created_at",
      ["2026-01-10", "2026-01-20"]
    );
    expect(result.rows).toHaveLength(2);
  });

  test("full-text search on user names", async function() {
    const result = await db.query(
      "SELECT * FROM users WHERE name ILIKE $1",
      ["%ali%"]
    );
    expect(result.rows).toHaveLength(1);
    expect(result.rows[0].name).toBe("Alice");
  });
});
Pattern 5: Constraint Testing
Verify that database constraints protect data integrity.
// Resolves with the rejection reason of `promise`; fails (with a sentinel
// error) when the promise unexpectedly fulfills. Unlike chaining
// .then(throw).catch(assert), the sentinel cannot be swallowed by the
// assertion path, so "did not throw" is reported as exactly that.
function expectRejection(promise) {
  return promise.then(
    function() { throw new Error("Should have thrown"); },
    function(err) { return err; }
  );
}

describe("Database Constraints", function() {
  beforeEach(function() {
    return db.query("TRUNCATE users RESTART IDENTITY CASCADE");
  });

  test("email must be unique", async function() {
    await db.query("INSERT INTO users (name, email) VALUES ('A', '[email protected]')");
    // Only the second insert may fail. The old catch-all would also have
    // passed falsely if the FIRST insert rejected with 23505 (stale data).
    const err = await expectRejection(
      db.query("INSERT INTO users (name, email) VALUES ('B', '[email protected]')")
    );
    expect(err.code).toBe("23505");
    expect(err.constraint).toContain("email");
  });

  test("name cannot be null", async function() {
    const err = await expectRejection(
      db.query("INSERT INTO users (name, email) VALUES (NULL, '[email protected]')")
    );
    expect(err.code).toBe("23502");
  });

  test("foreign key prevents orphaned orders", async function() {
    const err = await expectRejection(
      db.query("INSERT INTO orders (user_id, total, status) VALUES (99999, 100, 'pending')")
    );
    expect(err.code).toBe("23503"); // Foreign key violation
  });

  test("cascade delete removes related records", async function() {
    const inserted = await db.query("INSERT INTO users (name, email) VALUES ('Alice', '[email protected]') RETURNING id");
    const userId = inserted.rows[0].id;
    await db.query("INSERT INTO orders (user_id, total, status) VALUES ($1, 100, 'pending')", [userId]);
    await db.query("DELETE FROM users WHERE id = $1", [userId]);
    const result = await db.query("SELECT COUNT(*) FROM orders");
    expect(parseInt(result.rows[0].count, 10)).toBe(0);
  });

  test("check constraint validates status values", async function() {
    // user_id 1 does not exist after TRUNCATE; PostgreSQL evaluates CHECK
    // constraints before foreign-key triggers, so 23514 is what fires here.
    const err = await expectRejection(
      db.query("INSERT INTO orders (user_id, total, status) VALUES (1, 100, 'invalid_status')")
    );
    expect(err.code).toBe("23514"); // Check constraint violation
  });
});
Pattern 6: MongoDB Testing
// mongoRepository.test.js
// Verifies MongoDB CRUD, aggregation, and index behavior against a real
// mongod instance.
var MongoClient = require("mongodb").MongoClient;

var client, db, collection;

beforeAll(function() {
  return MongoClient.connect(process.env.TEST_MONGO_URL || "mongodb://localhost:27017")
    .then(function(c) {
      client = c;
      db = client.db("myapp_test");
      collection = db.collection("users");
    });
});

afterAll(function() {
  return client.close();
});

beforeEach(async function() {
  await collection.deleteMany({});
  // Also drop indexes created by individual tests. The unique-email index
  // created below persists across runs; left in place, earlier tests that
  // insert documents WITHOUT `email` fail with E11000 (missing fields
  // index as null, and null duplicates violate the unique index).
  await collection.dropIndexes().catch(function(err) {
    // The collection does not exist until the first insert of a run.
    if (err.codeName !== "NamespaceNotFound") throw err;
  });
});

describe("MongoDB User Repository", function() {
  test("inserts a document", async function() {
    const result = await collection.insertOne({
      name: "Alice",
      email: "[email protected]",
      tags: ["developer", "nodejs"]
    });
    expect(result.insertedId).toBeDefined();
  });

  test("finds by query", async function() {
    await collection.insertMany([
      { name: "Alice", role: "admin" },
      { name: "Bob", role: "user" },
      { name: "Carol", role: "user" }
    ]);
    const users = await collection.find({ role: "user" }).toArray();
    expect(users).toHaveLength(2);
  });

  test("updates with $set", async function() {
    const inserted = await collection.insertOne({ name: "Alice", role: "user" });
    const result = await collection.updateOne(
      { _id: inserted.insertedId },
      { $set: { role: "admin" } }
    );
    expect(result.modifiedCount).toBe(1);
  });

  test("aggregation pipeline works", async function() {
    await collection.insertMany([
      { name: "Alice", department: "engineering", salary: 100000 },
      { name: "Bob", department: "engineering", salary: 95000 },
      { name: "Carol", department: "marketing", salary: 85000 }
    ]);
    const result = await collection.aggregate([
      { $group: { _id: "$department", avgSalary: { $avg: "$salary" }, count: { $sum: 1 } } },
      { $sort: { avgSalary: -1 } }
    ]).toArray();
    expect(result).toHaveLength(2);
    expect(result[0]._id).toBe("engineering");
    expect(result[0].avgSalary).toBe(97500);
    expect(result[0].count).toBe(2);
  });

  test("unique index prevents duplicates", async function() {
    await collection.createIndex({ email: 1 }, { unique: true });
    await collection.insertOne({ name: "Alice", email: "[email protected]" });
    // Assert on the SECOND insert only; the old throw/catch chain caught
    // its own sentinel and reported a confusing `undefined !== 11000`.
    const err = await collection.insertOne({ name: "Bob", email: "[email protected]" }).then(
      function() { throw new Error("Should have thrown"); },
      function(e) { return e; }
    );
    expect(err.code).toBe(11000); // MongoDB duplicate key
  });
});
Performance Testing Queries
describe("Query Performance", function() {
  beforeAll(function() {
    // Seed realistic data volume in ONE set-based INSERT. The previous
    // version fired 10,000 individual INSERTs through Promise.all, which
    // floods the connection pool and dominates suite runtime. Rows are
    // identical: every 10th user (i % 10 = 0) is an admin.
    return db.query(
      "INSERT INTO users (name, email, role) " +
      "SELECT 'User ' || i, 'user' || i || '@example.com', " +
      "CASE WHEN i % 10 = 0 THEN 'admin' ELSE 'user' END " +
      "FROM generate_series(0, 9999) AS i"
    );
  });

  test("indexed query completes within budget", async function() {
    const start = Date.now();
    const result = await db.query("SELECT * FROM users WHERE email = $1", ["[email protected]"]);
    const elapsed = Date.now() - start;
    expect(result.rows).toHaveLength(1);
    // NOTE(review): wall-clock budgets can flake on loaded CI runners;
    // treat a failure here as a prompt to check EXPLAIN, not as noise.
    expect(elapsed).toBeLessThan(50); // Should be fast with an index
  });

  test("pagination query scales with large datasets", async function() {
    const start = Date.now();
    const result = await db.query(
      "SELECT * FROM users ORDER BY created_at DESC LIMIT 20 OFFSET 5000"
    );
    const elapsed = Date.now() - start;
    expect(result.rows).toHaveLength(20);
    expect(elapsed).toBeLessThan(100);
  });

  test("EXPLAIN shows index usage", async function() {
    const result = await db.query("EXPLAIN SELECT * FROM users WHERE email = $1", ["[email protected]"]);
    const plan = result.rows.map(function(r) { return r["QUERY PLAN"]; }).join(" ");
    expect(plan).toContain("Index");
    expect(plan).not.toContain("Seq Scan");
  });
});
Common Issues and Troubleshooting
Tests are slow due to database setup
Truncating and reseeding tables takes time:
Fix: Use transaction rollback instead of truncation for per-test isolation. Use beforeAll for shared seed data that tests only read. Run PostgreSQL with tmpfs for in-memory storage.
Tests fail with "relation does not exist"
Migrations have not run on the test database:
Fix: Add migration step to your test setup. Run beforeAll that calls your migration script. In CI, run migrations before the test step.
Tests interfere with each other
One test modifies data that another test depends on:
Fix: Use beforeEach to reset state, not beforeAll. Give each test its own unique data. Use transaction rollback for complete isolation.
Database connection pool exhaustion
Too many tests open connections without releasing them:
Fix: Configure a small pool for testing (2-5 connections). Ensure connections are released in afterEach or afterAll. Use a single shared pool across all test files.
Best Practices
- Test against a real database, not mocks. Database mocks cannot verify SQL syntax, constraint violations, data types, or query performance. Use a real database instance for database tests.
- Use transaction rollback for isolation. It is faster than truncation and guarantees clean state. Each test runs in its own transaction that is rolled back.
- Test constraints and indexes. Verify that unique constraints reject duplicates, foreign keys prevent orphaned records, and indexes exist where expected. These are the database's safety net.
- Test migrations in CI. Run migrations up, then down, then up again. This verifies migrations are reversible and idempotent.
- Use Docker for consistent test databases. Docker guarantees the same database version and configuration across all environments. Use tmpfs for speed.
- Test with realistic data volumes. A query that works with 10 rows may fail with 10,000. Seed test databases with realistic data for performance tests.
- Verify query plans for critical queries. Use EXPLAIN to confirm indexes are used. A missing index causes no functional error but degrades performance.
- Keep test databases disposable. Test databases should be created and destroyed automatically. Never store important data in a test database.