Container Image Scanning in Pipelines
Complete guide to container image scanning in Azure DevOps pipelines, covering Trivy, Grype, Microsoft Defender for Containers, multi-stage image analysis, base image selection, vulnerability remediation, and building automated container security gates.
Container Image Scanning in Pipelines
Overview
Your application code passes every security scan, but the container image it ships in carries 200 OS-level vulnerabilities from the base image. Container image scanning catches what dependency scanning misses — operating system packages, runtime libraries, and embedded binaries that never appear in your package.json. I have seen production images running Alpine with known OpenSSL vulnerabilities simply because nobody scanned the final built artifact. Scanning the container image is scanning what actually runs in production.
Prerequisites
- Azure DevOps project with Azure Pipelines configured
- Docker installed on build agents (Docker 20.10+)
- Azure Container Registry (ACR) or Docker Hub account
- Node.js 16 or later for processing scan results
- Basic understanding of Dockerfile syntax and multi-stage builds
- Pipeline agent with at least 4GB RAM for scanning tools
Why Application Scanning Is Not Enough
A Node.js application might have zero npm vulnerabilities, but the container it runs in tells a different story:
# Your app: clean
npm audit
# found 0 vulnerabilities
# Your container: not clean
docker run --rm aquasec/trivy image node:20-bullseye 2>/dev/null | tail -5
# Total: 287 (UNKNOWN: 0, LOW: 189, MEDIUM: 72, HIGH: 21, CRITICAL: 5)
Those 5 critical vulnerabilities live in openssl, zlib, curl, and other system libraries that your Node.js runtime depends on. An attacker exploiting a buffer overflow in libcurl does not care that your package.json is pristine.
Image Layer Analysis
var exec = require("child_process").execSync;

// Walk the layer history of a local Docker image and print a
// size-annotated summary of the command that created each layer.
// Returns an array of { command, size } objects in `docker history`
// output order (newest layer first).
function analyzeImageLayers(imageName) {
  // "|||" is an unlikely byte sequence, so it is a safe field separator
  // even when the creating command itself contains pipes or spaces.
  var raw = exec(
    "docker history --no-trunc --format '{{.CreatedBy}}|||{{.Size}}' " + imageName,
    { encoding: "utf8" }
  );

  var layers = [];
  raw.trim().split("\n").forEach(function(entry) {
    var fields = entry.split("|||");
    layers.push({
      // Strip the shell wrapper and cap the command at 100 chars for display
      command: fields[0].replace("/bin/sh -c ", "").substring(0, 100),
      size: fields[1]
    });
  });

  console.log("Image layers for " + imageName + ":");
  for (var i = 0; i < layers.length; i++) {
    console.log(" [" + i + "] " + layers[i].size.padStart(10) + " " + layers[i].command);
  }
  return layers;
}
// Run Trivy (via its container image) against each image in the list and
// print a one-line severity summary per image. A failed scan is reported
// inline and does not abort the remaining scans.
function compareBaseImages(images) {
  images.forEach(function(image) {
    // Tally per severity; unknown severities still count via the || 0 guard
    var counts = { CRITICAL: 0, HIGH: 0, MEDIUM: 0, LOW: 0 };
    try {
      var json = exec("docker run --rm aquasec/trivy image --quiet --format json " + image, {
        encoding: "utf8",
        timeout: 120000 // two minutes per image; first run also pulls the DB
      });
      var parsed = JSON.parse(json);
      var resultSets = parsed.Results || [];
      resultSets.forEach(function(entry) {
        var vulns = entry.Vulnerabilities || [];
        vulns.forEach(function(v) {
          counts[v.Severity] = (counts[v.Severity] || 0) + 1;
        });
      });
      console.log(image.padEnd(30) + " C:" + counts.CRITICAL + " H:" + counts.HIGH + " M:" + counts.MEDIUM + " L:" + counts.LOW);
    } catch (e) {
      // Scan failure (pull error, timeout, bad JSON) - report and continue
      console.log(image.padEnd(30) + " Error: " + e.message.substring(0, 60));
    }
  });
}
// Compare common Node.js base images: full Debian, slim Debian variants,
// Alpine, and distroless, to show how base choice drives vulnerability count.
var CANDIDATE_IMAGES = [
  "node:20-bookworm",
  "node:20-bookworm-slim",
  "node:20-bullseye-slim",
  "node:20-alpine3.19",
  "gcr.io/distroless/nodejs20-debian12"
];
compareBaseImages(CANDIDATE_IMAGES);
Output:
node:20-bookworm C:3 H:18 M:67 L:142
node:20-bookworm-slim C:1 H:5 M:22 L:48
node:20-bullseye-slim C:2 H:9 M:31 L:65
node:20-alpine3.19 C:0 H:1 M:4 L:8
gcr.io/distroless/nodejs20-debian12 C:0 H:0 M:2 L:3
Trivy: The Go-To Container Scanner
Trivy from Aqua Security is the most widely used open-source container scanner. It is fast, comprehensive, and easy to integrate.
Basic Pipeline Integration
# azure-pipelines.yml - Trivy container scanning
stages:
- stage: BuildAndScan
jobs:
- job: ContainerSecurity
pool:
vmImage: 'ubuntu-latest'
steps:
- task: Docker@2
displayName: 'Build container image'
inputs:
command: 'build'
Dockerfile: 'Dockerfile'
tags: '$(Build.BuildId)'
arguments: '-t myapp:$(Build.BuildId)'
- script: |
curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin v0.50.0
displayName: 'Install Trivy'
- script: |
trivy image \
--format json \
--output $(Build.ArtifactStagingDirectory)/trivy-report.json \
--severity CRITICAL,HIGH,MEDIUM \
--ignore-unfixed \
--vuln-type os,library \
myapp:$(Build.BuildId)
displayName: 'Scan container image'
- script: |
node -e "
var fs = require('fs');
var report = JSON.parse(fs.readFileSync('$(Build.ArtifactStagingDirectory)/trivy-report.json', 'utf8'));
var totals = { CRITICAL: 0, HIGH: 0, MEDIUM: 0 };
var osVulns = [];
var libVulns = [];
(report.Results || []).forEach(function(result) {
(result.Vulnerabilities || []).forEach(function(v) {
totals[v.Severity] = (totals[v.Severity] || 0) + 1;
if (result.Class === 'os-pkgs') {
osVulns.push(v);
} else {
libVulns.push(v);
}
});
});
console.log('Container Scan Results:');
console.log(' Critical: ' + totals.CRITICAL);
console.log(' High: ' + totals.HIGH);
console.log(' Medium: ' + totals.MEDIUM);
console.log(' OS vulnerabilities: ' + osVulns.length);
console.log(' Library vulnerabilities: ' + libVulns.length);
// Show critical findings
var criticals = osVulns.concat(libVulns).filter(function(v) {
return v.Severity === 'CRITICAL';
});
if (criticals.length > 0) {
console.log('\\nCritical Findings:');
criticals.forEach(function(v) {
console.log(' ' + v.VulnerabilityID + ': ' + v.PkgName + ' ' + v.InstalledVersion);
console.log(' Fix: ' + (v.FixedVersion || 'none available'));
console.log(' ' + (v.Title || '').substring(0, 80));
});
}
if (totals.CRITICAL > 0) {
console.log('##vso[task.complete result=Failed;]Critical container vulnerabilities found');
}
"
displayName: 'Evaluate scan results'
- task: PublishBuildArtifacts@1
inputs:
pathToPublish: '$(Build.ArtifactStagingDirectory)/trivy-report.json'
artifactName: 'container-scan'
condition: always()
Scanning with .trivyignore
When vulnerabilities have no fix or are false positives, use a .trivyignore file:
# .trivyignore - Suppress known false positives and accepted risks
# No fix available, mitigated by network policy
CVE-2024-12345
# False positive: applies to Windows only, we run Linux
CVE-2024-67890
# Accepted risk: low impact, exploit requires local access
CVE-2025-11111
Multi-Stage Image Scanning
Scan the build stage to catch build-tool vulnerabilities and the final stage to catch runtime vulnerabilities:
steps:
# Build the multi-stage image
- script: |
docker build --target builder -t myapp:builder .
docker build -t myapp:$(Build.BuildId) .
displayName: 'Build multi-stage image'
# Scan builder stage (for supply chain attacks in build tools)
- script: |
trivy image --format json --output builder-scan.json --severity CRITICAL myapp:builder
displayName: 'Scan builder stage'
# Scan final image (what actually deploys)
- script: |
trivy image --format json --output runtime-scan.json --severity CRITICAL,HIGH myapp:$(Build.BuildId)
displayName: 'Scan runtime image'
- script: |
node -e "
var fs = require('fs');
function countVulns(file) {
var r = JSON.parse(fs.readFileSync(file, 'utf8'));
var count = 0;
(r.Results || []).forEach(function(result) {
count += (result.Vulnerabilities || []).length;
});
return count;
}
console.log('Builder stage vulnerabilities: ' + countVulns('builder-scan.json'));
console.log('Runtime image vulnerabilities: ' + countVulns('runtime-scan.json'));
console.log('(Only runtime image blocks deployment)');
"
displayName: 'Compare stage results'
Grype: Alternative Scanner
Grype from Anchore is another excellent scanner with strong SBOM integration.
steps:
- script: |
curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b /usr/local/bin
displayName: 'Install Grype'
- script: |
grype myapp:$(Build.BuildId) \
--output json \
--file $(Build.ArtifactStagingDirectory)/grype-report.json \
--fail-on critical
displayName: 'Run Grype scan'
continueOnError: true
- script: |
node -e "
var fs = require('fs');
var report = JSON.parse(fs.readFileSync('$(Build.ArtifactStagingDirectory)/grype-report.json', 'utf8'));
var matches = report.matches || [];
var bySeverity = {};
matches.forEach(function(m) {
var sev = m.vulnerability.severity;
bySeverity[sev] = (bySeverity[sev] || 0) + 1;
});
console.log('Grype Results: ' + matches.length + ' vulnerabilities');
Object.keys(bySeverity).forEach(function(sev) {
console.log(' ' + sev + ': ' + bySeverity[sev]);
});
"
displayName: 'Process Grype results'
SBOM Generation with Syft
Grype pairs with Syft for Software Bill of Materials (SBOM) generation:
steps:
- script: |
curl -sSfL https://raw.githubusercontent.com/anchore/syft/main/install.sh | sh -s -- -b /usr/local/bin
displayName: 'Install Syft'
- script: |
syft myapp:$(Build.BuildId) \
--output spdx-json \
--file $(Build.ArtifactStagingDirectory)/sbom.spdx.json
displayName: 'Generate SBOM'
- script: |
grype sbom:$(Build.ArtifactStagingDirectory)/sbom.spdx.json \
--output json \
--file $(Build.ArtifactStagingDirectory)/grype-from-sbom.json
displayName: 'Scan SBOM with Grype'
Microsoft Defender for Containers
For teams invested in Azure, Defender for Containers provides native ACR scanning with Azure Security Center integration.
Enabling Defender for ACR
# Enable Defender for Container Registries
az security pricing create \
--name ContainerRegistry \
--tier Standard
# Enable continuous scanning
az acr update \
--name myregistry \
--resource-group myapp-rg \
--set policies.quarantinePolicy.status=enabled
Pipeline Integration with ACR Scanning
steps:
- task: Docker@2
displayName: 'Build and push to ACR'
inputs:
containerRegistry: 'ACR-Connection'
repository: 'myapp'
command: 'buildAndPush'
Dockerfile: 'Dockerfile'
tags: '$(Build.BuildId)'
- task: AzureCLI@2
displayName: 'Wait for Defender scan'
inputs:
azureSubscription: 'Azure-Production'
scriptType: 'bash'
scriptLocation: 'inlineScript'
inlineScript: |
echo "Waiting for Defender scan to complete..."
RETRIES=0
MAX_RETRIES=12
while [ $RETRIES -lt $MAX_RETRIES ]; do
RESULTS=$(az security sub-assessment list \
--assessed-resource-id "/subscriptions/$SUB_ID/resourceGroups/myapp-rg/providers/Microsoft.ContainerRegistry/registries/myregistry" \
--assessment-name "dbd0cb49-b563-45e7-9724-889e799fa648" \
--query "[?contains(id, '$(Build.BuildId)')]" \
-o json 2>/dev/null)
if [ "$(echo $RESULTS | jq length)" -gt "0" ]; then
echo "Scan complete"
echo $RESULTS | jq '.[].status.severity' | sort | uniq -c
break
fi
RETRIES=$((RETRIES + 1))
echo "Scan pending... (attempt $RETRIES/$MAX_RETRIES)"
sleep 15
done
timeoutInMinutes: 5
- task: AzureCLI@2
displayName: 'Evaluate Defender results'
inputs:
azureSubscription: 'Azure-Production'
scriptType: 'bash'
scriptLocation: 'inlineScript'
inlineScript: |
CRITICAL=$(az security sub-assessment list \
--assessed-resource-id "/subscriptions/$SUB_ID/resourceGroups/myapp-rg/providers/Microsoft.ContainerRegistry/registries/myregistry" \
--assessment-name "dbd0cb49-b563-45e7-9724-889e799fa648" \
--query "[?contains(id, '$(Build.BuildId)') && status.severity=='High'].{id:id}" \
-o json | jq length)
echo "Critical/High findings: $CRITICAL"
if [ "$CRITICAL" -gt "0" ]; then
echo "##vso[task.complete result=Failed;]Defender found high-severity container vulnerabilities"
fi
Base Image Management
The biggest factor in container security is your base image choice.
Base Image Policy Enforcement
var fs = require("fs");
var exec = require("child_process").execSync;

// Approved base image policy.
// "preferred" images are the first choice; "approved" are acceptable
// fallbacks (e.g. when native modules do not build against musl).
var APPROVED_BASES = [
  { pattern: /^node:20-alpine3\.\d+$/, tier: "preferred" },
  { pattern: /^node:20-bookworm-slim$/, tier: "approved" },
  { pattern: /^gcr\.io\/distroless\/nodejs20/, tier: "preferred" },
  { pattern: /^mcr\.microsoft\.com\/cbl-mariner\/base\/nodejs/, tier: "approved" }
];

// Images that must never ship, each with a human-readable reason for the log.
var BLOCKED_BASES = [
  { pattern: /^node:\d+-stretch/, reason: "Debian Stretch is EOL" },
  { pattern: /^node:\d+-buster/, reason: "Debian Buster is approaching EOL" },
  { pattern: /^node:\d+$/, reason: "Full image has unnecessary packages - use slim or alpine" },
  { pattern: /^ubuntu:/, reason: "Use language-specific base images, not generic OS images" }
];

// Classify every base image referenced by a Dockerfile as BLOCKED,
// APPROVED, or UNAPPROVED against the policy above.
//
// Handles the full FROM grammar:
//   FROM [--platform=...] <image> [AS <stage>]
// (case-insensitive keyword, optional flags, optional stage alias), and
// skips FROM lines that reference a previously declared build stage,
// since those are not external base images.
//
// Returns an array of { image, status, reason?, tier? } objects.
function checkBaseImage(dockerfilePath) {
  var content = fs.readFileSync(dockerfilePath, "utf8");
  // Group 1: optional flags (e.g. --platform=...), group 2: image ref,
  // group 3: optional "AS <name>" stage alias.
  var fromRe = /^\s*FROM\s+((?:--\S+\s+)*)(\S+)(?:\s+AS\s+(\S+))?/gim;
  var stageNames = {};
  var results = [];
  var match;
  while ((match = fromRe.exec(content)) !== null) {
    var image = match[2];
    var stageAlias = match[3];
    if (stageAlias) {
      // Stage names are case-insensitive in Dockerfiles
      stageNames[stageAlias.toLowerCase()] = true;
    }
    // "FROM builder"-style references to earlier stages are not base images
    if (stageNames[image.toLowerCase()]) {
      continue;
    }
    var blocked = BLOCKED_BASES.find(function(b) {
      return b.pattern.test(image);
    });
    if (blocked) {
      results.push({
        image: image,
        status: "BLOCKED",
        reason: blocked.reason
      });
      continue;
    }
    var approved = APPROVED_BASES.find(function(a) {
      return a.pattern.test(image);
    });
    if (approved) {
      results.push({
        image: image,
        status: "APPROVED",
        tier: approved.tier
      });
    } else {
      results.push({
        image: image,
        status: "UNAPPROVED",
        reason: "Not in approved base image list"
      });
    }
  }
  return results;
}
// Check all Dockerfiles in the project (skipping anything vendored
// under node_modules) against the base-image policy.
var findOutput = exec('find . -name "Dockerfile*" -not -path "*/node_modules/*"', { encoding: "utf8" });
var dockerfiles = findOutput.trim().split("\n").filter(Boolean);

var hasViolation = false;
dockerfiles.forEach(function(df) {
  console.log("\nChecking " + df + ":");
  checkBaseImage(df).forEach(function(r) {
    var icon = "[??]";
    if (r.status === "BLOCKED") {
      icon = "[XX]";
      hasViolation = true;
    } else if (r.status === "APPROVED") {
      icon = "[OK]";
    }
    console.log(" " + icon + " " + r.image + " - " + r.status);
    if (r.reason) console.log(" " + r.reason);
    if (r.tier) console.log(" Tier: " + r.tier);
  });
});

// Fail the Azure DevOps task (and this process) only for BLOCKED images;
// UNAPPROVED images are surfaced as warnings but do not break the build.
if (hasViolation) {
  console.log("\n##vso[task.complete result=Failed;]Blocked base images detected");
  process.exit(1);
}
Automated Base Image Updates
# Scheduled pipeline to rebuild with latest base images
trigger: none
schedules:
- cron: '0 4 * * 3' # Wednesday 4 AM UTC
displayName: 'Weekly base image update'
branches:
include: [main]
steps:
- script: |
docker pull node:20-alpine3.19
DIGEST=$(docker inspect --format='{{index .RepoDigests 0}}' node:20-alpine3.19)
echo "Current base image digest: $DIGEST"
# Rebuild with --no-cache to pick up base image updates
docker build --no-cache -t myapp:refresh .
displayName: 'Rebuild with fresh base image'
- script: |
trivy image --format json --output refresh-scan.json myapp:refresh
VULNS=$(cat refresh-scan.json | node -e "
var r=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));
var c=0; (r.Results||[]).forEach(function(x){c+=(x.Vulnerabilities||[]).length}); console.log(c);
")
echo "Vulnerabilities after rebuild: $VULNS"
displayName: 'Scan refreshed image'
Complete Working Example: Container Security Gate
var fs = require("fs");
var exec = require("child_process").execSync;
// ============================================================
// Container Security Gate
// Comprehensive container image analysis and policy enforcement
// ============================================================

// Policy thresholds enforced by the gate. Tune per team/environment.
var POLICY = {
  maxCritical: 0,            // any CRITICAL vulnerability fails the gate
  maxHigh: 3,                // small budget of HIGH findings allowed
  maxImageSizeMB: 500,       // pull-time and attack-surface budget
  requireNonRoot: true,      // image must run as a non-root user
  requireHealthcheck: true,  // image must declare a HEALTHCHECK
  // Network/debug tools that hand an attacker a foothold in production
  blockedPackages: ["wget", "curl", "netcat", "telnet", "ssh"],
  // Minimal base images only (alpine / slim / distroless variants)
  approvedBasePatterns: [/alpine/, /slim/, /distroless/]
};

// Run Trivy against a local image and return its parsed JSON report.
// Trivy may exit non-zero even while printing a valid report, so the
// catch branch recovers the report from the captured stdout.
function scanWithTrivy(imageName) {
  var output;
  try {
    output = exec("trivy image --format json --quiet " + imageName, {
      encoding: "utf8",
      timeout: 300000 // 5 minutes: first run also downloads the vuln DB
    });
  } catch (e) {
    // execSync attaches the child's captured stdout to the error object.
    // If there is no stdout at all (trivy not installed, timeout before
    // any output), re-throw rather than feeding undefined to JSON.parse.
    if (!e.stdout) {
      throw e;
    }
    output = String(e.stdout);
  }
  return JSON.parse(output);
}

// Static Dockerfile lint: best-practice checks that do not need the built
// image. Returns an array of human-readable issue strings (empty = clean).
function checkDockerfile(dockerfilePath) {
  var content = fs.readFileSync(dockerfilePath, "utf8");
  var issues = [];
  // Check for a USER directive (anchored to a line start so "USER" inside
  // a comment or another word does not count as compliance)
  if (!/^\s*USER\s/m.test(content)) {
    issues.push("No USER directive found - container runs as root");
  }
  // Check for a HEALTHCHECK directive (anchored for the same reason)
  if (!/^\s*HEALTHCHECK\b/m.test(content)) {
    issues.push("No HEALTHCHECK directive found");
  }
  // Check for ADD instead of COPY (ADD's auto-extract/URL-fetch behavior
  // is a common source of surprises)
  var addLines = content.match(/^\s*ADD\s/gm);
  if (addLines && addLines.length > 0) {
    issues.push("Use COPY instead of ADD (" + addLines.length + " ADD directive(s) found)");
  }
  // Check for latest tag
  if (content.match(/FROM\s+\S+:latest/)) {
    issues.push("Using :latest tag - pin to specific version");
  }
  // Check the base image of the FINAL stage: in a multi-stage build only
  // the last FROM determines what actually ships to production, so a full
  // builder image in an earlier stage is acceptable.
  var fromMatches = content.match(/^\s*FROM\s+([^\s]+)/gm) || [];
  if (fromMatches.length > 0) {
    var lastFrom = fromMatches[fromMatches.length - 1];
    var baseImage = lastFrom.replace(/^\s*FROM\s+/, "");
    var isApproved = POLICY.approvedBasePatterns.some(function(p) {
      return p.test(baseImage);
    });
    if (!isApproved) {
      issues.push("Base image '" + baseImage + "' is not in approved list (use alpine, slim, or distroless)");
    }
  }
  return issues;
}
// Inspect a built image for policy violations that static Dockerfile
// analysis cannot see: final size, bundled network tools, and the
// effective runtime user. Requires Docker access on the agent.
// Returns { issues: string[], sizeMB: number }.
function checkImageMetadata(imageName) {
  var issues = [];
  // Check image size against the policy budget
  var sizeOutput = exec("docker image inspect --format='{{.Size}}' " + imageName, { encoding: "utf8" }).trim();
  var sizeMB = Number.parseInt(sizeOutput, 10) / (1024 * 1024);
  if (Number.isNaN(sizeMB)) {
    // Do not let NaN leak into comparisons and the report's toFixed() call
    issues.push("Unable to determine image size from docker inspect output: " + sizeOutput.substring(0, 40));
    sizeMB = 0;
  } else if (sizeMB > POLICY.maxImageSizeMB) {
    issues.push("Image size " + sizeMB.toFixed(0) + "MB exceeds limit of " + POLICY.maxImageSizeMB + "MB");
  }
  // Probe for blocked attack tools: a successful `which` run means the
  // binary exists; a non-zero exit (caught) means it does not.
  // NOTE(review): images with no shell/`which` binary (e.g. distroless)
  // make every probe fail, which reads as "not found" - acceptable here,
  // since such minimal images do not carry these tools anyway.
  POLICY.blockedPackages.forEach(function(pkg) {
    try {
      exec("docker run --rm --entrypoint which " + imageName + " " + pkg + " 2>/dev/null", { encoding: "utf8" });
      issues.push("Blocked package '" + pkg + "' found in image");
    } catch (e) {
      // Package not found - good
    }
  });
  // Check if the image is configured to run as root; an empty
  // Config.User means Docker defaults to root
  var userOutput = exec("docker inspect --format='{{.Config.User}}' " + imageName, { encoding: "utf8" }).trim();
  if (POLICY.requireNonRoot && (!userOutput || userOutput === "root" || userOutput === "0")) {
    issues.push("Container runs as root user");
  }
  return { issues: issues, sizeMB: sizeMB };
}
// Main execution
// Orchestrates the four-step security gate for one image:
//   1. Static Dockerfile lint (checkDockerfile)
//   2. Built-image metadata checks (checkImageMetadata)
//   3. Trivy vulnerability scan tallied against POLICY thresholds
//   4. Summary table, JSON report, and pipeline fail on any gate failure
// Writes container-security-report.json and exits 1 (after emitting an
// Azure DevOps ##vso logging command) when any gate fails.
function runSecurityGate(imageName, dockerfilePath) {
console.log("=================================================");
console.log(" CONTAINER SECURITY GATE");
console.log(" Image: " + imageName);
console.log("=================================================\n");
var allIssues = [];
var gateResults = [];
// Gate 1: Dockerfile best practices
// Any lint finding fails this gate, but findings are logged as warnings.
console.log("[1/4] Dockerfile Analysis");
var dfIssues = checkDockerfile(dockerfilePath);
if (dfIssues.length > 0) {
dfIssues.forEach(function(issue) {
console.log(" [WARN] " + issue);
allIssues.push({ gate: "Dockerfile", severity: "warning", message: issue });
});
} else {
console.log(" [OK] Dockerfile passes all checks");
}
gateResults.push({ name: "Dockerfile", passed: dfIssues.length === 0 });
// Gate 2: Image metadata
// Size budget, blocked tools, and root-user checks on the built image.
console.log("\n[2/4] Image Metadata Analysis");
var metadata = checkImageMetadata(imageName);
console.log(" Image size: " + metadata.sizeMB.toFixed(1) + " MB");
metadata.issues.forEach(function(issue) {
console.log(" [WARN] " + issue);
allIssues.push({ gate: "Metadata", severity: "warning", message: issue });
});
gateResults.push({ name: "Metadata", passed: metadata.issues.length === 0 });
// Gate 3: Vulnerability scan
console.log("\n[3/4] Vulnerability Scan (Trivy)");
var trivyResults = scanWithTrivy(imageName);
// Tally findings by severity across all result sets (OS packages and
// application libraries alike).
var vulnCounts = { CRITICAL: 0, HIGH: 0, MEDIUM: 0, LOW: 0 };
(trivyResults.Results || []).forEach(function(result) {
(result.Vulnerabilities || []).forEach(function(v) {
vulnCounts[v.Severity] = (vulnCounts[v.Severity] || 0) + 1;
});
});
console.log(" Critical: " + vulnCounts.CRITICAL + " (max: " + POLICY.maxCritical + ")");
console.log(" High: " + vulnCounts.HIGH + " (max: " + POLICY.maxHigh + ")");
console.log(" Medium: " + vulnCounts.MEDIUM);
console.log(" Low: " + vulnCounts.LOW);
// Only CRITICAL and HIGH counts are gated; MEDIUM/LOW are informational.
var vulnPassed = vulnCounts.CRITICAL <= POLICY.maxCritical && vulnCounts.HIGH <= POLICY.maxHigh;
gateResults.push({ name: "Vulnerabilities", passed: vulnPassed });
if (!vulnPassed) {
allIssues.push({ gate: "Vulnerability", severity: "error", message: "Vulnerability thresholds exceeded" });
}
// Gate 4: Overall assessment
console.log("\n[4/4] Gate Results");
console.log(" +-----------------------+--------+");
console.log(" | Gate | Status |");
console.log(" +-----------------------+--------+");
gateResults.forEach(function(g) {
var status = g.passed ? " PASS " : " FAIL ";
console.log(" | " + g.name.padEnd(21) + " | " + status + " |");
});
console.log(" +-----------------------+--------+");
var overallPassed = gateResults.every(function(g) { return g.passed; });
console.log("\n OVERALL: " + (overallPassed ? "PASSED" : "FAILED"));
// Write report
// Machine-readable summary for the pipeline to publish as an artifact.
var report = {
image: imageName,
timestamp: new Date().toISOString(),
gates: gateResults,
vulnerabilities: vulnCounts,
issues: allIssues,
passed: overallPassed
};
fs.writeFileSync("container-security-report.json", JSON.stringify(report, null, 2));
console.log("\n Report: container-security-report.json");
if (!overallPassed) {
// The ##vso logging command marks the Azure DevOps task failed;
// exit(1) also stops the script for non-ADO callers.
console.log("\n##vso[task.complete result=Failed;]Container security gate failed");
process.exit(1);
}
}
// CLI entry point: `node gate.js <image> [dockerfilePath]`.
// Defaults let the script run unmodified against a local checkout.
var imageName = process.argv[2] || "myapp:latest";
var dockerfilePath = process.argv[3] || "Dockerfile";
runSecurityGate(imageName, dockerfilePath);
Output:
=================================================
CONTAINER SECURITY GATE
Image: myapp:v2.1.0
=================================================
[1/4] Dockerfile Analysis
[OK] Dockerfile passes all checks
[2/4] Image Metadata Analysis
Image size: 127.4 MB
[WARN] Blocked package 'curl' found in image
[3/4] Vulnerability Scan (Trivy)
Critical: 0 (max: 0)
High: 2 (max: 3)
Medium: 8
Low: 14
[4/4] Gate Results
+-----------------------+--------+
| Gate | Status |
+-----------------------+--------+
| Dockerfile | PASS |
| Metadata | FAIL |
| Vulnerabilities | PASS |
+-----------------------+--------+
OVERALL: FAILED
Report: container-security-report.json
Common Issues & Troubleshooting
"trivy image" Hangs on First Run
The first run downloads the vulnerability database (approximately 200MB):
2026-02-10T14:00:00.000Z INFO Need to update DB
2026-02-10T14:00:00.000Z INFO Downloading DB...
Fix: Pre-download the database in a cached step:
steps:
- task: Cache@2
inputs:
key: 'trivy-db | "$(Agent.OS)"'
path: '$(HOME)/.cache/trivy'
displayName: 'Cache Trivy DB'
- script: trivy image --download-db-only
displayName: 'Update Trivy DB'
Scan Reports Different Results Than ACR Defender
Trivy and Microsoft Defender use different vulnerability databases and detection methods. This is expected:
Trivy: CVE-2025-1234 (CRITICAL) in openssl 3.0.12
Defender: Not reported for same image
Use both tools — they complement each other. Trivy catches more OS package issues; Defender integrates with Azure Security Center for compliance tracking.
Image Scan Fails with "unable to initialize a image analyzer"
Error: unable to initialize a image analyzer: unable to open
the image: failed to pull the image: permission denied
The scanner cannot access the local Docker socket. Ensure the pipeline agent has Docker access:
steps:
- script: |
docker save myapp:$(Build.BuildId) -o image.tar
trivy image --input image.tar --format json --output trivy.json
displayName: 'Scan from tar (no Docker socket needed)'
Alpine-Based Images Show Fewer Vulnerabilities But Are Not Necessarily More Secure
Alpine uses musl instead of glibc. Some vulnerabilities in glibc do not apply, but musl has its own set of issues and some Node.js native modules do not work with musl:
# Test your application actually works on Alpine
docker run --rm myapp:alpine npm test
# Common failure: native modules compiled for glibc
# Error: Error loading shared library libc.musl-x86_64.so.1
If native modules fail on Alpine, use node:20-bookworm-slim as the next best option.
Best Practices
- Scan the final built image, not just the Dockerfile — The image that deploys is what matters. Multi-stage build artifacts, runtime downloads, and layer caching can all introduce vulnerabilities not visible in the Dockerfile.
- Use minimal base images — Alpine or distroless images have dramatically fewer vulnerabilities. Start with the smallest image that works and add packages explicitly.
- Pin base image digests, not just tags — Tags are mutable.
`node:20-alpine` today might be different from `node:20-alpine` tomorrow. Pin to digests for reproducible builds: `node:20-alpine@sha256:abc123...`. - Rebuild images weekly even without code changes — Base image updates fix OS vulnerabilities. A scheduled rebuild with
`--no-cache` picks up these patches. - Remove unnecessary tools from production images —
`curl`, `wget`, `netcat`, and `ssh` are useful during development but give attackers tools in production. Remove them in the final stage. - Generate and store SBOMs alongside images — An SBOM documents every component in the image. Store it in ACR alongside the image for incident response and compliance.
- Run containers as non-root — Add
`USER node` (or another non-root user) in your Dockerfile. Combine with a `--read-only` filesystem in production for defense in depth. - Set image size budgets — Large images are slow to pull and contain more attack surface. Set a size policy (e.g. under 200MB) and enforce it in the pipeline.