Confluence Integration for Documentation
A comprehensive guide to integrating Atlassian Confluence with Azure DevOps for documentation workflows, covering automated page publishing from pipelines, release notes generation, API documentation sync, bidirectional linking between work items and wiki pages, and documentation-as-code patterns.
Overview
Documentation rots when it lives in a separate workflow from code. Confluence is where most enterprise teams keep their project documentation, runbooks, and architecture decisions. Azure DevOps is where the code and pipelines live. Connecting them means documentation stays current because it is generated and published as part of the build process — release notes appear automatically, API docs update when the code changes, and architecture decision records link directly to the work items that drove them. I have set up this integration for teams that went from "our docs are always outdated" to "our docs update themselves."
Prerequisites
- Atlassian Confluence instance (Cloud or Data Center)
- Azure DevOps organization with Pipelines and Repos
- Confluence API token (Cloud) or personal access token (Data Center)
- Node.js 16 or later for automation scripts
- Azure DevOps Personal Access Token for work item integration
- Basic familiarity with the Confluence REST API and storage format (XHTML)
Confluence REST API Basics
Confluence Cloud's current REST API (v2) lives at https://your-domain.atlassian.net/wiki/api/v2/, while Data Center exposes the v1 API at https://confluence.yourcompany.com/rest/api/. The client below calls the v1-style /content endpoints, which Confluence Cloud still serves under https://your-domain.atlassian.net/wiki/rest/api/, so the same code works against either deployment; pass the appropriate API root as the base URL.
Authentication
// confluence/client.js
var https = require("https");
function createClient(baseUrl, email, apiToken) {
var parsed = new URL(baseUrl);
var auth = Buffer.from(email + ":" + apiToken).toString("base64");
return {
request: function (method, path, body, callback) {
var options = {
hostname: parsed.hostname,
path: parsed.pathname + path,
method: method,
headers: {
"Authorization": "Basic " + auth,
"Content-Type": "application/json",
"Accept": "application/json"
}
};
var req = https.request(options, function (res) {
var data = "";
res.on("data", function (chunk) { data += chunk; });
res.on("end", function () {
var parsed;
try { parsed = JSON.parse(data); } catch (e) { parsed = data; }
if (res.statusCode >= 400) {
var error = new Error("Confluence API " + res.statusCode + ": " + (parsed.message || JSON.stringify(parsed)));
error.statusCode = res.statusCode;
return callback(error);
}
callback(null, parsed);
});
});
req.on("error", callback);
if (body) { req.write(JSON.stringify(body)); }
req.end();
},
getPage: function (pageId, callback) {
this.request("GET", "/content/" + pageId + "?expand=body.storage,version", null, callback);
},
createPage: function (spaceKey, title, content, parentId, callback) {
var body = {
type: "page",
title: title,
space: { key: spaceKey },
body: {
storage: {
value: content,
representation: "storage"
}
}
};
if (parentId) {
body.ancestors = [{ id: parentId }];
}
this.request("POST", "/content", body, callback);
},
updatePage: function (pageId, title, content, version, callback) {
var body = {
type: "page",
title: title,
version: { number: version + 1 },
body: {
storage: {
value: content,
representation: "storage"
}
}
};
this.request("PUT", "/content/" + pageId, body, callback);
},
findPage: function (spaceKey, title, callback) {
var cql = encodeURIComponent("space = \"" + spaceKey + "\" AND title = \"" + title + "\"");
this.request("GET", "/content/search?cql=" + cql, null, callback);
}
};
}
module.exports = { createClient: createClient };
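A quick usage sketch (the space key and page title here are placeholders), assuming CONFLUENCE_URL points at the API root, e.g. https://your-domain.atlassian.net/wiki/rest/api:
// confluence/example.js: look up a page and report its current version
var confluenceClient = require("./client");
var confluence = confluenceClient.createClient(
  process.env.CONFLUENCE_URL,
  process.env.CONFLUENCE_EMAIL,
  process.env.CONFLUENCE_API_TOKEN
);
confluence.findPage("DOCS", "Team Runbook", function (err, result) {
  if (err) { return console.error(err.message); }
  var match = (result.results || [])[0];
  if (!match) { return console.log("No page with that title in the space"); }
  confluence.getPage(match.id, function (err2, page) {
    if (err2) { return console.error(err2.message); }
    console.log(page.title + " is at version " + page.version.number);
  });
});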
Automated Release Notes
Generate release notes from Azure DevOps work items and publish them to Confluence after each release:
// confluence/release-notes.js
var confluenceClient = require("./client");
var https = require("https");
var CONFLUENCE_URL = process.env.CONFLUENCE_URL;
var CONFLUENCE_EMAIL = process.env.CONFLUENCE_EMAIL;
var CONFLUENCE_TOKEN = process.env.CONFLUENCE_API_TOKEN;
var CONFLUENCE_SPACE = process.env.CONFLUENCE_SPACE_KEY || "REL";
var CONFLUENCE_PARENT_ID = process.env.CONFLUENCE_RELEASE_NOTES_PARENT;
var AZURE_ORG = process.env.AZURE_ORG;
var AZURE_PROJECT = process.env.AZURE_PROJECT;
var AZURE_PAT = process.env.AZURE_PAT;
var BUILD_NUMBER = process.env.BUILD_BUILDNUMBER || "local";
var BUILD_URL = (process.env.SYSTEM_TEAMFOUNDATIONCOLLECTIONURI || "") +
(process.env.SYSTEM_TEAMPROJECT || "") +
"/_build/results?buildId=" + (process.env.BUILD_BUILDID || "0");
var confluence = confluenceClient.createClient(CONFLUENCE_URL, CONFLUENCE_EMAIL, CONFLUENCE_TOKEN);
function azureRequest(path, callback) {
  // AZURE_ORG may be either the organization name or the full collection URI
  // (e.g. $(System.CollectionUri)); normalize it to just the name.
  var orgName = (AZURE_ORG || "").replace(/^https?:\/\/dev\.azure\.com\//, "").replace(/\/+$/, "");
  var auth = Buffer.from(":" + AZURE_PAT).toString("base64");
  var options = {
    hostname: "dev.azure.com",
    path: "/" + orgName + "/" + AZURE_PROJECT + "/_apis" + path,
    method: "GET",
    headers: { "Authorization": "Basic " + auth, "Accept": "application/json" }
  };
var req = https.request(options, function (res) {
var data = "";
res.on("data", function (chunk) { data += chunk; });
res.on("end", function () {
try { callback(null, JSON.parse(data)); } catch (e) { callback(e); }
});
});
req.on("error", callback);
req.end();
}
function getWorkItemsForBuild(buildId, callback) {
azureRequest("/build/builds/" + buildId + "/workitems?api-version=7.1", function (err, data) {
if (err) { return callback(err); }
var ids = (data.value || []).map(function (wi) { return wi.id; });
if (ids.length === 0) { return callback(null, []); }
azureRequest("/wit/workitems?ids=" + ids.join(",") + "&api-version=7.1", function (err2, items) {
if (err2) { return callback(err2); }
callback(null, items.value || []);
});
});
}
function generateReleaseNotesHtml(workItems) {
var stories = [];
var bugs = [];
var tasks = [];
workItems.forEach(function (wi) {
var type = wi.fields["System.WorkItemType"];
var item = {
id: wi.id,
title: wi.fields["System.Title"],
state: wi.fields["System.State"],
assignedTo: wi.fields["System.AssignedTo"] ? wi.fields["System.AssignedTo"].displayName : "Unassigned"
};
if (type === "User Story" || type === "Product Backlog Item") { stories.push(item); }
else if (type === "Bug") { bugs.push(item); }
else { tasks.push(item); }
});
var date = new Date().toISOString().split("T")[0];
var html = '<ac:structured-macro ac:name="info"><ac:rich-text-body>' +
'<p>Release <strong>' + BUILD_NUMBER + '</strong> — ' + date + '</p>' +
'<p><a href="' + BUILD_URL + '">View Build in Azure DevOps</a></p>' +
'</ac:rich-text-body></ac:structured-macro>';
if (stories.length > 0) {
html += "<h2>New Features & Enhancements</h2><table><thead><tr>" +
"<th>ID</th><th>Title</th><th>Assigned To</th></tr></thead><tbody>";
stories.forEach(function (s) {
html += "<tr><td>" + s.id + "</td><td>" + escapeHtml(s.title) + "</td><td>" + escapeHtml(s.assignedTo) + "</td></tr>";
});
html += "</tbody></table>";
}
if (bugs.length > 0) {
html += "<h2>Bug Fixes</h2><table><thead><tr>" +
"<th>ID</th><th>Title</th><th>Assigned To</th></tr></thead><tbody>";
bugs.forEach(function (b) {
html += "<tr><td>" + b.id + "</td><td>" + escapeHtml(b.title) + "</td><td>" + escapeHtml(b.assignedTo) + "</td></tr>";
});
html += "</tbody></table>";
}
if (tasks.length > 0) {
html += "<h2>Other Changes</h2><ul>";
tasks.forEach(function (t) {
html += "<li>" + escapeHtml(t.title) + " (#" + t.id + ")</li>";
});
html += "</ul>";
}
if (workItems.length === 0) {
html += "<p><em>No linked work items found for this build.</em></p>";
}
return html;
}
function escapeHtml(str) {
  return (str || "").replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
// Main execution
var buildId = process.env.BUILD_BUILDID || "0";
var pageTitle = "Release " + BUILD_NUMBER + " — " + new Date().toISOString().split("T")[0];
console.log("Generating release notes for build #" + BUILD_NUMBER + "...");
getWorkItemsForBuild(buildId, function (err, workItems) {
if (err) {
console.error("Failed to get work items:", err.message);
process.exit(0); // Don't fail the pipeline
}
console.log("Found " + workItems.length + " linked work items");
var html = generateReleaseNotesHtml(workItems);
// Check if page already exists
confluence.findPage(CONFLUENCE_SPACE, pageTitle, function (err2, searchResult) {
if (err2) {
console.error("Confluence search failed:", err2.message);
process.exit(0);
}
var existingPages = searchResult.results || [];
if (existingPages.length > 0) {
var existingPage = existingPages[0];
confluence.getPage(existingPage.id, function (err3, page) {
if (err3) { return console.error("Failed to get page:", err3.message); }
confluence.updatePage(page.id, pageTitle, html, page.version.number, function (err4) {
if (err4) { console.error("Update failed:", err4.message); }
else { console.log("Updated Confluence page: " + pageTitle); }
});
});
} else {
confluence.createPage(CONFLUENCE_SPACE, pageTitle, html, CONFLUENCE_PARENT_ID, function (err3, page) {
if (err3) { console.error("Create failed:", err3.message); }
else { console.log("Created Confluence page: " + pageTitle + " (ID: " + page.id + ")"); }
});
}
});
});
Pipeline usage:
- script: node confluence/release-notes.js
  displayName: "Publish release notes to Confluence"
  condition: succeeded()
  env:
    CONFLUENCE_URL: $(ConfluenceUrl)
    CONFLUENCE_EMAIL: $(ConfluenceEmail)
    CONFLUENCE_API_TOKEN: $(ConfluenceToken)
    CONFLUENCE_SPACE_KEY: "REL"
    CONFLUENCE_RELEASE_NOTES_PARENT: $(ConfluenceParentPageId)
    AZURE_ORG: $(System.CollectionUri)
    AZURE_PROJECT: $(System.TeamProject)
    AZURE_PAT: $(System.AccessToken)
API Documentation Sync
Publish API documentation generated from OpenAPI specs or JSDoc to Confluence automatically:
// confluence/sync-api-docs.js
var fs = require("fs");
var path = require("path");
var confluenceClient = require("./client");
var confluence = confluenceClient.createClient(
process.env.CONFLUENCE_URL,
process.env.CONFLUENCE_EMAIL,
process.env.CONFLUENCE_API_TOKEN
);
var SPACE_KEY = process.env.CONFLUENCE_SPACE_KEY || "API";
var PARENT_PAGE_ID = process.env.CONFLUENCE_API_DOCS_PARENT;
var DOCS_DIR = process.argv[2] || "docs";
function readMarkdownFile(filePath) {
var content = fs.readFileSync(filePath, "utf8");
// Convert basic markdown to Confluence storage format
// This is a simplified converter — use a proper markdown-to-confluence library in production
var html = content
.replace(/^### (.+)$/gm, "<h3>$1</h3>")
.replace(/^## (.+)$/gm, "<h2>$1</h2>")
.replace(/^# (.+)$/gm, "<h1>$1</h1>")
.replace(/```(\w+)\n([\s\S]*?)```/g, function (match, lang, code) {
return '<ac:structured-macro ac:name="code"><ac:parameter ac:name="language">' +
lang + '</ac:parameter><ac:plain-text-body><![CDATA[' +
code.trim() + ']]></ac:plain-text-body></ac:structured-macro>';
})
.replace(/`([^`]+)`/g, "<code>$1</code>")
.replace(/\*\*([^*]+)\*\*/g, "<strong>$1</strong>")
.replace(/\*([^*]+)\*/g, "<em>$1</em>")
.replace(/^- (.+)$/gm, "<li>$1</li>")
.replace(/(<li>.*<\/li>\n?)+/g, "<ul>$&</ul>")
.replace(/\n\n/g, "</p><p>")
.replace(/^(?!<)(.+)$/gm, "<p>$1</p>");
return html;
}
function syncDocFile(filePath) {
var fileName = path.basename(filePath, path.extname(filePath));
var pageTitle = "API: " + fileName.replace(/-/g, " ").replace(/\b\w/g, function (l) { return l.toUpperCase(); });
var html = readMarkdownFile(filePath);
confluence.findPage(SPACE_KEY, pageTitle, function (err, result) {
if (err) {
console.error("Search failed for " + pageTitle + ":", err.message);
return;
}
var existing = (result.results || [])[0];
if (existing) {
confluence.getPage(existing.id, function (err2, page) {
if (err2) { return console.error("Get failed:", err2.message); }
confluence.updatePage(page.id, pageTitle, html, page.version.number, function (err3) {
if (err3) { console.error("Update failed " + pageTitle + ":", err3.message); }
else { console.log("Updated: " + pageTitle); }
});
});
} else {
confluence.createPage(SPACE_KEY, pageTitle, html, PARENT_PAGE_ID, function (err2) {
if (err2) { console.error("Create failed " + pageTitle + ":", err2.message); }
else { console.log("Created: " + pageTitle); }
});
}
});
}
// Sync all markdown files
if (!fs.existsSync(DOCS_DIR)) {
console.error("Docs directory not found: " + DOCS_DIR);
process.exit(1);
}
var files = fs.readdirSync(DOCS_DIR).filter(function (f) { return f.endsWith(".md"); });
console.log("Syncing " + files.length + " doc files from " + DOCS_DIR);
files.forEach(function (file) {
syncDocFile(path.join(DOCS_DIR, file));
});
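If the regex converter proves too brittle, a real Markdown parser can be swapped in. A minimal sketch using the marked npm package (an assumption; any Markdown-to-XHTML converter works), keeping the same readMarkdownFile signature:
// confluence/markdown.js: optional replacement for the regex converter above
var fs = require("fs");
var marked = require("marked").marked; // npm install marked (API shown is for recent versions of marked)
function readMarkdownFile(filePath) {
  var content = fs.readFileSync(filePath, "utf8");
  // marked emits standard HTML; Confluence accepts most of it, but void tags
  // (<br>, <hr>) may need closing slashes and fenced code blocks still need to
  // become code macros if you want Confluence syntax highlighting.
  return marked.parse(content);
}
module.exports = { readMarkdownFile: readMarkdownFile };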
Complete Working Example: Documentation Pipeline
A dedicated pipeline that builds and publishes all documentation:
# azure-pipelines-docs.yml
trigger:
  branches:
    include:
      - main
  paths:
    include:
      - docs/**
      - src/**/*.ts # Trigger on source changes for API doc generation
pool:
  vmImage: "ubuntu-latest"
steps:
  - task: NodeTool@0
    inputs:
      versionSpec: "20.x"
  - script: npm ci
    displayName: "Install dependencies"
  # Generate API docs from source
  - script: npx typedoc --out docs/api src/index.ts
    displayName: "Generate API documentation"
  # Generate OpenAPI spec
  - script: npm run generate-openapi
    displayName: "Generate OpenAPI spec"
  # Publish docs to Confluence
  - script: node confluence/sync-api-docs.js docs/api
    displayName: "Sync API docs to Confluence"
    env:
      CONFLUENCE_URL: $(ConfluenceUrl)
      CONFLUENCE_EMAIL: $(ConfluenceEmail)
      CONFLUENCE_API_TOKEN: $(ConfluenceToken)
      CONFLUENCE_SPACE_KEY: "API"
      CONFLUENCE_API_DOCS_PARENT: $(ConfluenceApiDocsParent)
  # Publish architecture docs
  - script: node confluence/sync-api-docs.js docs/architecture
    displayName: "Sync architecture docs to Confluence"
    env:
      CONFLUENCE_URL: $(ConfluenceUrl)
      CONFLUENCE_EMAIL: $(ConfluenceEmail)
      CONFLUENCE_API_TOKEN: $(ConfluenceToken)
      CONFLUENCE_SPACE_KEY: "ARCH"
      CONFLUENCE_API_DOCS_PARENT: $(ConfluenceArchDocsParent)
  # Publish release notes if this is a release build
  - script: node confluence/release-notes.js
    displayName: "Publish release notes"
    condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/main'))
    env:
      CONFLUENCE_URL: $(ConfluenceUrl)
      CONFLUENCE_EMAIL: $(ConfluenceEmail)
      CONFLUENCE_API_TOKEN: $(ConfluenceToken)
      CONFLUENCE_SPACE_KEY: "REL"
      CONFLUENCE_RELEASE_NOTES_PARENT: $(ConfluenceReleaseParent)
      AZURE_ORG: $(System.CollectionUri)
      AZURE_PROJECT: $(System.TeamProject)
      AZURE_PAT: $(System.AccessToken)
Bidirectional Linking Between Work Items and Confluence Pages
One of the most valuable patterns is linking Azure DevOps work items to the Confluence pages that document them. When a feature is designed in Confluence, the corresponding work items should link back to the design doc. When release notes are generated, the work items should reference the published page.
// confluence/link-work-items.js
var https = require("https");
function addConfluenceLinkToWorkItem(orgUrl, pat, workItemId, confluencePageUrl, pageTitle, callback) {
  var orgName = orgUrl.replace(/\/+$/, "").split("/").pop(); // strip any trailing slash before taking the last segment
var patchBody = JSON.stringify([
{
op: "add",
path: "/relations/-",
value: {
rel: "Hyperlink",
url: confluencePageUrl,
attributes: {
comment: "Confluence: " + pageTitle
}
}
}
]);
var options = {
hostname: "dev.azure.com",
path: "/" + orgName + "/_apis/wit/workitems/" + workItemId + "?api-version=7.1",
method: "PATCH",
headers: {
"Content-Type": "application/json-patch+json",
"Content-Length": Buffer.byteLength(patchBody),
"Authorization": "Basic " + Buffer.from(":" + pat).toString("base64")
}
};
var req = https.request(options, function(res) {
var data = "";
res.on("data", function(chunk) { data += chunk; });
res.on("end", function() {
if (res.statusCode === 200) {
console.log("Linked work item #%d to %s", workItemId, pageTitle);
callback(null, JSON.parse(data));
} else {
callback(new Error("Link failed: " + res.statusCode + " - " + data));
}
});
});
req.on("error", callback);
req.write(patchBody);
req.end();
}
module.exports = { addConfluenceLinkToWorkItem: addConfluenceLinkToWorkItem };
You can call this after publishing release notes to link every work item mentioned in the release back to the Confluence page. This creates a navigable trail from code change to documentation without any manual effort.
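For example, a small glue module (the file name and function are hypothetical) that release-notes.js could call after the page is created or updated, passing the published page URL and the work items it already fetched:
// confluence/link-release-work-items.js: link released work items to the notes page
var linker = require("./link-work-items");
function linkWorkItemsToPage(workItems, pageUrl, pageTitle) {
  workItems.forEach(function (wi, index) {
    // Stagger the PATCH calls slightly to stay clear of rate limits
    setTimeout(function () {
      linker.addConfluenceLinkToWorkItem(
        process.env.SYSTEM_TEAMFOUNDATIONCOLLECTIONURI, // e.g. https://dev.azure.com/your-org/
        process.env.AZURE_PAT,
        wi.id,
        pageUrl,
        pageTitle,
        function (err) { if (err) { console.error("Link failed for #" + wi.id + ": " + err.message); } }
      );
    }, index * 250);
  });
}
module.exports = { linkWorkItemsToPage: linkWorkItemsToPage };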
Common Issues and Troubleshooting
Confluence API returns "401 Unauthorized" with correct credentials
Confluence API 401: Basic authentication with passwords is not allowed on this instance
Confluence Cloud requires API tokens, not passwords. Generate an API token at https://id.atlassian.com/manage-profile/security/api-tokens. Use the token in place of the password with your email as the username. Confluence Data Center still accepts PATs or passwords depending on the configuration.
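To confirm a token works before wiring it into a pipeline, a quick smoke test against the /space endpoint (part of the v1 API the client above already targets) is enough:
// confluence/check-auth.js: credential smoke test
var confluenceClient = require("./client");
var confluence = confluenceClient.createClient(
  process.env.CONFLUENCE_URL,
  process.env.CONFLUENCE_EMAIL,
  process.env.CONFLUENCE_API_TOKEN
);
confluence.request("GET", "/space?limit=1", null, function (err, result) {
  if (err) { console.error("Auth check failed: " + err.message); process.exit(1); }
  console.log("Authenticated; first visible space: " + ((result.results || [])[0] || {}).key);
});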
Page creation fails with "A page with this title already exists"
Confluence API 400: A page with this title already exists in this space
Confluence enforces unique page titles within a space. Your script should search for existing pages before creating new ones and update them instead. The findPage function in the client above handles this pattern. Also be aware that titles are case-insensitive — "Release Notes" and "release notes" are considered the same title.
Storage format rejected with "Could not parse XHTML content"
Confluence API 400: Could not parse storage format content body
Confluence storage format is strict XHTML. Unclosed elements, HTML-style void tags without a closing slash (a bare <br> instead of <br/>), and unescaped ampersands all cause parsing failures. Always escape HTML entities in dynamic content: use &amp; for &, &lt; for <, and &gt; for >, and make sure every tag is properly closed. Run your generated HTML through an XML validator before sending it to the API.
Rate limiting on Confluence Cloud
HTTP 429: Rate limit exceeded
Confluence Cloud limits API requests. When syncing many pages, add delays between requests. A 500ms delay between page creates/updates is usually sufficient. For large documentation sets, batch the sync across multiple pipeline runs rather than publishing everything at once.
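One simple way to add that spacing in sync-api-docs.js is to stagger the per-file syncs instead of firing them all at once, for example by replacing the final forEach with:
// Stagger each file sync by 500ms to stay under Confluence Cloud rate limits
files.forEach(function (file, index) {
  setTimeout(function () {
    syncDocFile(path.join(DOCS_DIR, file));
  }, index * 500);
});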
Confluence macros not rendering in API-created pages
Confluence macros use the ac:structured-macro XML namespace. If your content includes macros (code blocks, info panels, expand sections), ensure the XML is well-formed and uses the correct macro names. The most common issue is missing the ac: namespace prefix or using incorrect parameter names.
Best Practices
Treat documentation as code. Keep documentation in Markdown files alongside the source code in Azure Repos. Generate and publish to Confluence as part of the CI/CD pipeline. This ensures docs are reviewed in PRs and versioned with the code they describe.
Use Confluence parent pages to organize by project. Create a hierarchy: Project > Release Notes, Project > API Documentation, Project > Architecture. This keeps auto-published content organized and easy to find.
Generate release notes from work item links. Do not write release notes manually. Link work items to commits and PRs, then let the pipeline query linked work items and generate formatted release notes automatically.
Version your Confluence page updates. Each Confluence API update increments the page version. Include the build number or git commit SHA in the page content so readers know which code version the documentation reflects.
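A small helper along these lines (a sketch built on standard Azure Pipelines environment variables; the file name is hypothetical) can be appended to every generated page body, and it doubles as the backlink to Azure DevOps recommended below:
// confluence/page-footer.js: build metadata footer for generated pages
function buildFooter() {
  var commit = (process.env.BUILD_SOURCEVERSION || "unknown").substring(0, 8);
  var buildNumber = process.env.BUILD_BUILDNUMBER || "local";
  var buildUrl = (process.env.SYSTEM_TEAMFOUNDATIONCOLLECTIONURI || "") +
    (process.env.SYSTEM_TEAMPROJECT || "") +
    "/_build/results?buildId=" + (process.env.BUILD_BUILDID || "0");
  return "<hr/><p><em>Generated from commit " + commit + " by build <a href=\"" +
    buildUrl + "\">" + buildNumber + "</a>.</em></p>";
}
module.exports = { buildFooter: buildFooter };
Concatenate buildFooter() onto the generated HTML before calling createPage or updatePage.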
Set up a service account for Confluence API access. Use a dedicated bot account with appropriate space permissions rather than a personal account. This prevents access issues when team members change roles.
Do not fail the pipeline if documentation publishing fails. Documentation sync is important but not critical. Use condition: always() or continueOnError: true so a Confluence API error does not block a production deployment.
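A minimal sketch of that guard on the release-notes step, reusing the variables from the earlier pipeline snippets:
- script: node confluence/release-notes.js
  displayName: "Publish release notes to Confluence"
  continueOnError: true # a Confluence outage or API error will not fail the run
  env:
    CONFLUENCE_URL: $(ConfluenceUrl)
    CONFLUENCE_EMAIL: $(ConfluenceEmail)
    CONFLUENCE_API_TOKEN: $(ConfluenceToken)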
Add Confluence links back to Azure DevOps. Include links to the source repo, build pipeline, and related work items in the Confluence page footer. When someone reads a design doc in Confluence, they should be one click away from the implementation code.
Implement dry-run mode for documentation sync. Before publishing to production Confluence spaces, log what would be created or updated without making changes. This prevents accidental overwrites when testing pipeline changes.
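A minimal sketch, assuming a DRY_RUN environment variable set by the pipeline and a check placed inside syncDocFile right after the existing-page lookup:
// In sync-api-docs.js: skip writes when DRY_RUN is set, but still log the plan
var DRY_RUN = process.env.DRY_RUN === "true";
// inside the findPage callback in syncDocFile, before creating or updating:
if (DRY_RUN) {
  console.log("[dry-run] Would " + (existing ? "update" : "create") + ": " + pageTitle);
  return;
}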