|
|
import fs from 'fs/promises' |
|
|
import path from 'path' |
|
|
import unified from 'unified' |
|
|
import markdown from 'remark-parse' |
|
|
import remarkToRehype from 'remark-rehype' |
|
|
import raw from 'rehype-raw' |
|
|
import visit from 'unist-util-visit' |
|
|
import GithubSlugger from 'github-slugger' |
|
|
import matter from 'gray-matter' |
|
|
import * as github from '@actions/github' |
|
|
import { setFailed } from '@actions/core' |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// One parsed MDX document, stored in `documentMap` keyed by its normalized
// URL path (see normalizePath).
interface Document {
  // Markdown body with the frontmatter stripped (from gray-matter).
  body: string
  // On-disk file path the document was read from.
  path: string
  // GitHub-style slugs of every heading, used to validate #hash links.
  headings: string[]
  // Optional frontmatter field: normalized path of another document whose
  // headings this page uses (see validateInternalLink / validateSourceLinks).
  source?: string
  // Optional frontmatter field: related-page links to validate against the
  // document map.
  related?: {
    links: string[]
  }
}
|
|
|
|
|
// All broken references collected while validating a single document.
interface Errors {
  // The document the errors were found in.
  doc: Document
  // /docs/ links whose target page is not in the document map.
  link: string[]
  // Links whose #hash fragment matches no heading.
  hash: string[]
  // `source` frontmatter references that do not resolve.
  source: string[]
  // `related.links` frontmatter entries that do not resolve.
  related: string[]
}

// The error categories — every Errors key except the document itself.
type ErrorType = Exclude<keyof Errors, 'doc'>
|
|
|
|
|
// Minimal shape of an existing GitHub issue comment — only the id is needed
// to update it in place.
interface Comment {
  id: number
}
|
|
|
|
|
// Directories scanned for .mdx files, and the URL prefix for docs links.
const DOCS_PATH = '/docs/'
const ERRORS_PATH = '/errors/'
// Hashes that are always valid even though no heading produces them.
const EXCLUDED_HASHES = ['top']
// Hidden marker embedded in the bot's PR comment so it can be found/updated.
const COMMENT_TAG = '<!-- LINK_CHECKER_COMMENT -->'

const { context, getOctokit } = github
// Non-null assertion: GITHUB_TOKEN is provided by the Actions runtime;
// missing token will surface as an auth failure on the first API call.
const octokit = getOctokit(process.env.GITHUB_TOKEN!)
const { owner, repo } = context.repo
const pullRequest = context.payload.pull_request
// This action only makes sense on pull_request events; exit cleanly otherwise.
if (!pullRequest) {
  console.log('Skipping since this is not a pull request')
  process.exit(0)
}
// Head commit of the PR — used to deep-link files in the error table.
const sha = pullRequest.head.sha
// Forked PRs have a read-only token: no comments or checks can be created.
const isFork = pullRequest.head.repo.fork
const prNumber = pullRequest.number

// Shared slugger instance; reset per document in getHeadingsFromMarkdownTree.
const slugger = new GithubSlugger()
|
|
|
|
|
|
|
|
async function getAllMdxFilePaths( |
|
|
directoriesToScan: string[], |
|
|
fileList: string[] = [] |
|
|
): Promise<string[]> { |
|
|
for (const dir of directoriesToScan) { |
|
|
const dirPath = path.join('.', dir) |
|
|
const files = await fs.readdir(dirPath) |
|
|
for (const file of files) { |
|
|
const filePath = path.join(dirPath, file) |
|
|
const stats = await fs.stat(filePath) |
|
|
if (stats.isDirectory()) { |
|
|
fileList = await getAllMdxFilePaths([filePath], fileList) |
|
|
} else if (path.extname(file) === '.mdx') { |
|
|
fileList.push(filePath) |
|
|
} |
|
|
} |
|
|
} |
|
|
|
|
|
return fileList |
|
|
} |
|
|
|
|
|
|
|
|
function getHeadingsFromMarkdownTree( |
|
|
tree: ReturnType<typeof markdownProcessor.parse> |
|
|
): string[] { |
|
|
const headings: string[] = [] |
|
|
slugger.reset() |
|
|
|
|
|
visit(tree, 'heading', (node) => { |
|
|
let headingText = '' |
|
|
|
|
|
|
|
|
visit(node, (node: any) => { |
|
|
if (node.value) { |
|
|
headingText += node.value |
|
|
} |
|
|
}) |
|
|
headings.push(slugger.slug(headingText)) |
|
|
}) |
|
|
|
|
|
return headings |
|
|
} |
|
|
|
|
|
|
|
|
// Unified pipeline: parse markdown (remark-parse), convert to an HTML/hast
// tree (remark-rehype), and re-parse embedded raw HTML nodes (rehype-raw).
// NOTE(review): `allowDangerousHTML` is the option name used by the older
// remark-rehype versions this default-import style implies — confirm it
// matches the installed version (newer releases spell it allowDangerousHtml).
const markdownProcessor = unified()
  .use(markdown)
  .use(remarkToRehype, { allowDangerousHTML: true })
  .use(raw)
  .use(function compiler() {
    // Identity "compiler": makes `.process()` yield the syntax tree itself
    // instead of serialized output, so callers can traverse it directly.
    this.Compiler = function treeCompiler(tree) {
      return tree
    }
  })
|
|
|
|
|
|
|
|
function normalizePath(filePath: string): string { |
|
|
if (filePath.startsWith(ERRORS_PATH.substring(1))) { |
|
|
return ( |
|
|
filePath |
|
|
|
|
|
|
|
|
.replace(ERRORS_PATH.substring(1), DOCS_PATH.substring(1) + 'messages/') |
|
|
.replace('.mdx', '') |
|
|
) |
|
|
} |
|
|
|
|
|
return ( |
|
|
|
|
|
|
|
|
filePath |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
.replace(DOCS_PATH.substring(1), '') |
|
|
|
|
|
.replace(/(\d\d-)/g, '') |
|
|
.replace('.mdx', '') |
|
|
.replace('/index', '') |
|
|
) |
|
|
} |
|
|
|
|
|
|
|
|
// Map from normalized URL path (see normalizePath) to its parsed Document.
// Assigned once in validateAllInternalLinks before any validation runs.
let documentMap: Map<string, Document>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Reads one MDX file and builds its [normalizedUrlPath, Document] entry for
 * the document map: splits frontmatter from body (gray-matter), parses the
 * body, and extracts heading slugs for hash validation.
 *
 * On failure the action is marked failed and a ['', {}] placeholder is
 * returned — note this still inserts an empty-string key into the map.
 */
async function prepareDocumentMapEntry(
  filePath: string
): Promise<[string, Document]> {
  try {
    const mdxContent = await fs.readFile(filePath, 'utf8')
    const { content, data } = matter(mdxContent)
    const tree = markdownProcessor.parse(content)
    const headings = getHeadingsFromMarkdownTree(tree)
    const normalizedUrlPath = normalizePath(filePath)

    return [
      normalizedUrlPath,
      // Frontmatter fields (e.g. `source`, `related`) are spread last so
      // they extend the base Document fields.
      { body: content, path: filePath, headings, ...data },
    ]
  } catch (error) {
    setFailed(`Error preparing document map for file ${filePath}: ${error}`)
    return ['', {} as Document]
  }
}
|
|
|
|
|
|
|
|
function validateInternalLink(errors: Errors, href: string): void { |
|
|
|
|
|
const [link, hash] = href.replace(DOCS_PATH, '').split('#', 2) |
|
|
|
|
|
let foundPage |
|
|
|
|
|
if (link.startsWith('messages/')) { |
|
|
|
|
|
|
|
|
foundPage = documentMap.get(DOCS_PATH.substring(1) + link) |
|
|
} else { |
|
|
|
|
|
|
|
|
foundPage = documentMap.get(link) |
|
|
} |
|
|
|
|
|
if (!foundPage) { |
|
|
errors.link.push(href) |
|
|
} else if (hash && !EXCLUDED_HASHES.includes(hash)) { |
|
|
|
|
|
const foundPageSource = foundPage.source |
|
|
? documentMap.get(foundPage.source) |
|
|
: undefined |
|
|
|
|
|
|
|
|
const hashFound = (foundPageSource || foundPage).headings.includes(hash) |
|
|
|
|
|
if (!hashFound) { |
|
|
errors.hash.push(href) |
|
|
} |
|
|
} |
|
|
} |
|
|
|
|
|
|
|
|
function validateHashLink(errors: Errors, href: string, doc: Document): void { |
|
|
const hashLink = href.replace('#', '') |
|
|
|
|
|
if (!EXCLUDED_HASHES.includes(hashLink) && !doc.headings.includes(hashLink)) { |
|
|
errors.hash.push(href) |
|
|
} |
|
|
} |
|
|
|
|
|
|
|
|
function validateSourceLinks(doc: Document, errors: Errors): void { |
|
|
if (doc.source && !documentMap.get(doc.source)) { |
|
|
errors.source.push(doc.source) |
|
|
} |
|
|
} |
|
|
|
|
|
|
|
|
function validateRelatedLinks(doc: Document, errors: Errors): void { |
|
|
if (doc.related && doc.related.links) { |
|
|
doc.related.links.forEach((link) => { |
|
|
if (!documentMap.get(link)) { |
|
|
errors.related.push(link) |
|
|
} |
|
|
}) |
|
|
} |
|
|
} |
|
|
|
|
|
|
|
|
/**
 * Walks one document's compiled HTML tree, validating every <a> element's
 * href, then validates the document's frontmatter `source` and `related`
 * links. Returns the collected Errors for this document (possibly empty).
 */
function traverseTreeAndValidateLinks(tree: any, doc: Document): Errors {
  const errors: Errors = {
    doc,
    link: [],
    hash: [],
    source: [],
    related: [],
  }

  try {
    visit(tree, (node: any) => {
      if (node.type === 'element' && node.tagName === 'a') {
        const href = node.properties.href

        if (!href) return

        // Only absolute /docs/ links and same-page #hash links are checked;
        // external and relative links are ignored.
        if (href.startsWith(DOCS_PATH)) {
          validateInternalLink(errors, href)
        } else if (href.startsWith('#')) {
          validateHashLink(errors, href, doc)
        }
      }
    })

    validateSourceLinks(doc, errors)
    validateRelatedLinks(doc, errors)
  } catch (error) {
    setFailed('Error traversing tree: ' + error)
  }

  return errors
}
|
|
|
|
|
async function findBotComment(): Promise<Comment | undefined> { |
|
|
try { |
|
|
const { data: comments } = await octokit.rest.issues.listComments({ |
|
|
owner, |
|
|
repo, |
|
|
issue_number: prNumber, |
|
|
}) |
|
|
|
|
|
return comments.find((c) => c.body?.includes(COMMENT_TAG)) |
|
|
} catch (error) { |
|
|
setFailed('Error finding bot comment: ' + error) |
|
|
return undefined |
|
|
} |
|
|
} |
|
|
|
|
|
/**
 * Replaces the body of an existing bot comment on the PR.
 *
 * @param comment New comment body (already includes COMMENT_TAG).
 * @param botComment The existing comment to update.
 * @returns The comment's html_url, or '' when the API call fails (the
 *          failure is reported via setFailed).
 */
async function updateComment(
  comment: string,
  botComment: Comment
): Promise<string> {
  try {
    const { data } = await octokit.rest.issues.updateComment({
      owner,
      repo,
      comment_id: botComment.id,
      body: comment,
    })

    return data.html_url
  } catch (error) {
    setFailed('Error updating comment: ' + error)
    return ''
  }
}
|
|
|
|
|
async function createComment(comment: string): Promise<string> { |
|
|
if (isFork) { |
|
|
setFailed( |
|
|
'The action could not create a Github comment because it is initiated from a forked repo. View the action logs for a list of broken links.' |
|
|
) |
|
|
|
|
|
return '' |
|
|
} else { |
|
|
try { |
|
|
const { data } = await octokit.rest.issues.createComment({ |
|
|
owner, |
|
|
repo, |
|
|
issue_number: prNumber, |
|
|
body: comment, |
|
|
}) |
|
|
|
|
|
return data.html_url |
|
|
} catch (error) { |
|
|
setFailed('Error creating comment: ' + error) |
|
|
return '' |
|
|
} |
|
|
} |
|
|
} |
|
|
|
|
|
const formatTableRow = ( |
|
|
link: string, |
|
|
errorType: ErrorType, |
|
|
docPath: string |
|
|
) => { |
|
|
return `| ${link} | ${errorType} | [/${docPath}](https://github.com/vercel/next.js/blob/${sha}/${docPath}) | \n` |
|
|
} |
|
|
|
|
|
/**
 * Publishes a 'Docs Link Validation' check on the PR's head commit:
 * failure with a pointer to the error comment when broken links exist,
 * success otherwise. On forked PRs no check can be created, so the result
 * is reported via setFailed / console.log instead.
 */
async function updateCheckStatus(
  errorsExist: boolean,
  commentUrl?: string
): Promise<void> {
  const checkName = 'Docs Link Validation'

  let summary, text

  if (errorsExist) {
    summary =
      'This PR introduces broken links to the docs. Click details for a list.'
    text = `[See the comment for details](${commentUrl})`
  } else {
    summary = 'No broken links found'
  }

  const checkParams = {
    owner,
    repo,
    name: checkName,
    head_sha: sha,
    status: 'completed',
    conclusion: errorsExist ? 'failure' : 'success',
    output: {
      title: checkName,
      summary: summary,
      // `text` stays undefined in the success case; the field is omitted.
      text: text,
    },
  }

  if (isFork) {
    // Forked PRs cannot create checks (read-only token); log or fail instead.
    if (errorsExist) {
      setFailed(
        'This PR introduces broken links to the docs. The action could not create a Github check because it is initiated from a forked repo.'
      )
    } else {
      console.log('Link validation was successful.')
    }
  } else {
    try {
      await octokit.rest.checks.create(checkParams)
    } catch (error) {
      setFailed('Failed to create check: ' + error)
    }
  }
}
|
|
|
|
|
|
|
|
async function validateAllInternalLinks(): Promise<void> { |
|
|
try { |
|
|
const allMdxFilePaths = await getAllMdxFilePaths([DOCS_PATH, ERRORS_PATH]) |
|
|
|
|
|
documentMap = new Map( |
|
|
await Promise.all(allMdxFilePaths.map(prepareDocumentMapEntry)) |
|
|
) |
|
|
|
|
|
const docProcessingPromises = allMdxFilePaths.map(async (filePath) => { |
|
|
const doc = documentMap.get(normalizePath(filePath)) |
|
|
if (doc) { |
|
|
const tree = (await markdownProcessor.process(doc.body)).contents |
|
|
return traverseTreeAndValidateLinks(tree, doc) |
|
|
} else { |
|
|
return { |
|
|
doc: {} as Document, |
|
|
link: [], |
|
|
hash: [], |
|
|
source: [], |
|
|
related: [], |
|
|
} as Errors |
|
|
} |
|
|
}) |
|
|
|
|
|
const allErrors = await Promise.all(docProcessingPromises) |
|
|
|
|
|
let errorsExist = false |
|
|
|
|
|
let errorRows: string[] = [] |
|
|
|
|
|
const errorTypes: ErrorType[] = ['link', 'hash', 'source', 'related'] |
|
|
allErrors.forEach((errors) => { |
|
|
const { |
|
|
doc: { path: docPath }, |
|
|
} = errors |
|
|
|
|
|
errorTypes.forEach((errorType) => { |
|
|
if (errors[errorType].length > 0) { |
|
|
errorsExist = true |
|
|
errors[errorType].forEach((link) => { |
|
|
errorRows.push(formatTableRow(link, errorType, docPath)) |
|
|
}) |
|
|
} |
|
|
}) |
|
|
}) |
|
|
|
|
|
const errorComment = [ |
|
|
'Hi there :wave:\n\nIt looks like this PR introduces broken links to the docs, please take a moment to fix them before merging:\n\n| Broken link | Type | File | \n| ----------- | ----------- | ----------- | \n', |
|
|
...errorRows, |
|
|
'\nThank you :pray:', |
|
|
].join('') |
|
|
|
|
|
const botComment = await findBotComment() |
|
|
|
|
|
let commentUrl |
|
|
|
|
|
if (errorsExist) { |
|
|
const comment = `${COMMENT_TAG}\n${errorComment}` |
|
|
if (botComment) { |
|
|
commentUrl = await updateComment(comment, botComment) |
|
|
} else { |
|
|
commentUrl = await createComment(comment) |
|
|
} |
|
|
|
|
|
const errorTableData = allErrors.flatMap((errors) => { |
|
|
const { doc } = errors |
|
|
|
|
|
return errorTypes.flatMap((errorType) => |
|
|
errors[errorType].map((link) => ({ |
|
|
docPath: doc.path, |
|
|
errorType, |
|
|
link, |
|
|
})) |
|
|
) |
|
|
}) |
|
|
|
|
|
console.log('This PR introduces broken links to the docs:') |
|
|
console.table(errorTableData, ['link', 'type', 'docPath']) |
|
|
} else if (botComment) { |
|
|
const comment = `${COMMENT_TAG}\nAll broken links are now fixed, thank you!` |
|
|
commentUrl = await updateComment(comment, botComment) |
|
|
} |
|
|
|
|
|
try { |
|
|
await updateCheckStatus(errorsExist, commentUrl) |
|
|
} catch (error) { |
|
|
setFailed('Failed to create Github check: ' + error) |
|
|
} |
|
|
} catch (error) { |
|
|
setFailed('Error validating internal links: ' + error) |
|
|
} |
|
|
} |
|
|
|
|
|
// Kick off the validation run; failures are surfaced via setFailed inside.
validateAllInternalLinks()
|
|
|