site/node_modules/micromark-core-commonmark/lib/heading-atx.js

/**
 * @typedef {import('micromark-util-types').Construct} Construct
 * @typedef {import('micromark-util-types').Resolver} Resolver
 * @typedef {import('micromark-util-types').State} State
 * @typedef {import('micromark-util-types').Token} Token
 * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
 * @typedef {import('micromark-util-types').Tokenizer} Tokenizer
 */

import {factorySpace} from 'micromark-factory-space'
import {
  markdownLineEnding,
  markdownLineEndingOrSpace,
  markdownSpace
} from 'micromark-util-character'
import {splice} from 'micromark-util-chunked'

/** @type {Construct} */
export const headingAtx = {
  name: 'headingAtx',
  tokenize: tokenizeHeadingAtx,
  resolve: resolveHeadingAtx
}
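
// Usage sketch: `headingAtx` is among the constructs `micromark` applies by
// default, so a plain `micromark` call already exercises this tokenizer
// (a minimal illustration, assuming the `micromark` package is available).
//
//   import {micromark} from 'micromark'
//
//   micromark('## Hello') // => '<h2>Hello</h2>'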

/** @type {Resolver} */
function resolveHeadingAtx(events, context) {
  let contentEnd = events.length - 2
  let contentStart = 3
  /** @type {Token} */
  let content
  /** @type {Token} */
  let text

  // Prefix whitespace, part of the opening.
  if (events[contentStart][1].type === 'whitespace') {
    contentStart += 2
  }

  // Suffix whitespace, part of the closing.
  if (
    contentEnd - 2 > contentStart &&
    events[contentEnd][1].type === 'whitespace'
  ) {
    contentEnd -= 2
  }

  // Closing sequence, part of the closing: either the only thing after the
  // opening, or preceded by whitespace.
  if (
    events[contentEnd][1].type === 'atxHeadingSequence' &&
    (contentStart === contentEnd - 1 ||
      (contentEnd - 4 > contentStart &&
        events[contentEnd - 2][1].type === 'whitespace'))
  ) {
    contentEnd -= contentStart + 1 === contentEnd ? 2 : 4
  }

  if (contentEnd > contentStart) {
    content = {
      type: 'atxHeadingText',
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end
    }
    text = {
      type: 'chunkText',
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end,
      contentType: 'text'
    }

    splice(events, contentStart, contentEnd - contentStart + 1, [
      ['enter', content, context],
      ['enter', text, context],
      ['exit', text, context],
      ['exit', content, context]
    ])
  }

  return events
}
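
// Worked example: for `## aa ##`, the tokenizer below emits, inside
// `atxHeading`, roughly this sequence of tokens:
//
//   atxHeadingSequence, whitespace, atxHeadingText ('aa'), whitespace,
//   atxHeadingSequence
//
// The resolver above then collapses everything between the opening sequence
// (plus its whitespace) and the closing whitespace and sequence into one
// `atxHeadingText` wrapping a `chunkText` (contentType: 'text'), so the
// heading content is tokenized as text in a later pass.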

/**
 * @this {TokenizeContext}
 * @type {Tokenizer}
 */
function tokenizeHeadingAtx(effects, ok, nok) {
  let size = 0

  return start

  /**
   * Start of a heading (atx).
   *
   * ```markdown
   * > | ## aa
   *     ^
   * ```
   *
   * @type {State}
   */
  function start(code) {
    // To do: parse indent like `markdown-rs`.
    effects.enter('atxHeading')
    return before(code)
  }

  /**
   * After optional whitespace, at `#`.
   *
   * ```markdown
   * > | ## aa
   *     ^
   * ```
   *
   * @type {State}
   */
  function before(code) {
    effects.enter('atxHeadingSequence')
    return sequenceOpen(code)
  }

  /**
   * In opening sequence.
   *
   * ```markdown
   * > | ## aa
   *     ^
   * ```
   *
   * @type {State}
   */
  function sequenceOpen(code) {
    // `35` is U+0023 (`#`); at most six are allowed in the opening sequence.
    if (code === 35 && size++ < 6) {
      effects.consume(code)
      return sequenceOpen
    }

    // Always at least one `#`.
    if (code === null || markdownLineEndingOrSpace(code)) {
      effects.exit('atxHeadingSequence')
      return atBreak(code)
    }

    return nok(code)
  }

  /**
   * After something, before something else.
   *
   * ```markdown
   * > | ## aa
   *       ^
   * ```
   *
   * @type {State}
   */
  function atBreak(code) {
    if (code === 35) {
      effects.enter('atxHeadingSequence')
      return sequenceFurther(code)
    }

    if (code === null || markdownLineEnding(code)) {
      effects.exit('atxHeading')
      // To do: interrupt like `markdown-rs`.
      // // Feel free to interrupt.
      // tokenizer.interrupt = false
      return ok(code)
    }

    if (markdownSpace(code)) {
      return factorySpace(effects, atBreak, 'whitespace')(code)
    }

    // To do: generate `data` tokens, add the `text` token later.
    // Needs edit map, see: `markdown.rs`.
    effects.enter('atxHeadingText')
    return data(code)
  }

  /**
   * In further sequence (after whitespace).
   *
   * Could be normal “visible” hashes in the heading or a final sequence.
   *
   * ```markdown
   * > | ## aa ##
   *           ^
   * ```
   *
   * @type {State}
   */
  function sequenceFurther(code) {
    if (code === 35) {
      effects.consume(code)
      return sequenceFurther
    }

    effects.exit('atxHeadingSequence')
    return atBreak(code)
  }

  /**
   * In text.
   *
   * ```markdown
   * > | ## aa
   *        ^
   * ```
   *
   * @type {State}
   */
  function data(code) {
    if (code === null || code === 35 || markdownLineEndingOrSpace(code)) {
      effects.exit('atxHeadingText')
      return atBreak(code)
    }

    effects.consume(code)
    return data
  }
}
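
// Behaviour sketch (assuming `micromark` with its default constructs, i.e.
// the standard CommonMark rules this tokenizer implements):
//
//   micromark('## aa ##')   // closing sequence stripped -> '<h2>aa</h2>'
//   micromark('#hi')        // no space after `#`        -> '<p>#hi</p>'
//   micromark('####### x')  // more than six `#`         -> '<p>####### x</p>'
//   micromark('#')          // empty heading             -> '<h1></h1>'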