site/node_modules/micromark-core-commonmark/lib/content.js

/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').Resolver} Resolver
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').Token} Token
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding} from 'micromark-util-character'
import {subtokenize} from 'micromark-util-subtokenize'
/**
* No name because it must not be turned off.
* @type {Construct}
*/
export const content = {
tokenize: tokenizeContent,
resolve: resolveContent
}
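// Used to check whether a line ending is followed by more content.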
/** @type {Construct} */
const continuationConstruct = {
tokenize: tokenizeContinuation,
partial: true
}
/**
* Content is transparent: it's parsed right now. That way, definitions are also
* parsed right now: before text in paragraphs (specifically, media) is parsed.
*
* @type {Resolver}
*/
function resolveContent(events) {
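// Expand the linked content chunks into their actual events.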
subtokenize(events)
return events
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeContent(effects, ok) {
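// The most recent content chunk: new chunks are linked to it.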
/** @type {Token | undefined} */
let previous
return chunkStart
/**
* Before a content chunk.
*
* ```markdown
* > | abc
*     ^
* ```
*
* @type {State}
*/
function chunkStart(code) {
effects.enter('content')
previous = effects.enter('chunkContent', {
contentType: 'content'
})
return chunkInside(code)
}
/**
* In a content chunk.
*
* ```markdown
* > | abc
*     ^^^
* ```
*
* @type {State}
*/
function chunkInside(code) {
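// At eof, the content definitely ends.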
if (code === null) {
return contentEnd(code)
}
// To do: in `markdown-rs`, each line is parsed on its own, and everything
// is stitched together when resolving.
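// At a line ending: check whether the next line continues this content or ends it.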
if (markdownLineEnding(code)) {
return effects.check(
continuationConstruct,
contentContinue,
contentEnd
)(code)
}
// Data.
effects.consume(code)
return chunkInside
}
/**
* At the end of the content: at eof, or at a line ending that is not followed
* by more content.
*
* @type {State}
*/
function contentEnd(code) {
effects.exit('chunkContent')
effects.exit('content')
return ok(code)
}
/**
* At a line ending that is followed by more content: continue in a new chunk.
*
* @type {State}
*/
function contentContinue(code) {
effects.consume(code)
effects.exit('chunkContent')
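// Link the chunks together so `subtokenize` can later treat them as one stream.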
previous.next = effects.enter('chunkContent', {
contentType: 'content',
previous
})
previous = previous.next
return chunkInside
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeContinuation(effects, ok, nok) {
const self = this
return startLookahead
/**
* At a line ending inside content: look ahead at the next line.
*
* @type {State}
*/
function startLookahead(code) {
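// Exit the current chunk and take the line ending, so the next line can be inspected.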
effects.exit('chunkContent')
effects.enter('lineEnding')
effects.consume(code)
effects.exit('lineEnding')
return factorySpace(effects, prefixed, 'linePrefix')
}
/**
* After the line prefix of the next line: decide whether the content continues.
*
* @type {State}
*/
function prefixed(code) {
if (code === null || markdownLineEnding(code)) {
return nok(code)
}
// Always populated by defaults.
const tail = self.events[self.events.length - 1]
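// Indented code can not interrupt content: 4+ spaces of line prefix means the
// content continues (unless indented code is turned off).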
if (
!self.parser.constructs.disable.null.includes('codeIndented') &&
tail &&
tail[1].type === 'linePrefix' &&
tail[2].sliceSerialize(tail[1], true).length >= 4
) {
return ok(code)
}
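// Otherwise, ask the flow constructs: if something can interrupt here, the
// content ends; if nothing can, it continues.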
return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)
}
}