site/node_modules/micromark-core-commonmark/dev/lib/label-end.js
/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').Event} Event
* @typedef {import('micromark-util-types').Resolver} Resolver
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').Token} Token
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*/
import {factoryDestination} from 'micromark-factory-destination'
import {factoryLabel} from 'micromark-factory-label'
import {factoryTitle} from 'micromark-factory-title'
import {factoryWhitespace} from 'micromark-factory-whitespace'
import {markdownLineEndingOrSpace} from 'micromark-util-character'
import {push, splice} from 'micromark-util-chunked'
import {normalizeIdentifier} from 'micromark-util-normalize-identifier'
import {resolveAll} from 'micromark-util-resolve-all'
import {codes, constants, types} from 'micromark-util-symbol'
import {ok as assert} from 'devlop'
/** @type {Construct} */
export const labelEnd = {
name: 'labelEnd',
tokenize: tokenizeLabelEnd,
resolveTo: resolveToLabelEnd,
resolveAll: resolveAllLabelEnd
}
/** @type {Construct} */
const resourceConstruct = {tokenize: tokenizeResource}
/** @type {Construct} */
const referenceFullConstruct = {tokenize: tokenizeReferenceFull}
/** @type {Construct} */
const referenceCollapsedConstruct = {tokenize: tokenizeReferenceCollapsed}
/** @type {Resolver} */
function resolveAllLabelEnd(events) {
let index = -1
while (++index < events.length) {
const token = events[index][1]
if (
token.type === types.labelImage ||
token.type === types.labelLink ||
token.type === types.labelEnd
) {
// Remove the marker.
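// An image opening (`![`) nests two marker tokens (`!` and `[`), so it has
// 4 marker events to drop; a link opening or a label end nests one marker,
// hence 2.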
events.splice(index + 1, token.type === types.labelImage ? 4 : 2)
token.type = types.data
index++
}
}
return events
}
/** @type {Resolver} */
function resolveToLabelEnd(events, context) {
let index = events.length
let offset = 0
/** @type {Token} */
let token
/** @type {number | undefined} */
let open
/** @type {number | undefined} */
let close
/** @type {Array<Event>} */
let media
// Find an opening.
while (index--) {
token = events[index][1]
if (open) {
// If we see another link, or inactive link label, we've been here before.
if (
token.type === types.link ||
(token.type === types.labelLink && token._inactive)
) {
break
}
// Mark other link openings as inactive, as we can't have links in
// links.
if (events[index][0] === 'enter' && token.type === types.labelLink) {
token._inactive = true
}
} else if (close) {
if (
events[index][0] === 'enter' &&
(token.type === types.labelImage || token.type === types.labelLink) &&
!token._balanced
) {
open = index
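// An image opening (`![`) carries an extra marker token compared to a
// link opening (`[`), so its label text starts two events later.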
if (token.type !== types.labelLink) {
offset = 2
break
}
}
} else if (token.type === types.labelEnd) {
close = index
}
}
assert(open !== undefined, '`open` is supposed to be found')
assert(close !== undefined, '`close` is supposed to be found')
const group = {
type: events[open][1].type === types.labelLink ? types.link : types.image,
start: Object.assign({}, events[open][1].start),
end: Object.assign({}, events[events.length - 1][1].end)
}
const label = {
type: types.label,
start: Object.assign({}, events[open][1].start),
end: Object.assign({}, events[close][1].end)
}
const text = {
type: types.labelText,
start: Object.assign({}, events[open + offset + 2][1].end),
end: Object.assign({}, events[close - 2][1].start)
}
media = [
['enter', group, context],
['enter', label, context]
]
// Opening marker.
media = push(media, events.slice(open + 1, open + offset + 3))
// Text open.
media = push(media, [['enter', text, context]])
// Always populated by defaults.
assert(
context.parser.constructs.insideSpan.null,
'expected `insideSpan.null` to be populated'
)
// Between.
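// Resolve everything between the opening and the closing label with the
// configured `insideSpan` resolvers (for example attention/emphasis).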
media = push(
media,
resolveAll(
context.parser.constructs.insideSpan.null,
events.slice(open + offset + 4, close - 3),
context
)
)
// Text close, marker close, label close.
media = push(media, [
['exit', text, context],
events[close - 2],
events[close - 1],
['exit', label, context]
])
// Reference, resource, or so.
media = push(media, events.slice(close + 1))
// Media close.
media = push(media, [['exit', group, context]])
splice(events, open, events.length, media)
return events
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeLabelEnd(effects, ok, nok) {
const self = this
let index = self.events.length
/** @type {Token} */
let labelStart
/** @type {boolean} */
let defined
// Find an opening.
while (index--) {
if (
(self.events[index][1].type === types.labelImage ||
self.events[index][1].type === types.labelLink) &&
!self.events[index][1]._balanced
) {
labelStart = self.events[index][1]
break
}
}
return start
/**
* Start of label end.
*
* ```markdown
* > | [a](b) c
* ^
* > | [a][b] c
* ^
* > | [a][] b
* ^
* > | [a] b
* ```
*
* @type {State}
*/
function start(code) {
assert(code === codes.rightSquareBracket, 'expected `]`')
// If there is not an okay opening.
if (!labelStart) {
return nok(code)
}
// If the corresponding label (link) start is marked as inactive,
// it means we'd be wrapping a link, like this:
//
// ```markdown
// > | a [b [c](d) e](f) g.
// ^
// ```
//
// We can't have that, so it's just balanced brackets.
if (labelStart._inactive) {
return labelEndNok(code)
}
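// Check whether the label text between the opening and this `]` matches a
// known definition; shortcut and collapsed references depend on that.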
defined = self.parser.defined.includes(
normalizeIdentifier(
self.sliceSerialize({start: labelStart.end, end: self.now()})
)
)
effects.enter(types.labelEnd)
effects.enter(types.labelMarker)
effects.consume(code)
effects.exit(types.labelMarker)
effects.exit(types.labelEnd)
return after
}
/**
* After `]`.
*
* ```markdown
* > | [a](b) c
* ^
* > | [a][b] c
* ^
* > | [a][] b
* ^
* > | [a] b
* ^
* ```
*
* @type {State}
*/
function after(code) {
// Note: `markdown-rs` also parses GFM footnotes here, which for us is in
// an extension.
// Resource (`[asd](fgh)`)?
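// If the resource turns out to be invalid, a defined label can still be a
// shortcut reference, so only fail when the label is not defined.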
if (code === codes.leftParenthesis) {
return effects.attempt(
resourceConstruct,
labelEndOk,
defined ? labelEndOk : labelEndNok
)(code)
}
// Full (`[asd][fgh]`) or collapsed (`[asd][]`) reference?
if (code === codes.leftSquareBracket) {
return effects.attempt(
referenceFullConstruct,
labelEndOk,
defined ? referenceNotFull : labelEndNok
)(code)
}
// Shortcut (`[asd]`) reference?
return defined ? labelEndOk(code) : labelEndNok(code)
}
/**
* After `]`, at `[`, but not at a full reference.
*
* > 👉 **Note**: we only get here if the label is defined.
*
* ```markdown
* > | [a][] b
* ^
* > | [a] b
* ^
* ```
*
* @type {State}
*/
function referenceNotFull(code) {
return effects.attempt(
referenceCollapsedConstruct,
labelEndOk,
labelEndNok
)(code)
}
/**
* Done, we found something.
*
* ```markdown
* > | [a](b) c
* ^
* > | [a][b] c
* ^
* > | [a][] b
* ^
* > | [a] b
* ^
* ```
*
* @type {State}
*/
function labelEndOk(code) {
// Note: `markdown-rs` does a bunch of stuff here.
return ok(code)
}
/**
* Done, it's nothing.
*
* There was an okay opening, but we didn't match anything.
*
* ```markdown
* > | [a](b c
* ^
* > | [a][b c
* ^
* > | [a] b
* ^
* ```
*
* @type {State}
*/
function labelEndNok(code) {
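// Mark the opening as balanced: it can no longer pair with a later label
// end, and `resolveAll` will turn it into plain data.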
labelStart._balanced = true
return nok(code)
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeResource(effects, ok, nok) {
return resourceStart
/**
* At a resource.
*
* ```markdown
* > | [a](b) c
* ^
* ```
*
* @type {State}
*/
function resourceStart(code) {
assert(code === codes.leftParenthesis, 'expected left paren')
effects.enter(types.resource)
effects.enter(types.resourceMarker)
effects.consume(code)
effects.exit(types.resourceMarker)
return resourceBefore
}
/**
* In resource, after `(`, at optional whitespace.
*
* ```markdown
* > | [a](b) c
* ^
* ```
*
* @type {State}
*/
function resourceBefore(code) {
return markdownLineEndingOrSpace(code)
? factoryWhitespace(effects, resourceOpen)(code)
: resourceOpen(code)
}
/**
* In resource, after optional whitespace, at `)` or a destination.
*
* ```markdown
* > | [a](b) c
* ^
* ```
*
* @type {State}
*/
function resourceOpen(code) {
if (code === codes.rightParenthesis) {
return resourceEnd(code)
}
return factoryDestination(
effects,
resourceDestinationAfter,
resourceDestinationMissing,
types.resourceDestination,
types.resourceDestinationLiteral,
types.resourceDestinationLiteralMarker,
types.resourceDestinationRaw,
types.resourceDestinationString,
constants.linkResourceDestinationBalanceMax
)(code)
}
/**
* In resource, after destination, at optional whitespace.
*
* ```markdown
* > | [a](b) c
* ^
* ```
*
* @type {State}
*/
function resourceDestinationAfter(code) {
return markdownLineEndingOrSpace(code)
? factoryWhitespace(effects, resourceBetween)(code)
: resourceEnd(code)
}
/**
* At invalid destination.
*
* ```markdown
* > | [a](<<) b
* ^
* ```
*
* @type {State}
*/
function resourceDestinationMissing(code) {
return nok(code)
}
/**
* In resource, after destination and whitespace, at `(` or title.
*
* ```markdown
* > | [a](b ) c
* ^
* ```
*
* @type {State}
*/
function resourceBetween(code) {
if (
code === codes.quotationMark ||
code === codes.apostrophe ||
code === codes.leftParenthesis
) {
return factoryTitle(
effects,
resourceTitleAfter,
nok,
types.resourceTitle,
types.resourceTitleMarker,
types.resourceTitleString
)(code)
}
return resourceEnd(code)
}
/**
* In resource, after title, at optional whitespace.
*
* ```markdown
* > | [a](b "c") d
* ^
* ```
*
* @type {State}
*/
function resourceTitleAfter(code) {
return markdownLineEndingOrSpace(code)
? factoryWhitespace(effects, resourceEnd)(code)
: resourceEnd(code)
}
/**
* In resource, at `)`.
*
* ```markdown
* > | [a](b) d
* ^
* ```
*
* @type {State}
*/
function resourceEnd(code) {
if (code === codes.rightParenthesis) {
effects.enter(types.resourceMarker)
effects.consume(code)
effects.exit(types.resourceMarker)
effects.exit(types.resource)
return ok
}
return nok(code)
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeReferenceFull(effects, ok, nok) {
const self = this
return referenceFull
/**
* In a reference (full), at the `[`.
*
* ```markdown
* > | [a][b] d
* ^
* ```
*
* @type {State}
*/
function referenceFull(code) {
assert(code === codes.leftSquareBracket, 'expected left bracket')
return factoryLabel.call(
self,
effects,
referenceFullAfter,
referenceFullMissing,
types.reference,
types.referenceMarker,
types.referenceString
)(code)
}
/**
* In a reference (full), after `]`.
*
* ```markdown
* > | [a][b] d
* ^
* ```
*
* @type {State}
*/
function referenceFullAfter(code) {
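// The last event is the `reference` token; slice off the surrounding `[`
// and `]` before normalizing and looking the identifier up.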
return self.parser.defined.includes(
normalizeIdentifier(
self.sliceSerialize(self.events[self.events.length - 1][1]).slice(1, -1)
)
)
? ok(code)
: nok(code)
}
/**
* In reference (full) that was missing.
*
* ```markdown
* > | [a][b d
* ^
* ```
*
* @type {State}
*/
function referenceFullMissing(code) {
return nok(code)
}
}
/**
* @this {TokenizeContext}
* @type {Tokenizer}
*/
function tokenizeReferenceCollapsed(effects, ok, nok) {
return referenceCollapsedStart
/**
* In reference (collapsed), at `[`.
*
* > 👉 **Note**: we only get here if the label is defined.
*
* ```markdown
* > | [a][] d
* ^
* ```
*
* @type {State}
*/
function referenceCollapsedStart(code) {
// We only attempt a collapsed label if there's a `[`.
assert(code === codes.leftSquareBracket, 'expected left bracket')
effects.enter(types.reference)
effects.enter(types.referenceMarker)
effects.consume(code)
effects.exit(types.referenceMarker)
return referenceCollapsedOpen
}
/**
* In reference (collapsed), at `]`.
*
* > 👉 **Note**: we only get here if the label is defined.
*
* ```markdown
* > | [a][] d
* ^
* ```
*
* @type {State}
*/
function referenceCollapsedOpen(code) {
if (code === codes.rightSquareBracket) {
effects.enter(types.referenceMarker)
effects.consume(code)
effects.exit(types.referenceMarker)
effects.exit(types.reference)
return ok
}
return nok(code)
}
}