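/**
 * Tokenizer for fenced code blocks: an opening fence of grave accents or
 * tildes, optional info and meta strings, content lines, and an optional
 * closing fence.
 */
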
import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding, markdownSpace} from 'micromark-util-character'
import {codes} from 'micromark-util-symbol/codes.js'
import {constants} from 'micromark-util-symbol/constants.js'
import {types} from 'micromark-util-symbol/types.js'
import {ok as assert} from 'uvu/assert'
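
/** Partial construct that succeeds only when the next line is not lazy. */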
const nonLazyContinuation = {
  tokenize: tokenizeNonLazyContinuation,
  partial: true
}
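
/** Fenced code blocks, opened and closed by a sequence of grave accents or tildes. */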
export const codeFenced = {
  name: 'codeFenced',
  tokenize: tokenizeCodeFenced,
  concrete: true
}
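
/**
 * Tokenize a whole fenced code block: the opening fence, its info and meta
 * strings, the content lines, and an optional closing fence.
 */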
function tokenizeCodeFenced(effects, ok, nok) {
  const self = this
  const closeStart = {tokenize: tokenizeCloseStart, partial: true}
  let initialPrefix = 0
  let sizeOpen = 0
  let marker

  return start
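
  /** Start of code: right before the opening fence sequence. */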
  function start(code) {
    return beforeSequenceOpen(code)
  }
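
  /**
   * Before the opening fence sequence: note any indentation (line prefix)
   * and remember which marker (grave accent or tilde) is used.
   */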
  function beforeSequenceOpen(code) {
    assert(
      code === codes.graveAccent || code === codes.tilde,
      'expected `` ` `` or `~`'
    )

    const tail = self.events[self.events.length - 1]
    initialPrefix =
      tail && tail[1].type === types.linePrefix
        ? tail[2].sliceSerialize(tail[1], true).length
        : 0

    marker = code
    effects.enter(types.codeFenced)
    effects.enter(types.codeFencedFence)
    effects.enter(types.codeFencedFenceSequence)
    return sequenceOpen(code)
  }
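
  /** In the opening fence sequence: count markers; at least three are needed. */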
  function sequenceOpen(code) {
    if (code === marker) {
      sizeOpen++
      effects.consume(code)
      return sequenceOpen
    }

    if (sizeOpen < constants.codeFencedSequenceSizeMin) {
      return nok(code)
    }

    effects.exit(types.codeFencedFenceSequence)
    return markdownSpace(code)
      ? factorySpace(effects, infoBefore, types.whitespace)(code)
      : infoBefore(code)
  }
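
  /**
   * After the opening fence sequence and optional whitespace, before the
   * info string or at the end of the line.
   */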
  function infoBefore(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.codeFencedFence)
      return self.interrupt
        ? ok(code)
        : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)
    }

    effects.enter(types.codeFencedFenceInfo)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return info(code)
  }
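
  /**
   * In the info string; a grave accent is not allowed when the fence itself
   * uses grave accents.
   */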
  function info(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.chunkString)
      effects.exit(types.codeFencedFenceInfo)
      return infoBefore(code)
    }

    if (markdownSpace(code)) {
      effects.exit(types.chunkString)
      effects.exit(types.codeFencedFenceInfo)
      return factorySpace(effects, metaBefore, types.whitespace)(code)
    }

    if (code === codes.graveAccent && code === marker) {
      return nok(code)
    }

    effects.consume(code)
    return info
  }
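
  /** After the info string and whitespace, before the meta string. */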
  function metaBefore(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return infoBefore(code)
    }

    effects.enter(types.codeFencedFenceMeta)
    effects.enter(types.chunkString, {contentType: constants.contentTypeString})
    return meta(code)
  }
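
  /**
   * In the meta string; as with the info string, grave accents are not
   * allowed when the fence uses grave accents.
   */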
  function meta(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.chunkString)
      effects.exit(types.codeFencedFenceMeta)
      return infoBefore(code)
    }

    if (code === codes.graveAccent && code === marker) {
      return nok(code)
    }

    effects.consume(code)
    return meta
  }
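
  /**
   * At an end of line that is not lazy: try the closing fence first,
   * otherwise continue with content.
   */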
  function atNonLazyBreak(code) {
    assert(markdownLineEnding(code), 'expected eol')
    return effects.attempt(closeStart, after, contentBefore)(code)
  }
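
  /** At an end of line before a content line: consume the line ending. */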
  function contentBefore(code) {
    assert(markdownLineEnding(code), 'expected eol')
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return contentStart
  }
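
  /**
   * At the start of a content line: strip up to the opening fence's
   * indentation before the chunk.
   */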
  function contentStart(code) {
    return initialPrefix > 0 && markdownSpace(code)
      ? factorySpace(
          effects,
          beforeContentChunk,
          types.linePrefix,
          initialPrefix + 1
        )(code)
      : beforeContentChunk(code)
  }
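
  /** Before a content chunk, possibly at the end of the line or file. */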
  function beforeContentChunk(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)
    }

    effects.enter(types.codeFlowValue)
    return contentChunk(code)
  }
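
  /** In a content chunk: consume until the end of the line. */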
  function contentChunk(code) {
    if (code === codes.eof || markdownLineEnding(code)) {
      effects.exit(types.codeFlowValue)
      return beforeContentChunk(code)
    }

    effects.consume(code)
    return contentChunk
  }
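
  /** After the whole fenced code block. */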
  function after(code) {
    effects.exit(types.codeFenced)
    return ok(code)
  }
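
  /**
   * Partial tokenizer for a closing fence: a line ending followed by an
   * optionally indented fence of the same marker.
   */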
  function tokenizeCloseStart(effects, ok, nok) {
    let size = 0

    return startBefore
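
    /** At the line ending before the closing fence. */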
    function startBefore(code) {
      assert(markdownLineEnding(code), 'expected eol')
      effects.enter(types.lineEnding)
      effects.consume(code)
      effects.exit(types.lineEnding)
      return start
    }
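
    /**
     * Before the closing fence sequence: eat optional indentation, at most a
     * tab size, or any amount when `codeIndented` is disabled.
     */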
    function start(code) {
      assert(
        self.parser.constructs.disable.null,
        'expected `disable.null` to be populated'
      )

      effects.enter(types.codeFencedFence)
      return markdownSpace(code)
        ? factorySpace(
            effects,
            beforeSequenceClose,
            types.linePrefix,
            self.parser.constructs.disable.null.includes('codeIndented')
              ? undefined
              : constants.tabSize
          )(code)
        : beforeSequenceClose(code)
    }
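
    /** Before the closing fence sequence: it must use the same marker. */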
    function beforeSequenceClose(code) {
      if (code === marker) {
        effects.enter(types.codeFencedFenceSequence)
        return sequenceClose(code)
      }

      return nok(code)
    }
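
    /**
     * In the closing fence sequence: it must be at least as long as the
     * opening sequence.
     */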
    function sequenceClose(code) {
      if (code === marker) {
        size++
        effects.consume(code)
        return sequenceClose
      }

      if (size >= sizeOpen) {
        effects.exit(types.codeFencedFenceSequence)
        return markdownSpace(code)
          ? factorySpace(effects, sequenceCloseAfter, types.whitespace)(code)
          : sequenceCloseAfter(code)
      }

      return nok(code)
    }
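
    /** After the closing fence sequence: only whitespace may follow. */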
    function sequenceCloseAfter(code) {
      if (code === codes.eof || markdownLineEnding(code)) {
        effects.exit(types.codeFencedFence)
        return ok(code)
      }

      return nok(code)
    }
  }
}
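
/**
 * Partial tokenizer that succeeds at a line ending whose next line is not
 * lazy, so the fenced code block actually continues.
 */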
function tokenizeNonLazyContinuation(effects, ok, nok) {
  const self = this

  return start
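
  /** At an end of line; the end of the file fails the check. */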
  function start(code) {
    if (code === codes.eof) {
      return nok(code)
    }

    assert(markdownLineEnding(code), 'expected eol')
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return lineStart
  }
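
  /** At the start of the next line: fail if the parser marked it as lazy. */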
  function lineStart(code) {
    return self.parser.lazy[self.now().line] ? nok(code) : ok(code)
  }
}