import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding, markdownSpace} from 'micromark-util-character'

const nonLazyContinuation = {
  tokenize: tokenizeNonLazyContinuation,
  partial: true
}

// Construct for fenced code in flow (``` or ~~~ fences).
export const codeFenced = {
  name: 'codeFenced',
  tokenize: tokenizeCodeFenced,
  concrete: true
}

function tokenizeCodeFenced(effects, ok, nok) {
  const self = this
  // Construct to try at a line ending for a potential closing fence.
  const closeStart = {
    tokenize: tokenizeCloseStart,
    partial: true
  }
  // Indentation size before the opening fence.
  let initialPrefix = 0
  // Number of markers in the opening fence sequence.
  let sizeOpen = 0
  // Fence marker: a backtick (96) or a tilde.
  let marker

  return start

  // Start of fenced code.
  function start(code) {
    return beforeSequenceOpen(code)
  }

  // In the opening fence, after an optional prefix, at the marker sequence.
  function beforeSequenceOpen(code) {
    const tail = self.events[self.events.length - 1]
    initialPrefix =
      tail && tail[1].type === 'linePrefix'
        ? tail[2].sliceSerialize(tail[1], true).length
        : 0
    marker = code
    effects.enter('codeFenced')
    effects.enter('codeFencedFence')
    effects.enter('codeFencedFenceSequence')
    return sequenceOpen(code)
  }

  // In the opening fence sequence.
  function sequenceOpen(code) {
    if (code === marker) {
      sizeOpen++
      effects.consume(code)
      return sequenceOpen
    }

    // An opening fence needs at least three markers.
    if (sizeOpen < 3) {
      return nok(code)
    }

    effects.exit('codeFencedFenceSequence')
    return markdownSpace(code)
      ? factorySpace(effects, infoBefore, 'whitespace')(code)
      : infoBefore(code)
  }

  // In the opening fence, after the sequence and optional whitespace, before
  // the info string.
  function infoBefore(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit('codeFencedFence')
      return self.interrupt
        ? ok(code)
        : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)
    }

    effects.enter('codeFencedFenceInfo')
    effects.enter('chunkString', {
      contentType: 'string'
    })
    return info(code)
  }

  // In the info string.
  function info(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit('chunkString')
      effects.exit('codeFencedFenceInfo')
      return infoBefore(code)
    }

    if (markdownSpace(code)) {
      effects.exit('chunkString')
      effects.exit('codeFencedFenceInfo')
      return factorySpace(effects, metaBefore, 'whitespace')(code)
    }

    // The info string of a backtick fence cannot contain backticks.
    if (code === 96 && code === marker) {
      return nok(code)
    }

    effects.consume(code)
    return info
  }

  // After the info string and whitespace, before the meta string.
  function metaBefore(code) {
    if (code === null || markdownLineEnding(code)) {
      return infoBefore(code)
    }

    effects.enter('codeFencedFenceMeta')
    effects.enter('chunkString', {
      contentType: 'string'
    })
    return meta(code)
  }

  // In the meta string.
  function meta(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit('chunkString')
      effects.exit('codeFencedFenceMeta')
      return infoBefore(code)
    }

    // The meta string of a backtick fence cannot contain backticks.
    if (code === 96 && code === marker) {
      return nok(code)
    }

    effects.consume(code)
    return meta
  }

  // At an eol/eof that is not lazy: try a closing fence, otherwise continue
  // with content.
  function atNonLazyBreak(code) {
    return effects.attempt(closeStart, after, contentBefore)(code)
  }

  // At a line ending before more code content.
  function contentBefore(code) {
    effects.enter('lineEnding')
    effects.consume(code)
    effects.exit('lineEnding')
    return contentStart
  }

  // At the start of a content line: strip up to the opening fence's
  // indentation.
  function contentStart(code) {
    return initialPrefix > 0 && markdownSpace(code)
      ? factorySpace(
          effects,
          beforeContentChunk,
          'linePrefix',
          initialPrefix + 1
        )(code)
      : beforeContentChunk(code)
  }

  // Before a content chunk, after an optional prefix.
  function beforeContentChunk(code) {
    if (code === null || markdownLineEnding(code)) {
      return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)
    }

    effects.enter('codeFlowValue')
    return contentChunk(code)
  }

  // In a chunk of code content.
  function contentChunk(code) {
    if (code === null || markdownLineEnding(code)) {
      effects.exit('codeFlowValue')
      return beforeContentChunk(code)
    }

    effects.consume(code)
    return contentChunk
  }

  // After the whole fenced code block.
  function after(code) {
    effects.exit('codeFenced')
    return ok(code)
  }

  // Tokenizer for a potential closing fence, starting at a line ending.
  function tokenizeCloseStart(effects, ok, nok) {
    let size = 0

    return startBefore

    // At the line ending before a potential closing fence.
    function startBefore(code) {
      effects.enter('lineEnding')
      effects.consume(code)
      effects.exit('lineEnding')
      return start
    }

    // Before the closing fence, with optional indentation (at most three
    // spaces, unless indented code is disabled).
    function start(code) {
      effects.enter('codeFencedFence')
      return markdownSpace(code)
        ? factorySpace(
            effects,
            beforeSequenceClose,
            'linePrefix',
            self.parser.constructs.disable.null.includes('codeIndented')
              ? undefined
              : 4
          )(code)
        : beforeSequenceClose(code)
    }

    // In the closing fence, after an optional prefix, at the sequence.
    function beforeSequenceClose(code) {
      if (code === marker) {
        effects.enter('codeFencedFenceSequence')
        return sequenceClose(code)
      }

      return nok(code)
    }

    // In the closing fence sequence.
    function sequenceClose(code) {
      if (code === marker) {
        size++
        effects.consume(code)
        return sequenceClose
      }

      // The closing sequence must be at least as long as the opening one.
      if (size >= sizeOpen) {
        effects.exit('codeFencedFenceSequence')
        return markdownSpace(code)
          ? factorySpace(effects, sequenceCloseAfter, 'whitespace')(code)
          : sequenceCloseAfter(code)
      }

      return nok(code)
    }

    // After the closing fence sequence, after optional whitespace.
    function sequenceCloseAfter(code) {
      if (code === null || markdownLineEnding(code)) {
        effects.exit('codeFencedFence')
        return ok(code)
      }

      return nok(code)
    }
  }
}

// Tokenizer that succeeds at a line ending followed by a non-lazy line.
function tokenizeNonLazyContinuation(effects, ok, nok) {
  const self = this

  return start

  // At an eof (fail) or a line ending (consume it).
  function start(code) {
    if (code === null) {
      return nok(code)
    }

    effects.enter('lineEnding')
    effects.consume(code)
    effects.exit('lineEnding')
    return lineStart
  }

  // At the start of the next line: ok only if that line is not lazy.
  function lineStart(code) {
    return self.parser.lazy[self.now().line] ? nok(code) : ok(code)
  }
}