|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import {factorySpace} from 'micromark-factory-space' |
|
import {markdownLineEnding} from 'micromark-util-character' |
|
import {subtokenize} from 'micromark-util-subtokenize' |
|
import {codes} from 'micromark-util-symbol/codes.js' |
|
import {constants} from 'micromark-util-symbol/constants.js' |
|
import {types} from 'micromark-util-symbol/types.js' |
|
import {ok as assert} from 'uvu/assert' |
|
|
|
|
|
|
|
|
|
|
|
// Construct for “content”: a run of lines that belong together, tokenized as
// chunks first and expanded afterwards by `resolveContent`.
export const content = {tokenize: tokenizeContent, resolve: resolveContent}

// Partial construct used to look ahead past a line ending: does the next line
// continue the current content, or does something interrupt it?
const continuationConstruct = {tokenize: tokenizeContinuation, partial: true}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Resolver: expand the `chunkContent` tokens in the event list into their
 * actual constructs.
 *
 * @param {Array} chunks - All events produced for the content.
 * @returns {Array} The same event list, mutated in place by `subtokenize`.
 */
function resolveContent(chunks) {
  subtokenize(chunks)
  return chunks
}
|
|
|
|
|
|
|
|
|
|
|
/**
 * Tokenize content: consume everything from a non-eof/eol code until either
 * the end of the file or a line ending that is not followed by more content
 * (decided by checking `continuationConstruct`).
 *
 * @param {object} effects - Tokenizer effects (`enter`, `exit`, `consume`,
 *   `check`).
 * @param {Function} ok - State to switch to when the content is done.
 * @returns {Function} Initial state (`chunkStart`).
 */
function tokenizeContent(effects, ok) {
  // Most recently opened `chunkContent` token; each new chunk is linked to it
  // through `previous`/`next` so they can later be processed as one stream.
  let previous

  return chunkStart

  /**
   * Before content: open the `content` token and its first chunk.
   * Content never starts at eof or at a line ending (asserted).
   */
  function chunkStart(code) {
    assert(
      code !== codes.eof && !markdownLineEnding(code),
      'expected no eof or eol'
    )

    effects.enter(types.content)
    previous = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent
    })
    return chunkInside(code)
  }

  /**
   * Inside a content chunk.
   */
  function chunkInside(code) {
    if (code === codes.eof) {
      return contentEnd(code)
    }

    // At a line ending: look ahead (without committing) whether the next line
    // continues this content (`contentContinue`) or ends it (`contentEnd`).
    if (markdownLineEnding(code)) {
      return effects.check(
        continuationConstruct,
        contentContinue,
        contentEnd
      )(code)
    }

    // Anything else is data in this chunk.
    effects.consume(code)
    return chunkInside
  }

  /**
   * At eof, or at a line ending that does not continue the content: close the
   * current chunk and the `content` token.
   */
  function contentEnd(code) {
    effects.exit(types.chunkContent)
    effects.exit(types.content)
    return ok(code)
  }

  /**
   * At a line ending that is followed by more content: consume it, close the
   * current chunk, and open a new chunk linked to the previous one.
   */
  function contentContinue(code) {
    assert(markdownLineEnding(code), 'expected eol')
    effects.consume(code)
    effects.exit(types.chunkContent)
    assert(previous, 'expected previous token')
    previous.next = effects.enter(types.chunkContent, {
      contentType: constants.contentTypeContent,
      previous
    })
    previous = previous.next
    return chunkInside
  }
}
|
|
|
|
|
|
|
|
|
|
|
/**
 * Tokenize the lookahead check: is a line ending followed by more content?
 *
 * Runs as a partial construct, so its effects are only committed when the
 * check matches. Succeeds (`ok`) when the next line continues the content;
 * fails (`nok`) on a blank line or when another flow construct interrupts.
 *
 * @param {object} effects - Tokenizer effects.
 * @param {Function} ok - State to switch to when content continues.
 * @param {Function} nok - State to switch to when content ends here.
 * @returns {Function} Initial state (`startLookahead`).
 */
function tokenizeContinuation(effects, ok, nok) {
  const self = this

  return startLookahead

  /**
   * At the line ending: consume it, then any indentation (`linePrefix`) at the
   * start of the next line, before deciding in `prefixed`.
   */
  function startLookahead(code) {
    assert(markdownLineEnding(code), 'expected a line ending')
    effects.exit(types.chunkContent)
    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return factorySpace(effects, prefixed, types.linePrefix)
  }

  /**
   * After optional indentation on the next line.
   */
  function prefixed(code) {
    // A blank line (immediate eol or eof) always ends the content.
    if (code === codes.eof || markdownLineEnding(code)) {
      return nok(code)
    }

    assert(
      self.parser.constructs.disable.null,
      'expected `disable.null` to be populated'
    )

    const tail = self.events[self.events.length - 1]

    // An indent of `tabSize` (4) or more columns continues the content —
    // indented code cannot interrupt — unless `codeIndented` is disabled.
    if (
      !self.parser.constructs.disable.null.includes('codeIndented') &&
      tail &&
      tail[1].type === types.linePrefix &&
      tail[2].sliceSerialize(tail[1], true).length >= constants.tabSize
    ) {
      return ok(code)
    }

    // Otherwise, the content continues only if no flow construct can
    // interrupt here (note the swapped `nok`/`ok`: an interrupt means the
    // content ends).
    return effects.interrupt(self.parser.constructs.flow, nok, ok)(code)
  }
}
|
|