|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import {factorySpace} from 'micromark-factory-space' |
|
import {markdownLineEnding} from 'micromark-util-character' |
|
import {splice} from 'micromark-util-chunked' |
|
import {codes} from 'micromark-util-symbol/codes.js' |
|
import {constants} from 'micromark-util-symbol/constants.js' |
|
import {types} from 'micromark-util-symbol/types.js' |
|
import {ok as assert} from 'uvu/assert' |
|
|
|
|
|
// Initial construct for document-level content: drives container (block
// quote / list) continuation and feeds the rest of each line to a flow
// tokenizer as `chunkFlow` tokens.
export const document = {tokenize: initializeDocument}
|
|
|
|
|
// Construct used (via `effects.check` / `effects.attempt`) to look for a new
// container at the current position.
const containerConstruct = {tokenize: tokenizeContainer}
|
|
|
|
|
|
|
|
|
|
|
/**
 * Tokenize document content.
 *
 * Each line is handled in three phases: first, existing containers on the
 * `stack` get a chance to continue (`start` → `documentContinue`); then new
 * containers may open (`checkNewContainers` → `documentContinued`); finally
 * the remainder of the line is captured as a `chunkFlow` token and streamed
 * into a child flow tokenizer (`flowStart` → `flowContinue` → `writeToChild`).
 *
 * @param {Object} effects - Tokenizer effects (enter/exit/consume/attempt/check).
 * @returns {Function} Initial state function.
 */
function initializeDocument(effects) {
  const self = this
  // Open containers, as `[construct, containerState]` pairs, innermost last.
  const stack = []
  // How many containers on `stack` continued on the current line.
  let continued = 0
  // Child tokenizer for flow content; lazily created, reset at flow boundaries.
  let childFlow
  // Most recent `chunkFlow` token, so chunks can be linked `previous`/`next`.
  let childToken
  // Offset where the current line's flow content starts; used by the lazy-line
  // check in `writeToChild`.
  let lineStartOffset

  return start

  // At a line start: try to continue the next container on the stack, or, when
  // all containers were already handled, go look for new ones.
  function start(code) {
    if (continued < stack.length) {
      const item = stack[continued]
      // Expose this container's state to its `continuation` construct.
      self.containerState = item[1]
      assert(
        item[0].continuation,
        'expected `continuation` to be defined on container construct'
      )
      return effects.attempt(
        item[0].continuation,
        documentContinue,
        checkNewContainers
      )(code)
    }

    // Every container continued: check for new ones.
    return checkNewContainers(code)
  }

  // A container continued successfully.
  function documentContinue(code) {
    assert(
      self.containerState,
      'expected `containerState` to be defined after continuation'
    )

    continued++

    // The container asked for the flow to be closed (e.g. a blank line ending
    // a list item's content): close the child flow, end the current events at
    // the previous flow chunk's end point, and emit the container exits there.
    if (self.containerState._closeFlow) {
      self.containerState._closeFlow = undefined

      if (childFlow) {
        closeFlow()
      }

      const indexBeforeExits = self.events.length
      let indexBeforeFlow = indexBeforeExits
      // End point of the previous flow chunk.
      let point

      // Find the last `chunkFlow` exit to anchor the container exits at.
      while (indexBeforeFlow--) {
        if (
          self.events[indexBeforeFlow][0] === 'exit' &&
          self.events[indexBeforeFlow][1].type === types.chunkFlow
        ) {
          point = self.events[indexBeforeFlow][1].end
          break
        }
      }

      assert(point, 'could not find previous flow chunk')

      // Emit exit events for containers below `continued` (appended at the
      // end of `self.events`).
      exitContainers(continued)

      // Rewrite the freshly added exit events to end at `point`.
      let index = indexBeforeExits

      while (index < self.events.length) {
        self.events[index][1].end = Object.assign({}, point)
        index++
      }

      // Move the exits to right after the flow chunk they belong to.
      splice(
        self.events,
        indexBeforeFlow + 1,
        0,
        self.events.slice(indexBeforeExits)
      )

      // Drop the now-duplicated events from the tail.
      self.events.length = index

      return checkNewContainers(code)
    }

    return start(code)
  }

  // All containers that could continue did; decide whether new containers may
  // open, or whether this line belongs to the current flow.
  function checkNewContainers(code) {
    if (continued === stack.length) {
      // No child flow yet: go straight to opening containers / starting flow.
      if (!childFlow) {
        return documentContinued(code)
      }

      // A concrete construct (such as fenced code) is ongoing: nothing can
      // interrupt it, so the line is flow content.
      if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
        return flowStart(code)
      }

      // Signal to container constructs that they would be interrupting
      // something (except for the GFM-table dynamic-interrupt workaround).
      self.interrupt = Boolean(
        childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack
      )
    }

    // Check (without consuming) whether a new container starts here.
    self.containerState = {}
    return effects.check(
      containerConstruct,
      thereIsANewContainer,
      thereIsNoNewContainer
    )(code)
  }

  // A new container starts: close the current flow and any containers that
  // did not continue, then actually open the new container(s).
  function thereIsANewContainer(code) {
    if (childFlow) closeFlow()
    exitContainers(continued)
    return documentContinued(code)
  }

  // No new container: the line is flow content. It is "lazy" when not every
  // open container continued (recorded per line for `writeToChild`).
  function thereIsNoNewContainer(code) {
    self.parser.lazy[self.now().line] = continued !== stack.length
    lineStartOffset = self.now().offset
    return flowStart(code)
  }

  // Keep attempting to open further containers until none matches, then start
  // the flow content.
  function documentContinued(code) {
    // Fresh state for a potential next container.
    self.containerState = {}
    return effects.attempt(
      containerConstruct,
      containerContinue,
      flowStart
    )(code)
  }

  // A new container opened: record it on the stack and try for more.
  function containerContinue(code) {
    assert(
      self.currentConstruct,
      'expected `currentConstruct` to be defined on tokenizer'
    )
    assert(
      self.containerState,
      'expected `containerState` to be defined on tokenizer'
    )
    continued++
    stack.push([self.currentConstruct, self.containerState])

    return documentContinued(code)
  }

  // Start capturing the rest of the line as a flow chunk.
  function flowStart(code) {
    if (code === codes.eof) {
      // End of document: close everything and consume the EOF.
      if (childFlow) closeFlow()
      exitContainers(0)
      effects.consume(code)
      return
    }

    // Create the child flow tokenizer on first use.
    childFlow = childFlow || self.parser.flow(self.now())
    effects.enter(types.chunkFlow, {
      contentType: constants.contentTypeFlow,
      previous: childToken,
      _tokenizer: childFlow
    })

    return flowContinue(code)
  }

  // Consume flow content until the line ending (included in the chunk) or EOF.
  function flowContinue(code) {
    if (code === codes.eof) {
      writeToChild(effects.exit(types.chunkFlow), true)
      exitContainers(0)
      effects.consume(code)
      return
    }

    if (markdownLineEnding(code)) {
      effects.consume(code)
      writeToChild(effects.exit(types.chunkFlow))

      // Reset per-line state and handle the next line from the top.
      continued = 0
      self.interrupt = undefined
      return start
    }

    effects.consume(code)
    return flowContinue
  }

  // Feed a finished `chunkFlow` token to the child flow tokenizer, and handle
  // lazy lines that turn out not to continue the flow.
  function writeToChild(token, eof) {
    assert(childFlow, 'expected `childFlow` to be defined when continuing')
    const stream = self.sliceStream(token)
    // `null` signals EOF to the child tokenizer.
    if (eof) stream.push(null)
    // Link chunks together so later resolvers can walk them.
    token.previous = childToken
    if (childToken) childToken.next = token
    childToken = token
    childFlow.defineSkip(token.start)
    childFlow.write(stream)

    // This line was lazy (containers did not all continue). If the child flow
    // treated it as a continuation anyway, nothing to do; otherwise the flow
    // actually ended before this line, so close containers retroactively.
    if (self.parser.lazy[token.start.line]) {
      let index = childFlow.events.length

      while (index--) {
        if (
          // The token starts before this line…
          childFlow.events[index][1].start.offset < lineStartOffset &&
          // …and either is still open…
          (!childFlow.events[index][1].end ||
            // …or ends after the line started.
            childFlow.events[index][1].end.offset > lineStartOffset)
        ) {
          // The line is a lazy continuation of something in the child flow:
          // leave everything as is.
          return
        }
      }

      // Not a continuation: end the containers at the end of the chunk
      // *before* the lazy line (the second-to-last `chunkFlow` exit).
      const indexBeforeExits = self.events.length
      let indexBeforeFlow = indexBeforeExits
      // Whether one `chunkFlow` exit (the lazy line's own) was passed already.
      let seen
      // End point of the chunk before the lazy line.
      let point

      while (indexBeforeFlow--) {
        if (
          self.events[indexBeforeFlow][0] === 'exit' &&
          self.events[indexBeforeFlow][1].type === types.chunkFlow
        ) {
          if (seen) {
            point = self.events[indexBeforeFlow][1].end
            break
          }

          seen = true
        }
      }

      assert(point, 'could not find previous flow chunk')

      // Append exit events for containers below `continued`.
      exitContainers(continued)

      // Rewrite those exits to end at `point`.
      index = indexBeforeExits

      while (index < self.events.length) {
        self.events[index][1].end = Object.assign({}, point)
        index++
      }

      // Move the exits to right after the chunk they belong to.
      splice(
        self.events,
        indexBeforeFlow + 1,
        0,
        self.events.slice(indexBeforeExits)
      )

      // Drop the now-duplicated events from the tail.
      self.events.length = index
    }
  }

  // Emit exit events for (and pop) every container above `size`, innermost
  // first.
  function exitContainers(size) {
    let index = stack.length

    while (index-- > size) {
      const entry = stack[index]
      self.containerState = entry[1]
      assert(
        entry[0].exit,
        'expected `exit` to be defined on container construct'
      )
      entry[0].exit.call(self, effects)
    }

    stack.length = size
  }

  // Close the child flow tokenizer (send EOF) and reset the chunk chain.
  function closeFlow() {
    assert(
      self.containerState,
      'expected `containerState` to be defined when closing flow'
    )
    assert(childFlow, 'expected `childFlow` to be defined when closing it')
    childFlow.write([codes.eof])
    childToken = undefined
    childFlow = undefined
    self.containerState._closeFlow = undefined
  }
}
|
|
|
|
|
|
|
|
|
|
|
/**
 * Tokenize a (potential) container start: optional leading whitespace
 * followed by one of the document (container) constructs.
 *
 * @param {Object} effects - Tokenizer effects.
 * @param {Function} ok - State to move to when a container construct matches.
 * @param {Function} nok - State to move to when none matches.
 * @returns {Function} Initial state function.
 */
function tokenizeContainer(effects, ok, nok) {
  assert(
    this.parser.constructs.disable.null,
    'expected `disable.null` to be populated'
  )
  // When indented code is disabled, any amount of leading whitespace is
  // allowed; otherwise the prefix is capped at a tab size.
  const codeIndentedDisabled =
    this.parser.constructs.disable.null.includes('codeIndented')
  const prefixLimit = codeIndentedDisabled ? undefined : constants.tabSize
  // After the prefix, try the container constructs themselves.
  const afterPrefix = effects.attempt(this.parser.constructs.document, ok, nok)
  return factorySpace(effects, afterPrefix, types.linePrefix, prefixLimit)
}
|
|