Refactor routing in App component to enhance navigation and improve error handling by integrating dynamic routes and updating the NotFound route.
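
Note: the diff below contains only the vendored micromark files pulled in alongside this commit; the App routing change the message describes is not part of this excerpt. As a rough sketch of the kind of change described, assuming React Router v6 and hypothetical Home, Post, and NotFound page components (none of these names appear in the diff):

import { BrowserRouter, Routes, Route } from 'react-router-dom';
import Home from './pages/Home';
import Post from './pages/Post';
import NotFound from './pages/NotFound';

export default function App() {
  return (
    <BrowserRouter>
      <Routes>
        <Route path="/" element={<Home />} />
        {/* Dynamic route: ":id" is resolved per navigation. */}
        <Route path="/posts/:id" element={<Post />} />
        {/* Catch-all: unknown paths fall through to the NotFound page. */}
        <Route path="*" element={<NotFound />} />
      </Routes>
    </BrowserRouter>
  );
}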
node_modules/micromark/lib/initialize/content.d.ts (generated, vendored, new file: 4 lines)
@@ -0,0 +1,4 @@
+/** @type {InitialConstruct} */
+export const content: InitialConstruct;
+import type { InitialConstruct } from 'micromark-util-types';
+//# sourceMappingURL=content.d.ts.map
node_modules/micromark/lib/initialize/content.d.ts.map (generated, vendored, new file: 1 line)
@@ -0,0 +1 @@
+{"version":3,"file":"content.d.ts","sourceRoot":"","sources":["content.js"],"names":[],"mappings":"AAeA,+BAA+B;AAC/B,sBADW,gBAAgB,CACyB;sCAT1C,sBAAsB"}
node_modules/micromark/lib/initialize/content.js (generated, vendored, new file: 79 lines)
@@ -0,0 +1,79 @@
+/**
+ * @import {
+ *   InitialConstruct,
+ *   Initializer,
+ *   State,
+ *   TokenizeContext,
+ *   Token
+ * } from 'micromark-util-types'
+ */
+
+import { factorySpace } from 'micromark-factory-space';
+import { markdownLineEnding } from 'micromark-util-character';
+/** @type {InitialConstruct} */
+export const content = {
+  tokenize: initializeContent
+};
+
+/**
+ * @this {TokenizeContext}
+ *   Context.
+ * @type {Initializer}
+ *   Content.
+ */
+function initializeContent(effects) {
+  const contentStart = effects.attempt(this.parser.constructs.contentInitial, afterContentStartConstruct, paragraphInitial);
+  /** @type {Token} */
+  let previous;
+  return contentStart;
+
+  /** @type {State} */
+  function afterContentStartConstruct(code) {
+    if (code === null) {
+      effects.consume(code);
+      return;
+    }
+    effects.enter("lineEnding");
+    effects.consume(code);
+    effects.exit("lineEnding");
+    return factorySpace(effects, contentStart, "linePrefix");
+  }
+
+  /** @type {State} */
+  function paragraphInitial(code) {
+    effects.enter("paragraph");
+    return lineStart(code);
+  }
+
+  /** @type {State} */
+  function lineStart(code) {
+    const token = effects.enter("chunkText", {
+      contentType: "text",
+      previous
+    });
+    if (previous) {
+      previous.next = token;
+    }
+    previous = token;
+    return data(code);
+  }
+
+  /** @type {State} */
+  function data(code) {
+    if (code === null) {
+      effects.exit("chunkText");
+      effects.exit("paragraph");
+      effects.consume(code);
+      return;
+    }
+    if (markdownLineEnding(code)) {
+      effects.consume(code);
+      effects.exit("chunkText");
+      return lineStart;
+    }
+
+    // Data.
+    effects.consume(code);
+    return data;
+  }
+}
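
Like every micromark initializer, initializeContent above is written as a state machine: each state function receives one character code (or null at the end of input), consumes it through effects, and returns the state that should handle the next code. A reduced sketch of that driving convention (an illustration only; the real tokenizer also records token events, buffers chunks, and supports attempt/check backtracking):

// Hypothetical driver for state functions shaped like the ones above.
function run(input, initialState) {
  let state = initialState;
  for (let index = 0; index <= input.length && state; index++) {
    // `null` marks the end of input, mirroring micromark's convention.
    const code = index < input.length ? input.charCodeAt(index) : null;
    state = state(code);
  }
}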
node_modules/micromark/lib/initialize/document.d.ts (generated, vendored, new file: 10 lines)
@@ -0,0 +1,10 @@
+/** @type {InitialConstruct} */
+export const document: InitialConstruct;
+/**
+ * Construct and its state.
+ */
+export type StackItem = [Construct, ContainerState];
+import type { InitialConstruct } from 'micromark-util-types';
+import type { Construct } from 'micromark-util-types';
+import type { ContainerState } from 'micromark-util-types';
+//# sourceMappingURL=document.d.ts.map
node_modules/micromark/lib/initialize/document.d.ts.map (generated, vendored, new file: 1 line)
@@ -0,0 +1 @@
+{"version":3,"file":"document.d.ts","sourceRoot":"","sources":["document.js"],"names":[],"mappings":"AAyBA,+BAA+B;AAC/B,uBADW,gBAAgB,CAC2B;;;;wBAXzC,CAAC,SAAS,EAAE,cAAc,CAAC;sCAJ9B,sBAAsB;+BAAtB,sBAAsB;oCAAtB,sBAAsB"}
node_modules/micromark/lib/initialize/document.js (generated, vendored, new file: 362 lines)
@@ -0,0 +1,362 @@
+/**
+ * @import {
+ *   Construct,
+ *   ContainerState,
+ *   InitialConstruct,
+ *   Initializer,
+ *   Point,
+ *   State,
+ *   TokenizeContext,
+ *   Tokenizer,
+ *   Token
+ * } from 'micromark-util-types'
+ */
+
+/**
+ * @typedef {[Construct, ContainerState]} StackItem
+ *   Construct and its state.
+ */
+
+import { factorySpace } from 'micromark-factory-space';
+import { markdownLineEnding } from 'micromark-util-character';
+import { splice } from 'micromark-util-chunked';
+/** @type {InitialConstruct} */
+export const document = {
+  tokenize: initializeDocument
+};
+
+/** @type {Construct} */
+const containerConstruct = {
+  tokenize: tokenizeContainer
+};
+
+/**
+ * @this {TokenizeContext}
+ *   Self.
+ * @type {Initializer}
+ *   Initializer.
+ */
+function initializeDocument(effects) {
+  const self = this;
+  /** @type {Array<StackItem>} */
+  const stack = [];
+  let continued = 0;
+  /** @type {TokenizeContext | undefined} */
+  let childFlow;
+  /** @type {Token | undefined} */
+  let childToken;
+  /** @type {number} */
+  let lineStartOffset;
+  return start;
+
+  /** @type {State} */
+  function start(code) {
+    // First we iterate through the open blocks, starting with the root
+    // document, and descending through last children down to the last open
+    // block.
+    // Each block imposes a condition that the line must satisfy if the block is
+    // to remain open.
+    // For example, a block quote requires a `>` character.
+    // A paragraph requires a non-blank line.
+    // In this phase we may match all or just some of the open blocks.
+    // But we cannot close unmatched blocks yet, because we may have a lazy
+    // continuation line.
+    if (continued < stack.length) {
+      const item = stack[continued];
+      self.containerState = item[1];
+      return effects.attempt(item[0].continuation, documentContinue, checkNewContainers)(code);
+    }
+
+    // Done.
+    return checkNewContainers(code);
+  }
+
+  /** @type {State} */
+  function documentContinue(code) {
+    continued++;
+
+    // Note: this field is called `_closeFlow` but it also closes containers.
+    // Perhaps a good idea to rename it but it’s already used in the wild by
+    // extensions.
+    if (self.containerState._closeFlow) {
+      self.containerState._closeFlow = undefined;
+      if (childFlow) {
+        closeFlow();
+      }
+
+      // Note: this algorithm for moving events around is similar to the
+      // algorithm when dealing with lazy lines in `writeToChild`.
+      const indexBeforeExits = self.events.length;
+      let indexBeforeFlow = indexBeforeExits;
+      /** @type {Point | undefined} */
+      let point;
+
+      // Find the flow chunk.
+      while (indexBeforeFlow--) {
+        if (self.events[indexBeforeFlow][0] === 'exit' && self.events[indexBeforeFlow][1].type === "chunkFlow") {
+          point = self.events[indexBeforeFlow][1].end;
+          break;
+        }
+      }
+      exitContainers(continued);
+
+      // Fix positions.
+      let index = indexBeforeExits;
+      while (index < self.events.length) {
+        self.events[index][1].end = {
+          ...point
+        };
+        index++;
+      }
+
+      // Inject the exits earlier (they’re still also at the end).
+      splice(self.events, indexBeforeFlow + 1, 0, self.events.slice(indexBeforeExits));
+
+      // Discard the duplicate exits.
+      self.events.length = index;
+      return checkNewContainers(code);
+    }
+    return start(code);
+  }
+
+  /** @type {State} */
+  function checkNewContainers(code) {
+    // Next, after consuming the continuation markers for existing blocks, we
+    // look for new block starts (e.g. `>` for a block quote).
+    // If we encounter a new block start, we close any blocks unmatched in
+    // step 1 before creating the new block as a child of the last matched
+    // block.
+    if (continued === stack.length) {
+      // No need to `check` whether there’s a container, of `exitContainers`
+      // would be moot.
+      // We can instead immediately `attempt` to parse one.
+      if (!childFlow) {
+        return documentContinued(code);
+      }
+
+      // If we have concrete content, such as block HTML or fenced code,
+      // we can’t have containers “pierce” into them, so we can immediately
+      // start.
+      if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
+        return flowStart(code);
+      }
+
+      // If we do have flow, it could still be a blank line,
+      // but we’d be interrupting it w/ a new container if there’s a current
+      // construct.
+      // To do: next major: remove `_gfmTableDynamicInterruptHack` (no longer
+      // needed in micromark-extension-gfm-table@1.0.6).
+      self.interrupt = Boolean(childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack);
+    }
+
+    // Check if there is a new container.
+    self.containerState = {};
+    return effects.check(containerConstruct, thereIsANewContainer, thereIsNoNewContainer)(code);
+  }
+
+  /** @type {State} */
+  function thereIsANewContainer(code) {
+    if (childFlow) closeFlow();
+    exitContainers(continued);
+    return documentContinued(code);
+  }
+
+  /** @type {State} */
+  function thereIsNoNewContainer(code) {
+    self.parser.lazy[self.now().line] = continued !== stack.length;
+    lineStartOffset = self.now().offset;
+    return flowStart(code);
+  }
+
+  /** @type {State} */
+  function documentContinued(code) {
+    // Try new containers.
+    self.containerState = {};
+    return effects.attempt(containerConstruct, containerContinue, flowStart)(code);
+  }
+
+  /** @type {State} */
+  function containerContinue(code) {
+    continued++;
+    stack.push([self.currentConstruct, self.containerState]);
+    // Try another.
+    return documentContinued(code);
+  }
+
+  /** @type {State} */
+  function flowStart(code) {
+    if (code === null) {
+      if (childFlow) closeFlow();
+      exitContainers(0);
+      effects.consume(code);
+      return;
+    }
+    childFlow = childFlow || self.parser.flow(self.now());
+    effects.enter("chunkFlow", {
+      _tokenizer: childFlow,
+      contentType: "flow",
+      previous: childToken
+    });
+    return flowContinue(code);
+  }
+
+  /** @type {State} */
+  function flowContinue(code) {
+    if (code === null) {
+      writeToChild(effects.exit("chunkFlow"), true);
+      exitContainers(0);
+      effects.consume(code);
+      return;
+    }
+    if (markdownLineEnding(code)) {
+      effects.consume(code);
+      writeToChild(effects.exit("chunkFlow"));
+      // Get ready for the next line.
+      continued = 0;
+      self.interrupt = undefined;
+      return start;
+    }
+    effects.consume(code);
+    return flowContinue;
+  }
+
+  /**
+   * @param {Token} token
+   *   Token.
+   * @param {boolean | undefined} [endOfFile]
+   *   Whether the token is at the end of the file (default: `false`).
+   * @returns {undefined}
+   *   Nothing.
+   */
+  function writeToChild(token, endOfFile) {
+    const stream = self.sliceStream(token);
+    if (endOfFile) stream.push(null);
+    token.previous = childToken;
+    if (childToken) childToken.next = token;
+    childToken = token;
+    childFlow.defineSkip(token.start);
+    childFlow.write(stream);
+
+    // Alright, so we just added a lazy line:
+    //
+    // ```markdown
+    // > a
+    // b.
+    //
+    // Or:
+    //
+    // > ~~~c
+    // d
+    //
+    // Or:
+    //
+    // > | e |
+    // f
+    // ```
+    //
+    // The construct in the second example (fenced code) does not accept lazy
+    // lines, so it marked itself as done at the end of its first line, and
+    // then the content construct parses `d`.
+    // Most constructs in markdown match on the first line: if the first line
+    // forms a construct, a non-lazy line can’t “unmake” it.
+    //
+    // The construct in the third example is potentially a GFM table, and
+    // those are *weird*.
+    // It *could* be a table, from the first line, if the following line
+    // matches a condition.
+    // In this case, that second line is lazy, which “unmakes” the first line
+    // and turns the whole into one content block.
+    //
+    // We’ve now parsed the non-lazy and the lazy line, and can figure out
+    // whether the lazy line started a new flow block.
+    // If it did, we exit the current containers between the two flow blocks.
+    if (self.parser.lazy[token.start.line]) {
+      let index = childFlow.events.length;
+      while (index--) {
+        if (
+          // The token starts before the line ending…
+          childFlow.events[index][1].start.offset < lineStartOffset && (
+          // …and either is not ended yet…
+          !childFlow.events[index][1].end ||
+          // …or ends after it.
+          childFlow.events[index][1].end.offset > lineStartOffset)) {
+          // Exit: there’s still something open, which means it’s a lazy line
+          // part of something.
+          return;
+        }
+      }
+
+      // Note: this algorithm for moving events around is similar to the
+      // algorithm when closing flow in `documentContinue`.
+      const indexBeforeExits = self.events.length;
+      let indexBeforeFlow = indexBeforeExits;
+      /** @type {boolean | undefined} */
+      let seen;
+      /** @type {Point | undefined} */
+      let point;
+
+      // Find the previous chunk (the one before the lazy line).
+      while (indexBeforeFlow--) {
+        if (self.events[indexBeforeFlow][0] === 'exit' && self.events[indexBeforeFlow][1].type === "chunkFlow") {
+          if (seen) {
+            point = self.events[indexBeforeFlow][1].end;
+            break;
+          }
+          seen = true;
+        }
+      }
+      exitContainers(continued);
+
+      // Fix positions.
+      index = indexBeforeExits;
+      while (index < self.events.length) {
+        self.events[index][1].end = {
+          ...point
+        };
+        index++;
+      }
+
+      // Inject the exits earlier (they’re still also at the end).
+      splice(self.events, indexBeforeFlow + 1, 0, self.events.slice(indexBeforeExits));
+
+      // Discard the duplicate exits.
+      self.events.length = index;
+    }
+  }
+
+  /**
+   * @param {number} size
+   *   Size.
+   * @returns {undefined}
+   *   Nothing.
+   */
+  function exitContainers(size) {
+    let index = stack.length;
+
+    // Exit open containers.
+    while (index-- > size) {
+      const entry = stack[index];
+      self.containerState = entry[1];
+      entry[0].exit.call(self, effects);
+    }
+    stack.length = size;
+  }
+  function closeFlow() {
+    childFlow.write([null]);
+    childToken = undefined;
+    childFlow = undefined;
+    self.containerState._closeFlow = undefined;
+  }
+}
+
+/**
+ * @this {TokenizeContext}
+ *   Context.
+ * @type {Tokenizer}
+ *   Tokenizer.
+ */
+function tokenizeContainer(effects, ok, nok) {
+  // Always populated by defaults.
+
+  return factorySpace(effects, effects.attempt(this.parser.constructs.document, ok, nok), "linePrefix", this.parser.constructs.disable.null.includes('codeIndented') ? undefined : 4);
+}
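
The lazy-line bookkeeping above is what lets a continuation line without a ">" marker stay inside a block quote instead of closing the container. That behavior is observable through the package's public API (a sketch, assuming the published micromark entry point):

import { micromark } from 'micromark';

// "b" has no ">" marker, yet as a lazy continuation line it stays part of
// the paragraph opened inside the block quote.
console.log(micromark('> a\nb'));
// Expected per CommonMark: <blockquote>\n<p>a\nb</p>\n</blockquote>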
node_modules/micromark/lib/initialize/flow.d.ts (generated, vendored, new file: 4 lines)
@@ -0,0 +1,4 @@
+/** @type {InitialConstruct} */
+export const flow: InitialConstruct;
+import type { InitialConstruct } from 'micromark-util-types';
+//# sourceMappingURL=flow.d.ts.map
node_modules/micromark/lib/initialize/flow.d.ts.map (generated, vendored, new file: 1 line)
@@ -0,0 +1 @@
+{"version":3,"file":"flow.d.ts","sourceRoot":"","sources":["flow.js"],"names":[],"mappings":"AAeA,+BAA+B;AAC/B,mBADW,gBAAgB,CACmB;sCAVpC,sBAAsB"}
node_modules/micromark/lib/initialize/flow.js (generated, vendored, new file: 58 lines)
@@ -0,0 +1,58 @@
+/**
+ * @import {
+ *   InitialConstruct,
+ *   Initializer,
+ *   State,
+ *   TokenizeContext
+ * } from 'micromark-util-types'
+ */
+
+import { blankLine, content } from 'micromark-core-commonmark';
+import { factorySpace } from 'micromark-factory-space';
+import { markdownLineEnding } from 'micromark-util-character';
+/** @type {InitialConstruct} */
+export const flow = {
+  tokenize: initializeFlow
+};
+
+/**
+ * @this {TokenizeContext}
+ *   Self.
+ * @type {Initializer}
+ *   Initializer.
+ */
+function initializeFlow(effects) {
+  const self = this;
+  const initial = effects.attempt(
+    // Try to parse a blank line.
+    blankLine, atBlankEnding,
+    // Try to parse initial flow (essentially, only code).
+    effects.attempt(this.parser.constructs.flowInitial, afterConstruct, factorySpace(effects, effects.attempt(this.parser.constructs.flow, afterConstruct, effects.attempt(content, afterConstruct)), "linePrefix")));
+  return initial;
+
+  /** @type {State} */
+  function atBlankEnding(code) {
+    if (code === null) {
+      effects.consume(code);
+      return;
+    }
+    effects.enter("lineEndingBlank");
+    effects.consume(code);
+    effects.exit("lineEndingBlank");
+    self.currentConstruct = undefined;
+    return initial;
+  }
+
+  /** @type {State} */
+  function afterConstruct(code) {
+    if (code === null) {
+      effects.consume(code);
+      return;
+    }
+    effects.enter("lineEnding");
+    effects.consume(code);
+    effects.exit("lineEnding");
+    self.currentConstruct = undefined;
+    return initial;
+  }
+}
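
The nested attempt calls in initializeFlow encode a priority order: blank lines first, then flowInitial constructs, then (after optional indentation) the regular flow constructs, and finally the content initializer from content.js above as the fallback. Through the public API, that ordering is what routes a heading to its own construct while plain text falls through to a paragraph (a sketch, assuming the published micromark entry point):

import { micromark } from 'micromark';

// "# hi" matches a flow construct (ATX heading); "just text" matches none
// and falls through to `content`, becoming a paragraph.
console.log(micromark('# hi'));      // "<h1>hi</h1>"
console.log(micromark('just text')); // "<p>just text</p>"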
node_modules/micromark/lib/initialize/text.d.ts (generated, vendored, new file: 8 lines)
@@ -0,0 +1,8 @@
+export namespace resolver {
+    let resolveAll: Resolver;
+}
+export const string: InitialConstruct;
+export const text: InitialConstruct;
+import type { Resolver } from 'micromark-util-types';
+import type { InitialConstruct } from 'micromark-util-types';
+//# sourceMappingURL=text.d.ts.map
node_modules/micromark/lib/initialize/text.d.ts.map (generated, vendored, new file: 1 line)
@@ -0,0 +1 @@
+{"version":3,"file":"text.d.ts","sourceRoot":"","sources":["text.js"],"names":[],"mappings":";;;AAeA,sCAAiD;AACjD,oCAA6C;8BARnC,sBAAsB;sCAAtB,sBAAsB"}
node_modules/micromark/lib/initialize/text.js (generated, vendored, new file: 212 lines)
@@ -0,0 +1,212 @@
+/**
+ * @import {
+ *   Code,
+ *   InitialConstruct,
+ *   Initializer,
+ *   Resolver,
+ *   State,
+ *   TokenizeContext
+ * } from 'micromark-util-types'
+ */
+
+export const resolver = {
+  resolveAll: createResolver()
+};
+export const string = initializeFactory('string');
+export const text = initializeFactory('text');
+
+/**
+ * @param {'string' | 'text'} field
+ *   Field.
+ * @returns {InitialConstruct}
+ *   Construct.
+ */
+function initializeFactory(field) {
+  return {
+    resolveAll: createResolver(field === 'text' ? resolveAllLineSuffixes : undefined),
+    tokenize: initializeText
+  };
+
+  /**
+   * @this {TokenizeContext}
+   *   Context.
+   * @type {Initializer}
+   */
+  function initializeText(effects) {
+    const self = this;
+    const constructs = this.parser.constructs[field];
+    const text = effects.attempt(constructs, start, notText);
+    return start;
+
+    /** @type {State} */
+    function start(code) {
+      return atBreak(code) ? text(code) : notText(code);
+    }
+
+    /** @type {State} */
+    function notText(code) {
+      if (code === null) {
+        effects.consume(code);
+        return;
+      }
+      effects.enter("data");
+      effects.consume(code);
+      return data;
+    }
+
+    /** @type {State} */
+    function data(code) {
+      if (atBreak(code)) {
+        effects.exit("data");
+        return text(code);
+      }
+
+      // Data.
+      effects.consume(code);
+      return data;
+    }
+
+    /**
+     * @param {Code} code
+     *   Code.
+     * @returns {boolean}
+     *   Whether the code is a break.
+     */
+    function atBreak(code) {
+      if (code === null) {
+        return true;
+      }
+      const list = constructs[code];
+      let index = -1;
+      if (list) {
+        // Always populated by defaults.
+
+        while (++index < list.length) {
+          const item = list[index];
+          if (!item.previous || item.previous.call(self, self.previous)) {
+            return true;
+          }
+        }
+      }
+      return false;
+    }
+  }
+}
+
+/**
+ * @param {Resolver | undefined} [extraResolver]
+ *   Resolver.
+ * @returns {Resolver}
+ *   Resolver.
+ */
+function createResolver(extraResolver) {
+  return resolveAllText;
+
+  /** @type {Resolver} */
+  function resolveAllText(events, context) {
+    let index = -1;
+    /** @type {number | undefined} */
+    let enter;
+
+    // A rather boring computation (to merge adjacent `data` events) which
+    // improves mm performance by 29%.
+    while (++index <= events.length) {
+      if (enter === undefined) {
+        if (events[index] && events[index][1].type === "data") {
+          enter = index;
+          index++;
+        }
+      } else if (!events[index] || events[index][1].type !== "data") {
+        // Don’t do anything if there is one data token.
+        if (index !== enter + 2) {
+          events[enter][1].end = events[index - 1][1].end;
+          events.splice(enter + 2, index - enter - 2);
+          index = enter + 2;
+        }
+        enter = undefined;
+      }
+    }
+    return extraResolver ? extraResolver(events, context) : events;
+  }
+}
+
+/**
+ * A rather ugly set of instructions which again looks at chunks in the input
+ * stream.
+ * The reason to do this here is that it is *much* faster to parse in reverse.
+ * And that we can’t hook into `null` to split the line suffix before an EOF.
+ * To do: figure out if we can make this into a clean utility, or even in core.
+ * As it will be useful for GFMs literal autolink extension (and maybe even
+ * tables?)
+ *
+ * @type {Resolver}
+ */
+function resolveAllLineSuffixes(events, context) {
+  let eventIndex = 0; // Skip first.
+
+  while (++eventIndex <= events.length) {
+    if ((eventIndex === events.length || events[eventIndex][1].type === "lineEnding") && events[eventIndex - 1][1].type === "data") {
+      const data = events[eventIndex - 1][1];
+      const chunks = context.sliceStream(data);
+      let index = chunks.length;
+      let bufferIndex = -1;
+      let size = 0;
+      /** @type {boolean | undefined} */
+      let tabs;
+      while (index--) {
+        const chunk = chunks[index];
+        if (typeof chunk === 'string') {
+          bufferIndex = chunk.length;
+          while (chunk.charCodeAt(bufferIndex - 1) === 32) {
+            size++;
+            bufferIndex--;
+          }
+          if (bufferIndex) break;
+          bufferIndex = -1;
+        }
+        // Number
+        else if (chunk === -2) {
+          tabs = true;
+          size++;
+        } else if (chunk === -1) {
+          // Empty
+        } else {
+          // Replacement character, exit.
+          index++;
+          break;
+        }
+      }
+
+      // Allow final trailing whitespace.
+      if (context._contentTypeTextTrailing && eventIndex === events.length) {
+        size = 0;
+      }
+      if (size) {
+        const token = {
+          type: eventIndex === events.length || tabs || size < 2 ? "lineSuffix" : "hardBreakTrailing",
+          start: {
+            _bufferIndex: index ? bufferIndex : data.start._bufferIndex + bufferIndex,
+            _index: data.start._index + index,
+            line: data.end.line,
+            column: data.end.column - size,
+            offset: data.end.offset - size
+          },
+          end: {
+            ...data.end
+          }
+        };
+        data.end = {
+          ...token.start
+        };
+        if (data.start.offset === data.end.offset) {
+          Object.assign(data, token);
+        } else {
+          events.splice(eventIndex, 0, ['enter', token, context], ['exit', token, context]);
+          eventIndex += 2;
+        }
+      }
+      eventIndex++;
+    }
+  }
+  return events;
+}
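
resolveAllLineSuffixes is what separates ordinary trailing whitespace from a hard break: two or more trailing spaces (without tabs) before a line ending become a hardBreakTrailing token rather than a plain lineSuffix. That distinction shows up directly in the HTML output (a sketch, assuming the published micromark entry point):

import { micromark } from 'micromark';

// One trailing space is just a line suffix; two become a hard break.
console.log(micromark('a \nb'));  // "<p>a\nb</p>"
console.log(micromark('a  \nb')); // "<p>a<br />\nb</p>"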