Refactor routing in App component to enhance navigation and improve error handling by integrating dynamic routes and updating the NotFound route.

node_modules/micromark/dev/index.d.ts (generated, vendored, normal file, 82 lines)
@@ -0,0 +1,82 @@
/**
 * Compile markdown to HTML.
 *
 * > Note: which encodings are supported depends on the engine.
 * > For info on Node.js, see:
 * > <https://nodejs.org/api/util.html#whatwg-supported-encodings>.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | null | undefined} encoding
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | Options | null | undefined} [encoding]
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 */
export function micromark(value: Value, encoding: Encoding | null | undefined, options?: Options | null | undefined): string;
/**
 * Compile markdown to HTML.
 *
 * > Note: which encodings are supported depends on the engine.
 * > For info on Node.js, see:
 * > <https://nodejs.org/api/util.html#whatwg-supported-encodings>.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | null | undefined} encoding
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | Options | null | undefined} [encoding]
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 */
export function micromark(value: Value, options?: Options | null | undefined): string;
export { compile } from "./lib/compile.js";
export { parse } from "./lib/parse.js";
export { postprocess } from "./lib/postprocess.js";
export { preprocess } from "./lib/preprocess.js";
export type Options = import("micromark-util-types").Options;
import type { Value } from 'micromark-util-types';
import type { Encoding } from 'micromark-util-types';
//# sourceMappingURL=index.d.ts.map
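
A minimal usage sketch (not part of this diff) of the two overloads declared above; parameter behavior follows the JSDoc in this file:

import {micromark} from 'micromark'

// String input: the single-argument overload.
console.log(micromark('## Hello, *world*!'))
// => '<h2>Hello, <em>world</em>!</h2>'

// Uint8Array input: pass the encoding through the first overload.
const bytes = new TextEncoder().encode('*emphasis*')
console.log(micromark(bytes, 'utf8'))
// => '<p><em>emphasis</em></p>'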

node_modules/micromark/dev/index.d.ts.map (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.js"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyBG,iCACQ,KAAK,YAEL,QAAQ,GAAG,IAAI,GAAG,SAAS,YAG3B,OAAO,GAAG,IAAI,GAAG,SAAS,GAExB,MAAM,CAGhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,iCACQ,KAAK,YAEL,OAAO,GAAG,IAAI,GAAG,SAAS,GAExB,MAAM,CAGhB;;;;;sBAvCU,OAAO,sBAAsB,EAAE,OAAO;2BAJjB,sBAAsB;8BAAtB,sBAAsB"}

node_modules/micromark/dev/index.js (generated, vendored, normal file, 68 lines)
@@ -0,0 +1,68 @@
/**
 * @import {Encoding, Value} from 'micromark-util-types'
 */

/**
 * @typedef {import('micromark-util-types').Options} Options
 */

import {compile} from './lib/compile.js'
import {parse} from './lib/parse.js'
import {postprocess} from './lib/postprocess.js'
import {preprocess} from './lib/preprocess.js'

export {compile} from './lib/compile.js'
export {parse} from './lib/parse.js'
export {postprocess} from './lib/postprocess.js'
export {preprocess} from './lib/preprocess.js'

/**
 * Compile markdown to HTML.
 *
 * > Note: which encodings are supported depends on the engine.
 * > For info on Node.js, see:
 * > <https://nodejs.org/api/util.html#whatwg-supported-encodings>.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | null | undefined} encoding
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | Options | null | undefined} [encoding]
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 */
export function micromark(value, encoding, options) {
  if (typeof encoding !== 'string') {
    options = encoding
    encoding = undefined
  }

  return compile(options)(
    postprocess(
      parse(options)
        .document()
        .write(preprocess()(value, encoding, true))
    )
  )
}
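
For illustration only, the pipeline inside `micromark()` above can be spelled out with the named exports; a sketch mirroring the function body:

import {compile, parse, postprocess, preprocess} from 'micromark'

// preprocess -> parse (document tokenizer) -> postprocess -> compile to HTML.
const events = postprocess(
  parse().document().write(preprocess()('*hi*', undefined, true))
)
console.log(compile()(events)) // => '<p><em>hi</em></p>'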

node_modules/micromark/dev/lib/compile.d.ts (generated, vendored, normal file, 16 lines)
@@ -0,0 +1,16 @@
/**
 * @param {CompileOptions | null | undefined} [options]
 * @returns {Compile}
 */
export function compile(options?: CompileOptions | null | undefined): Compile;
export type Media = {
    image?: boolean | undefined;
    labelId?: string | undefined;
    label?: string | undefined;
    referenceId?: string | undefined;
    destination?: string | undefined;
    title?: string | undefined;
};
import type { CompileOptions } from 'micromark-util-types';
import type { Compile } from 'micromark-util-types';
//# sourceMappingURL=compile.d.ts.map

node_modules/micromark/dev/lib/compile.d.ts.map (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"compile.d.ts","sourceRoot":"","sources":["compile.js"],"names":[],"mappings":"AA6DA;;;GAGG;AACH,kCAHW,cAAc,GAAG,IAAI,GAAG,SAAS,GAC/B,OAAO,CAgkCnB;;YA/lCa,OAAO,GAAG,SAAS;cACnB,MAAM,GAAG,SAAS;YAClB,MAAM,GAAG,SAAS;kBAClB,MAAM,GAAG,SAAS;kBAClB,MAAM,GAAG,SAAS;YAClB,MAAM,GAAG,SAAS;;oCAVtB,sBAAsB;6BAAtB,sBAAsB"}

node_modules/micromark/dev/lib/compile.js (generated, vendored, normal file, 1152 lines)
File diff suppressed because it is too large

node_modules/micromark/dev/lib/constructs.d.ts (generated, vendored, normal file, 73 lines)
@@ -0,0 +1,73 @@
/** @satisfies {Extension['document']} */
export const document: {
    42: import("micromark-util-types").Construct;
    43: import("micromark-util-types").Construct;
    45: import("micromark-util-types").Construct;
    48: import("micromark-util-types").Construct;
    49: import("micromark-util-types").Construct;
    50: import("micromark-util-types").Construct;
    51: import("micromark-util-types").Construct;
    52: import("micromark-util-types").Construct;
    53: import("micromark-util-types").Construct;
    54: import("micromark-util-types").Construct;
    55: import("micromark-util-types").Construct;
    56: import("micromark-util-types").Construct;
    57: import("micromark-util-types").Construct;
    62: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['contentInitial']} */
export const contentInitial: {
    91: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['flowInitial']} */
export const flowInitial: {
    [-2]: import("micromark-util-types").Construct;
    [-1]: import("micromark-util-types").Construct;
    32: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['flow']} */
export const flow: {
    35: import("micromark-util-types").Construct;
    42: import("micromark-util-types").Construct;
    45: import("micromark-util-types").Construct[];
    60: import("micromark-util-types").Construct;
    61: import("micromark-util-types").Construct;
    95: import("micromark-util-types").Construct;
    96: import("micromark-util-types").Construct;
    126: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['string']} */
export const string: {
    38: import("micromark-util-types").Construct;
    92: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['text']} */
export const text: {
    [-5]: import("micromark-util-types").Construct;
    [-4]: import("micromark-util-types").Construct;
    [-3]: import("micromark-util-types").Construct;
    33: import("micromark-util-types").Construct;
    38: import("micromark-util-types").Construct;
    42: import("micromark-util-types").Construct;
    60: import("micromark-util-types").Construct[];
    91: import("micromark-util-types").Construct;
    92: import("micromark-util-types").Construct[];
    93: import("micromark-util-types").Construct;
    95: import("micromark-util-types").Construct;
    96: import("micromark-util-types").Construct;
};
export namespace insideSpan {
    let _null: (import("micromark-util-types").Construct | {
        resolveAll: import("micromark-util-types").Resolver;
    })[];
    export { _null as null };
}
export namespace attentionMarkers {
    let _null_1: (42 | 95)[];
    export { _null_1 as null };
}
export namespace disable {
    let _null_2: never[];
    export { _null_2 as null };
}
//# sourceMappingURL=constructs.d.ts.map

node_modules/micromark/dev/lib/constructs.d.ts.map (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"constructs.d.ts","sourceRoot":"","sources":["constructs.js"],"names":[],"mappings":"AA6BA,yCAAyC;AACzC;;;;;;;;;;;;;;;EAeC;AAED,+CAA+C;AAC/C;;EAEC;AAED,4CAA4C;AAC5C;;;;EAIC;AAED,qCAAqC;AACrC;;;;;;;;;EASC;AAED,uCAAuC;AACvC;;;EAGC;AAED,qCAAqC;AACrC;;;;;;;;;;;;;EAaC"}

node_modules/micromark/dev/lib/constructs.js (generated, vendored, normal file, 101 lines)
@@ -0,0 +1,101 @@
/**
 * @import {Extension} from 'micromark-util-types'
 */

import {
  attention,
  autolink,
  blockQuote,
  characterEscape,
  characterReference,
  codeFenced,
  codeIndented,
  codeText,
  definition,
  hardBreakEscape,
  headingAtx,
  htmlFlow,
  htmlText,
  labelEnd,
  labelStartImage,
  labelStartLink,
  lineEnding,
  list,
  setextUnderline,
  thematicBreak
} from 'micromark-core-commonmark'
import {codes} from 'micromark-util-symbol'
import {resolver as resolveText} from './initialize/text.js'

/** @satisfies {Extension['document']} */
export const document = {
  [codes.asterisk]: list,
  [codes.plusSign]: list,
  [codes.dash]: list,
  [codes.digit0]: list,
  [codes.digit1]: list,
  [codes.digit2]: list,
  [codes.digit3]: list,
  [codes.digit4]: list,
  [codes.digit5]: list,
  [codes.digit6]: list,
  [codes.digit7]: list,
  [codes.digit8]: list,
  [codes.digit9]: list,
  [codes.greaterThan]: blockQuote
}

/** @satisfies {Extension['contentInitial']} */
export const contentInitial = {
  [codes.leftSquareBracket]: definition
}

/** @satisfies {Extension['flowInitial']} */
export const flowInitial = {
  [codes.horizontalTab]: codeIndented,
  [codes.virtualSpace]: codeIndented,
  [codes.space]: codeIndented
}

/** @satisfies {Extension['flow']} */
export const flow = {
  [codes.numberSign]: headingAtx,
  [codes.asterisk]: thematicBreak,
  [codes.dash]: [setextUnderline, thematicBreak],
  [codes.lessThan]: htmlFlow,
  [codes.equalsTo]: setextUnderline,
  [codes.underscore]: thematicBreak,
  [codes.graveAccent]: codeFenced,
  [codes.tilde]: codeFenced
}

/** @satisfies {Extension['string']} */
export const string = {
  [codes.ampersand]: characterReference,
  [codes.backslash]: characterEscape
}

/** @satisfies {Extension['text']} */
export const text = {
  [codes.carriageReturn]: lineEnding,
  [codes.lineFeed]: lineEnding,
  [codes.carriageReturnLineFeed]: lineEnding,
  [codes.exclamationMark]: labelStartImage,
  [codes.ampersand]: characterReference,
  [codes.asterisk]: attention,
  [codes.lessThan]: [autolink, htmlText],
  [codes.leftSquareBracket]: labelStartLink,
  [codes.backslash]: [hardBreakEscape, characterEscape],
  [codes.rightSquareBracket]: labelEnd,
  [codes.underscore]: attention,
  [codes.graveAccent]: codeText
}

/** @satisfies {Extension['insideSpan']} */
export const insideSpan = {null: [attention, resolveText]}

/** @satisfies {Extension['attentionMarkers']} */
export const attentionMarkers = {null: [codes.asterisk, codes.underscore]}

/** @satisfies {Extension['disable']} */
export const disable = {null: []}
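
The numeric keys in the `.d.ts` above are these same character codes, resolved; a quick check against `micromark-util-symbol` (the values are confirmed by the `42:`, `62:` and `[-2]:` keys in this diff):

import {codes} from 'micromark-util-symbol'

console.log(codes.asterisk) // => 42, the key shared by `list`, `attention` and `thematicBreak`
console.log(codes.greaterThan) // => 62, the `document` key for `blockQuote`
console.log(codes.horizontalTab) // => -2, tabs are preprocessed into negative “virtual” codes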

node_modules/micromark/dev/lib/create-tokenizer.d.ts (generated, vendored, normal file, 46 lines)
@@ -0,0 +1,46 @@
/**
 * Create a tokenizer.
 * Tokenizers deal with one type of data (e.g., containers, flow, text).
 * The parser is the object dealing with it all.
 * `initialize` works like other constructs, except that only its `tokenize`
 * function is used, in which case it doesn’t receive an `ok` or `nok`.
 * `from` can be given to set the point before the first character, although
 * when further lines are indented, they must be set with `defineSkip`.
 *
 * @param {ParseContext} parser
 *   Parser.
 * @param {InitialConstruct} initialize
 *   Construct.
 * @param {Omit<Point, '_bufferIndex' | '_index'> | undefined} [from]
 *   Point (optional).
 * @returns {TokenizeContext}
 *   Context.
 */
export function createTokenizer(parser: ParseContext, initialize: InitialConstruct, from?: Omit<Point, "_bufferIndex" | "_index"> | undefined): TokenizeContext;
/**
 * Restore the state.
 */
export type Restore = () => undefined;
/**
 * Info.
 */
export type Info = {
    /**
     * Restore.
     */
    restore: Restore;
    /**
     * From.
     */
    from: number;
};
/**
 * Handle a successful run.
 */
export type ReturnHandle = (construct: Construct, info: Info) => undefined;
import type { ParseContext } from 'micromark-util-types';
import type { InitialConstruct } from 'micromark-util-types';
import type { Point } from 'micromark-util-types';
import type { TokenizeContext } from 'micromark-util-types';
import type { Construct } from 'micromark-util-types';
//# sourceMappingURL=create-tokenizer.d.ts.map

node_modules/micromark/dev/lib/create-tokenizer.d.ts.map (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"create-tokenizer.d.ts","sourceRoot":"","sources":["create-tokenizer.js"],"names":[],"mappings":"AAgDA;;;;;;;;;;;;;;;;;GAiBG;AACH,wCATW,YAAY,cAEZ,gBAAgB,SAEhB,IAAI,CAAC,KAAK,EAAE,cAAc,GAAG,QAAQ,CAAC,GAAG,SAAS,GAEhD,eAAe,CAwhB3B;;;;4BApkBY,SAAS;;;;;;;;aAKR,OAAO;;;;UAEP,MAAM;;;;;uCAKT,SAAS,QAET,IAAI,KAEF,SAAS;kCAtBZ,sBAAsB;sCAAtB,sBAAsB;2BAAtB,sBAAsB;qCAAtB,sBAAsB;+BAAtB,sBAAsB"}

node_modules/micromark/dev/lib/create-tokenizer.js (generated, vendored, normal file, 717 lines)
@@ -0,0 +1,717 @@
/**
 * @import {
 *   Chunk,
 *   Code,
 *   ConstructRecord,
 *   Construct,
 *   Effects,
 *   InitialConstruct,
 *   ParseContext,
 *   Point,
 *   State,
 *   TokenizeContext,
 *   Token
 * } from 'micromark-util-types'
 */

/**
 * @callback Restore
 *   Restore the state.
 * @returns {undefined}
 *   Nothing.
 *
 * @typedef Info
 *   Info.
 * @property {Restore} restore
 *   Restore.
 * @property {number} from
 *   From.
 *
 * @callback ReturnHandle
 *   Handle a successful run.
 * @param {Construct} construct
 *   Construct.
 * @param {Info} info
 *   Info.
 * @returns {undefined}
 *   Nothing.
 */

import createDebug from 'debug'
import {ok as assert} from 'devlop'
import {markdownLineEnding} from 'micromark-util-character'
import {push, splice} from 'micromark-util-chunked'
import {resolveAll} from 'micromark-util-resolve-all'
import {codes, values} from 'micromark-util-symbol'

const debug = createDebug('micromark')

/**
 * Create a tokenizer.
 * Tokenizers deal with one type of data (e.g., containers, flow, text).
 * The parser is the object dealing with it all.
 * `initialize` works like other constructs, except that only its `tokenize`
 * function is used, in which case it doesn’t receive an `ok` or `nok`.
 * `from` can be given to set the point before the first character, although
 * when further lines are indented, they must be set with `defineSkip`.
 *
 * @param {ParseContext} parser
 *   Parser.
 * @param {InitialConstruct} initialize
 *   Construct.
 * @param {Omit<Point, '_bufferIndex' | '_index'> | undefined} [from]
 *   Point (optional).
 * @returns {TokenizeContext}
 *   Context.
 */
export function createTokenizer(parser, initialize, from) {
  /** @type {Point} */
  let point = {
    _bufferIndex: -1,
    _index: 0,
    line: (from && from.line) || 1,
    column: (from && from.column) || 1,
    offset: (from && from.offset) || 0
  }
  /** @type {Record<string, number>} */
  const columnStart = {}
  /** @type {Array<Construct>} */
  const resolveAllConstructs = []
  /** @type {Array<Chunk>} */
  let chunks = []
  /** @type {Array<Token>} */
  let stack = []
  /** @type {boolean | undefined} */
  let consumed = true

  /**
   * Tools used for tokenizing.
   *
   * @type {Effects}
   */
  const effects = {
    attempt: constructFactory(onsuccessfulconstruct),
    check: constructFactory(onsuccessfulcheck),
    consume,
    enter,
    exit,
    interrupt: constructFactory(onsuccessfulcheck, {interrupt: true})
  }

  /**
   * State and tools for resolving and serializing.
   *
   * @type {TokenizeContext}
   */
  const context = {
    code: codes.eof,
    containerState: {},
    defineSkip,
    events: [],
    now,
    parser,
    previous: codes.eof,
    sliceSerialize,
    sliceStream,
    write
  }

  /**
   * The state function.
   *
   * @type {State | undefined}
   */
  let state = initialize.tokenize.call(context, effects)

  /**
   * Track which character we expect to be consumed, to catch bugs.
   *
   * @type {Code}
   */
  let expectedCode

  if (initialize.resolveAll) {
    resolveAllConstructs.push(initialize)
  }

  return context

  /** @type {TokenizeContext['write']} */
  function write(slice) {
    chunks = push(chunks, slice)

    main()

    // Exit if we’re not done, resolve might change stuff.
    if (chunks[chunks.length - 1] !== codes.eof) {
      return []
    }

    addResult(initialize, 0)

    // Otherwise, resolve, and exit.
    context.events = resolveAll(resolveAllConstructs, context.events, context)

    return context.events
  }

  //
  // Tools.
  //

  /** @type {TokenizeContext['sliceSerialize']} */
  function sliceSerialize(token, expandTabs) {
    return serializeChunks(sliceStream(token), expandTabs)
  }

  /** @type {TokenizeContext['sliceStream']} */
  function sliceStream(token) {
    return sliceChunks(chunks, token)
  }

  /** @type {TokenizeContext['now']} */
  function now() {
    // This is a hot path, so we clone manually instead of `Object.assign({}, point)`
    const {_bufferIndex, _index, line, column, offset} = point
    return {_bufferIndex, _index, line, column, offset}
  }

  /** @type {TokenizeContext['defineSkip']} */
  function defineSkip(value) {
    columnStart[value.line] = value.column
    accountForPotentialSkip()
    debug('position: define skip: `%j`', point)
  }

  //
  // State management.
  //

  /**
   * Main loop (note that `_index` and `_bufferIndex` in `point` are modified by
   * `consume`).
   * Here is where we walk through the chunks, which either include strings of
   * several characters, or numerical character codes.
   * The reason to do this in a loop instead of a call is so the stack can
   * drain.
   *
   * @returns {undefined}
   *   Nothing.
   */
  function main() {
    /** @type {number} */
    let chunkIndex

    while (point._index < chunks.length) {
      const chunk = chunks[point._index]

      // If we’re in a buffer chunk, loop through it.
      if (typeof chunk === 'string') {
        chunkIndex = point._index

        if (point._bufferIndex < 0) {
          point._bufferIndex = 0
        }

        while (
          point._index === chunkIndex &&
          point._bufferIndex < chunk.length
        ) {
          go(chunk.charCodeAt(point._bufferIndex))
        }
      } else {
        go(chunk)
      }
    }
  }

  /**
   * Deal with one code.
   *
   * @param {Code} code
   *   Code.
   * @returns {undefined}
   *   Nothing.
   */
  function go(code) {
    assert(consumed === true, 'expected character to be consumed')
    consumed = undefined
    debug('main: passing `%s` to %s', code, state && state.name)
    expectedCode = code
    assert(typeof state === 'function', 'expected state')
    state = state(code)
  }

  /** @type {Effects['consume']} */
  function consume(code) {
    assert(code === expectedCode, 'expected given code to equal expected code')

    debug('consume: `%s`', code)

    assert(
      consumed === undefined,
      'expected code to not have been consumed: this might be because `return x(code)` instead of `return x` was used'
    )
    assert(
      code === null
        ? context.events.length === 0 ||
            context.events[context.events.length - 1][0] === 'exit'
        : context.events[context.events.length - 1][0] === 'enter',
      'expected last token to be open'
    )

    if (markdownLineEnding(code)) {
      point.line++
      point.column = 1
      point.offset += code === codes.carriageReturnLineFeed ? 2 : 1
      accountForPotentialSkip()
      debug('position: after eol: `%j`', point)
    } else if (code !== codes.virtualSpace) {
      point.column++
      point.offset++
    }

    // Not in a string chunk.
    if (point._bufferIndex < 0) {
      point._index++
    } else {
      point._bufferIndex++

      // At end of string chunk.
      if (
        point._bufferIndex ===
        // Points w/ non-negative `_bufferIndex` reference
        // strings.
        /** @type {string} */ (chunks[point._index]).length
      ) {
        point._bufferIndex = -1
        point._index++
      }
    }

    // Expose the previous character.
    context.previous = code

    // Mark as consumed.
    consumed = true
  }

  /** @type {Effects['enter']} */
  function enter(type, fields) {
    /** @type {Token} */
    // @ts-expect-error Patch instead of assign required fields to help GC.
    const token = fields || {}
    token.type = type
    token.start = now()

    assert(typeof type === 'string', 'expected string type')
    assert(type.length > 0, 'expected non-empty string')
    debug('enter: `%s`', type)

    context.events.push(['enter', token, context])

    stack.push(token)

    return token
  }

  /** @type {Effects['exit']} */
  function exit(type) {
    assert(typeof type === 'string', 'expected string type')
    assert(type.length > 0, 'expected non-empty string')

    const token = stack.pop()
    assert(token, 'cannot close w/o open tokens')
    token.end = now()

    assert(type === token.type, 'expected exit token to match current token')

    assert(
      !(
        token.start._index === token.end._index &&
        token.start._bufferIndex === token.end._bufferIndex
      ),
      'expected non-empty token (`' + type + '`)'
    )

    debug('exit: `%s`', token.type)
    context.events.push(['exit', token, context])

    return token
  }

  /**
   * Use results.
   *
   * @type {ReturnHandle}
   */
  function onsuccessfulconstruct(construct, info) {
    addResult(construct, info.from)
  }

  /**
   * Discard results.
   *
   * @type {ReturnHandle}
   */
  function onsuccessfulcheck(_, info) {
    info.restore()
  }

  /**
   * Factory to attempt/check/interrupt.
   *
   * @param {ReturnHandle} onreturn
   *   Callback.
   * @param {{interrupt?: boolean | undefined} | undefined} [fields]
   *   Fields.
   */
  function constructFactory(onreturn, fields) {
    return hook

    /**
     * Handle either an object mapping codes to constructs, a list of
     * constructs, or a single construct.
     *
     * @param {Array<Construct> | ConstructRecord | Construct} constructs
     *   Constructs.
     * @param {State} returnState
     *   State.
     * @param {State | undefined} [bogusState]
     *   State.
     * @returns {State}
     *   State.
     */
    function hook(constructs, returnState, bogusState) {
      /** @type {ReadonlyArray<Construct>} */
      let listOfConstructs
      /** @type {number} */
      let constructIndex
      /** @type {Construct} */
      let currentConstruct
      /** @type {Info} */
      let info

      return Array.isArray(constructs)
        ? /* c8 ignore next 1 */
          handleListOfConstructs(constructs)
        : 'tokenize' in constructs
          ? // Looks like a construct.
            handleListOfConstructs([/** @type {Construct} */ (constructs)])
          : handleMapOfConstructs(constructs)

      /**
       * Handle a list of construct.
       *
       * @param {ConstructRecord} map
       *   Constructs.
       * @returns {State}
       *   State.
       */
      function handleMapOfConstructs(map) {
        return start

        /** @type {State} */
        function start(code) {
          const left = code !== null && map[code]
          const all = code !== null && map.null
          const list = [
            // To do: add more extension tests.
            /* c8 ignore next 2 */
            ...(Array.isArray(left) ? left : left ? [left] : []),
            ...(Array.isArray(all) ? all : all ? [all] : [])
          ]

          return handleListOfConstructs(list)(code)
        }
      }

      /**
       * Handle a list of construct.
       *
       * @param {ReadonlyArray<Construct>} list
       *   Constructs.
       * @returns {State}
       *   State.
       */
      function handleListOfConstructs(list) {
        listOfConstructs = list
        constructIndex = 0

        if (list.length === 0) {
          assert(bogusState, 'expected `bogusState` to be given')
          return bogusState
        }

        return handleConstruct(list[constructIndex])
      }

      /**
       * Handle a single construct.
       *
       * @param {Construct} construct
       *   Construct.
       * @returns {State}
       *   State.
       */
      function handleConstruct(construct) {
        return start

        /** @type {State} */
        function start(code) {
          // To do: not needed to store if there is no bogus state, probably?
          // Currently doesn’t work because `inspect` in document does a check
          // w/o a bogus, which doesn’t make sense. But it does seem to help perf
          // by not storing.
          info = store()
          currentConstruct = construct

          if (!construct.partial) {
            context.currentConstruct = construct
          }

          // Always populated by defaults.
          assert(
            context.parser.constructs.disable.null,
            'expected `disable.null` to be populated'
          )

          if (
            construct.name &&
            context.parser.constructs.disable.null.includes(construct.name)
          ) {
            return nok(code)
          }

          return construct.tokenize.call(
            // If we do have fields, create an object w/ `context` as its
            // prototype.
            // This allows a “live binding”, which is needed for `interrupt`.
            fields ? Object.assign(Object.create(context), fields) : context,
            effects,
            ok,
            nok
          )(code)
        }
      }

      /** @type {State} */
      function ok(code) {
        assert(code === expectedCode, 'expected code')
        consumed = true
        onreturn(currentConstruct, info)
        return returnState
      }

      /** @type {State} */
      function nok(code) {
        assert(code === expectedCode, 'expected code')
        consumed = true
        info.restore()

        if (++constructIndex < listOfConstructs.length) {
          return handleConstruct(listOfConstructs[constructIndex])
        }

        return bogusState
      }
    }
  }

  /**
   * @param {Construct} construct
   *   Construct.
   * @param {number} from
   *   From.
   * @returns {undefined}
   *   Nothing.
   */
  function addResult(construct, from) {
    if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
      resolveAllConstructs.push(construct)
    }

    if (construct.resolve) {
      splice(
        context.events,
        from,
        context.events.length - from,
        construct.resolve(context.events.slice(from), context)
      )
    }

    if (construct.resolveTo) {
      context.events = construct.resolveTo(context.events, context)
    }

    assert(
      construct.partial ||
        context.events.length === 0 ||
        context.events[context.events.length - 1][0] === 'exit',
      'expected last token to end'
    )
  }

  /**
   * Store state.
   *
   * @returns {Info}
   *   Info.
   */
  function store() {
    const startPoint = now()
    const startPrevious = context.previous
    const startCurrentConstruct = context.currentConstruct
    const startEventsIndex = context.events.length
    const startStack = Array.from(stack)

    return {from: startEventsIndex, restore}

    /**
     * Restore state.
     *
     * @returns {undefined}
     *   Nothing.
     */
    function restore() {
      point = startPoint
      context.previous = startPrevious
      context.currentConstruct = startCurrentConstruct
      context.events.length = startEventsIndex
      stack = startStack
      accountForPotentialSkip()
      debug('position: restore: `%j`', point)
    }
  }

  /**
   * Move the current point a bit forward in the line when it’s on a column
   * skip.
   *
   * @returns {undefined}
   *   Nothing.
   */
  function accountForPotentialSkip() {
    if (point.line in columnStart && point.column < 2) {
      point.column = columnStart[point.line]
      point.offset += columnStart[point.line] - 1
    }
  }
}

/**
 * Get the chunks from a slice of chunks in the range of a token.
 *
 * @param {ReadonlyArray<Chunk>} chunks
 *   Chunks.
 * @param {Pick<Token, 'end' | 'start'>} token
 *   Token.
 * @returns {Array<Chunk>}
 *   Chunks.
 */
function sliceChunks(chunks, token) {
  const startIndex = token.start._index
  const startBufferIndex = token.start._bufferIndex
  const endIndex = token.end._index
  const endBufferIndex = token.end._bufferIndex
  /** @type {Array<Chunk>} */
  let view

  if (startIndex === endIndex) {
    assert(endBufferIndex > -1, 'expected non-negative end buffer index')
    assert(startBufferIndex > -1, 'expected non-negative start buffer index')
    // @ts-expect-error `_bufferIndex` is used on string chunks.
    view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)]
  } else {
    view = chunks.slice(startIndex, endIndex)

    if (startBufferIndex > -1) {
      const head = view[0]
      if (typeof head === 'string') {
        view[0] = head.slice(startBufferIndex)
        /* c8 ignore next 4 -- used to be used, no longer */
      } else {
        assert(startBufferIndex === 0, 'expected `startBufferIndex` to be `0`')
        view.shift()
      }
    }

    if (endBufferIndex > 0) {
      // @ts-expect-error `_bufferIndex` is used on string chunks.
      view.push(chunks[endIndex].slice(0, endBufferIndex))
    }
  }

  return view
}

/**
 * Get the string value of a slice of chunks.
 *
 * @param {ReadonlyArray<Chunk>} chunks
 *   Chunks.
 * @param {boolean | undefined} [expandTabs=false]
 *   Whether to expand tabs (default: `false`).
 * @returns {string}
 *   Result.
 */
function serializeChunks(chunks, expandTabs) {
  let index = -1
  /** @type {Array<string>} */
  const result = []
  /** @type {boolean | undefined} */
  let atTab

  while (++index < chunks.length) {
    const chunk = chunks[index]
    /** @type {string} */
    let value

    if (typeof chunk === 'string') {
      value = chunk
    } else
      switch (chunk) {
        case codes.carriageReturn: {
          value = values.cr

          break
        }

        case codes.lineFeed: {
          value = values.lf

          break
        }

        case codes.carriageReturnLineFeed: {
          value = values.cr + values.lf

          break
        }

        case codes.horizontalTab: {
          value = expandTabs ? values.space : values.ht

          break
        }

        case codes.virtualSpace: {
          if (!expandTabs && atTab) continue
          value = values.space

          break
        }

        default: {
          assert(typeof chunk === 'number', 'expected number')
          // Currently only replacement character.
          value = String.fromCharCode(chunk)
        }
      }

    atTab = chunk === codes.horizontalTab
    result.push(value)
  }

  return result.join('')
}
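
A sketch of driving a tokenizer by hand, the way `micromark()` does internally: `write` returns the flat enter/exit event stream built by `enter`/`exit` above (the exact event list printed here is illustrative):

import {parse, postprocess, preprocess} from 'micromark'

const events = postprocess(
  parse().document().write(preprocess()('# hi', undefined, true))
)

for (const [kind, token] of events) {
  console.log(kind, token.type, token.start.line + ':' + token.start.column)
}
// e.g. 'enter atxHeading 1:1' ... 'exit atxHeading 1:5'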

node_modules/micromark/dev/lib/initialize/content.d.ts (generated, vendored, normal file, 4 lines)
@@ -0,0 +1,4 @@
/** @type {InitialConstruct} */
export const content: InitialConstruct;
import type { InitialConstruct } from 'micromark-util-types';
//# sourceMappingURL=content.d.ts.map

node_modules/micromark/dev/lib/initialize/content.d.ts.map (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"content.d.ts","sourceRoot":"","sources":["content.js"],"names":[],"mappings":"AAeA,+BAA+B;AAC/B,sBADW,gBAAgB,CACyB;sCAT1C,sBAAsB"}

node_modules/micromark/dev/lib/initialize/content.js (generated, vendored, normal file, 99 lines)
@@ -0,0 +1,99 @@
/**
 * @import {
 *   InitialConstruct,
 *   Initializer,
 *   State,
 *   TokenizeContext,
 *   Token
 * } from 'micromark-util-types'
 */

import {ok as assert} from 'devlop'
import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding} from 'micromark-util-character'
import {codes, constants, types} from 'micromark-util-symbol'

/** @type {InitialConstruct} */
export const content = {tokenize: initializeContent}

/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Initializer}
 *   Content.
 */
function initializeContent(effects) {
  const contentStart = effects.attempt(
    this.parser.constructs.contentInitial,
    afterContentStartConstruct,
    paragraphInitial
  )
  /** @type {Token} */
  let previous

  return contentStart

  /** @type {State} */
  function afterContentStartConstruct(code) {
    assert(
      code === codes.eof || markdownLineEnding(code),
      'expected eol or eof'
    )

    if (code === codes.eof) {
      effects.consume(code)
      return
    }

    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    return factorySpace(effects, contentStart, types.linePrefix)
  }

  /** @type {State} */
  function paragraphInitial(code) {
    assert(
      code !== codes.eof && !markdownLineEnding(code),
      'expected anything other than a line ending or EOF'
    )
    effects.enter(types.paragraph)
    return lineStart(code)
  }

  /** @type {State} */
  function lineStart(code) {
    const token = effects.enter(types.chunkText, {
      contentType: constants.contentTypeText,
      previous
    })

    if (previous) {
      previous.next = token
    }

    previous = token

    return data(code)
  }

  /** @type {State} */
  function data(code) {
    if (code === codes.eof) {
      effects.exit(types.chunkText)
      effects.exit(types.paragraph)
      effects.consume(code)
      return
    }

    if (markdownLineEnding(code)) {
      effects.consume(code)
      effects.exit(types.chunkText)
      return lineStart
    }

    // Data.
    effects.consume(code)
    return data
  }
}
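
The `contentInitial` attempt above is what lets a definition start content without opening a paragraph; everything else falls through to `paragraphInitial`. A quick check:

import {micromark} from 'micromark'

// Consumed by the `contentInitial` attempt (a definition), so no empty paragraph:
console.log(micromark('[x]: /url\n\n[x]'))
// => '<p><a href="/url">x</a></p>'

// Falls through to `paragraphInitial`:
console.log(micromark('just text'))
// => '<p>just text</p>'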

node_modules/micromark/dev/lib/initialize/document.d.ts (generated, vendored, normal file, 10 lines)
@@ -0,0 +1,10 @@
/** @type {InitialConstruct} */
export const document: InitialConstruct;
/**
 * Construct and its state.
 */
export type StackItem = [Construct, ContainerState];
import type { InitialConstruct } from 'micromark-util-types';
import type { Construct } from 'micromark-util-types';
import type { ContainerState } from 'micromark-util-types';
//# sourceMappingURL=document.d.ts.map

node_modules/micromark/dev/lib/initialize/document.d.ts.map (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"document.d.ts","sourceRoot":"","sources":["document.js"],"names":[],"mappings":"AAyBA,+BAA+B;AAC/B,uBADW,gBAAgB,CAC2B;;;;wBAXzC,CAAC,SAAS,EAAE,cAAc,CAAC;sCAJ9B,sBAAsB;+BAAtB,sBAAsB;oCAAtB,sBAAsB"}

node_modules/micromark/dev/lib/initialize/document.js (generated, vendored, normal file, 445 lines)
@@ -0,0 +1,445 @@
/**
 * @import {
 *   Construct,
 *   ContainerState,
 *   InitialConstruct,
 *   Initializer,
 *   Point,
 *   State,
 *   TokenizeContext,
 *   Tokenizer,
 *   Token
 * } from 'micromark-util-types'
 */

/**
 * @typedef {[Construct, ContainerState]} StackItem
 *   Construct and its state.
 */

import {ok as assert} from 'devlop'
import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding} from 'micromark-util-character'
import {splice} from 'micromark-util-chunked'
import {codes, constants, types} from 'micromark-util-symbol'

/** @type {InitialConstruct} */
export const document = {tokenize: initializeDocument}

/** @type {Construct} */
const containerConstruct = {tokenize: tokenizeContainer}

/**
 * @this {TokenizeContext}
 *   Self.
 * @type {Initializer}
 *   Initializer.
 */
function initializeDocument(effects) {
  const self = this
  /** @type {Array<StackItem>} */
  const stack = []
  let continued = 0
  /** @type {TokenizeContext | undefined} */
  let childFlow
  /** @type {Token | undefined} */
  let childToken
  /** @type {number} */
  let lineStartOffset

  return start

  /** @type {State} */
  function start(code) {
    // First we iterate through the open blocks, starting with the root
    // document, and descending through last children down to the last open
    // block.
    // Each block imposes a condition that the line must satisfy if the block is
    // to remain open.
    // For example, a block quote requires a `>` character.
    // A paragraph requires a non-blank line.
    // In this phase we may match all or just some of the open blocks.
    // But we cannot close unmatched blocks yet, because we may have a lazy
    // continuation line.
    if (continued < stack.length) {
      const item = stack[continued]
      self.containerState = item[1]
      assert(
        item[0].continuation,
        'expected `continuation` to be defined on container construct'
      )
      return effects.attempt(
        item[0].continuation,
        documentContinue,
        checkNewContainers
      )(code)
    }

    // Done.
    return checkNewContainers(code)
  }

  /** @type {State} */
  function documentContinue(code) {
    assert(
      self.containerState,
      'expected `containerState` to be defined after continuation'
    )

    continued++

    // Note: this field is called `_closeFlow` but it also closes containers.
    // Perhaps a good idea to rename it but it’s already used in the wild by
    // extensions.
    if (self.containerState._closeFlow) {
      self.containerState._closeFlow = undefined

      if (childFlow) {
        closeFlow()
      }

      // Note: this algorithm for moving events around is similar to the
      // algorithm when dealing with lazy lines in `writeToChild`.
      const indexBeforeExits = self.events.length
      let indexBeforeFlow = indexBeforeExits
      /** @type {Point | undefined} */
      let point

      // Find the flow chunk.
      while (indexBeforeFlow--) {
        if (
          self.events[indexBeforeFlow][0] === 'exit' &&
          self.events[indexBeforeFlow][1].type === types.chunkFlow
        ) {
          point = self.events[indexBeforeFlow][1].end
          break
        }
      }

      assert(point, 'could not find previous flow chunk')

      exitContainers(continued)

      // Fix positions.
      let index = indexBeforeExits

      while (index < self.events.length) {
        self.events[index][1].end = {...point}
        index++
      }

      // Inject the exits earlier (they’re still also at the end).
      splice(
        self.events,
        indexBeforeFlow + 1,
        0,
        self.events.slice(indexBeforeExits)
      )

      // Discard the duplicate exits.
      self.events.length = index

      return checkNewContainers(code)
    }

    return start(code)
  }

  /** @type {State} */
  function checkNewContainers(code) {
    // Next, after consuming the continuation markers for existing blocks, we
    // look for new block starts (e.g. `>` for a block quote).
    // If we encounter a new block start, we close any blocks unmatched in
    // step 1 before creating the new block as a child of the last matched
    // block.
    if (continued === stack.length) {
      // No need to `check` whether there’s a container, of `exitContainers`
      // would be moot.
      // We can instead immediately `attempt` to parse one.
      if (!childFlow) {
        return documentContinued(code)
      }

      // If we have concrete content, such as block HTML or fenced code,
      // we can’t have containers “pierce” into them, so we can immediately
      // start.
      if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
        return flowStart(code)
      }

      // If we do have flow, it could still be a blank line,
      // but we’d be interrupting it w/ a new container if there’s a current
      // construct.
      // To do: next major: remove `_gfmTableDynamicInterruptHack` (no longer
      // needed in micromark-extension-gfm-table@1.0.6).
      self.interrupt = Boolean(
        childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack
      )
    }

    // Check if there is a new container.
    self.containerState = {}
    return effects.check(
      containerConstruct,
      thereIsANewContainer,
      thereIsNoNewContainer
    )(code)
  }

  /** @type {State} */
  function thereIsANewContainer(code) {
    if (childFlow) closeFlow()
    exitContainers(continued)
    return documentContinued(code)
  }

  /** @type {State} */
  function thereIsNoNewContainer(code) {
    self.parser.lazy[self.now().line] = continued !== stack.length
    lineStartOffset = self.now().offset
    return flowStart(code)
  }

  /** @type {State} */
  function documentContinued(code) {
    // Try new containers.
    self.containerState = {}
    return effects.attempt(
      containerConstruct,
      containerContinue,
      flowStart
    )(code)
  }

  /** @type {State} */
  function containerContinue(code) {
    assert(
      self.currentConstruct,
      'expected `currentConstruct` to be defined on tokenizer'
    )
    assert(
      self.containerState,
      'expected `containerState` to be defined on tokenizer'
    )
    continued++
    stack.push([self.currentConstruct, self.containerState])
    // Try another.
    return documentContinued(code)
  }

  /** @type {State} */
  function flowStart(code) {
    if (code === codes.eof) {
      if (childFlow) closeFlow()
      exitContainers(0)
      effects.consume(code)
      return
    }

    childFlow = childFlow || self.parser.flow(self.now())
    effects.enter(types.chunkFlow, {
      _tokenizer: childFlow,
      contentType: constants.contentTypeFlow,
      previous: childToken
    })

    return flowContinue(code)
  }

  /** @type {State} */
  function flowContinue(code) {
    if (code === codes.eof) {
      writeToChild(effects.exit(types.chunkFlow), true)
      exitContainers(0)
      effects.consume(code)
      return
    }

    if (markdownLineEnding(code)) {
      effects.consume(code)
      writeToChild(effects.exit(types.chunkFlow))
      // Get ready for the next line.
      continued = 0
      self.interrupt = undefined
      return start
    }

    effects.consume(code)
    return flowContinue
  }

  /**
   * @param {Token} token
   *   Token.
   * @param {boolean | undefined} [endOfFile]
   *   Whether the token is at the end of the file (default: `false`).
   * @returns {undefined}
   *   Nothing.
   */
  function writeToChild(token, endOfFile) {
    assert(childFlow, 'expected `childFlow` to be defined when continuing')
    const stream = self.sliceStream(token)
    if (endOfFile) stream.push(null)
    token.previous = childToken
    if (childToken) childToken.next = token
    childToken = token
    childFlow.defineSkip(token.start)
    childFlow.write(stream)

    // Alright, so we just added a lazy line:
    //
    // ```markdown
    // > a
    // b.
    //
    // Or:
    //
    // > ~~~c
    // d
    //
    // Or:
    //
    // > | e |
    // f
    // ```
    //
    // The construct in the second example (fenced code) does not accept lazy
    // lines, so it marked itself as done at the end of its first line, and
    // then the content construct parses `d`.
    // Most constructs in markdown match on the first line: if the first line
    // forms a construct, a non-lazy line can’t “unmake” it.
    //
    // The construct in the third example is potentially a GFM table, and
    // those are *weird*.
    // It *could* be a table, from the first line, if the following line
    // matches a condition.
    // In this case, that second line is lazy, which “unmakes” the first line
    // and turns the whole into one content block.
    //
    // We’ve now parsed the non-lazy and the lazy line, and can figure out
    // whether the lazy line started a new flow block.
    // If it did, we exit the current containers between the two flow blocks.
    if (self.parser.lazy[token.start.line]) {
      let index = childFlow.events.length

      while (index--) {
        if (
          // The token starts before the line ending…
          childFlow.events[index][1].start.offset < lineStartOffset &&
          // …and either is not ended yet…
          (!childFlow.events[index][1].end ||
            // …or ends after it.
            childFlow.events[index][1].end.offset > lineStartOffset)
        ) {
          // Exit: there’s still something open, which means it’s a lazy line
          // part of something.
          return
        }
      }

      // Note: this algorithm for moving events around is similar to the
      // algorithm when closing flow in `documentContinue`.
      const indexBeforeExits = self.events.length
      let indexBeforeFlow = indexBeforeExits
      /** @type {boolean | undefined} */
      let seen
      /** @type {Point | undefined} */
      let point

      // Find the previous chunk (the one before the lazy line).
      while (indexBeforeFlow--) {
        if (
          self.events[indexBeforeFlow][0] === 'exit' &&
          self.events[indexBeforeFlow][1].type === types.chunkFlow
        ) {
          if (seen) {
            point = self.events[indexBeforeFlow][1].end
            break
          }

          seen = true
        }
      }

      assert(point, 'could not find previous flow chunk')

      exitContainers(continued)

      // Fix positions.
      index = indexBeforeExits

      while (index < self.events.length) {
        self.events[index][1].end = {...point}
        index++
      }

      // Inject the exits earlier (they’re still also at the end).
      splice(
        self.events,
        indexBeforeFlow + 1,
        0,
        self.events.slice(indexBeforeExits)
      )

      // Discard the duplicate exits.
      self.events.length = index
    }
  }

  /**
   * @param {number} size
   *   Size.
   * @returns {undefined}
   *   Nothing.
   */
  function exitContainers(size) {
    let index = stack.length

    // Exit open containers.
    while (index-- > size) {
      const entry = stack[index]
      self.containerState = entry[1]
      assert(
        entry[0].exit,
        'expected `exit` to be defined on container construct'
      )
      entry[0].exit.call(self, effects)
    }

    stack.length = size
  }

  function closeFlow() {
    assert(
      self.containerState,
      'expected `containerState` to be defined when closing flow'
    )
    assert(childFlow, 'expected `childFlow` to be defined when closing it')
    childFlow.write([codes.eof])
    childToken = undefined
    childFlow = undefined
    self.containerState._closeFlow = undefined
  }
}

/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 *   Tokenizer.
 */
function tokenizeContainer(effects, ok, nok) {
  // Always populated by defaults.
  assert(
    this.parser.constructs.disable.null,
    'expected `disable.null` to be populated'
  )
  return factorySpace(
    effects,
    effects.attempt(this.parser.constructs.document, ok, nok),
    types.linePrefix,
    this.parser.constructs.disable.null.includes('codeIndented')
      ? undefined
      : constants.tabSize
  )
}
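
The lazy-line handling above is observable from the outside; the first case from the comment in `writeToChild`, checked directly:

import {micromark} from 'micromark'

// Lazy continuation: the unprefixed `b` stays inside the block quote.
console.log(micromark('> a\nb'))
// => '<blockquote>\n<p>a\nb</p>\n</blockquote>'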

node_modules/micromark/dev/lib/initialize/flow.d.ts (generated, vendored, normal file, 4 lines)
@@ -0,0 +1,4 @@
/** @type {InitialConstruct} */
export const flow: InitialConstruct;
import type { InitialConstruct } from 'micromark-util-types';
//# sourceMappingURL=flow.d.ts.map

node_modules/micromark/dev/lib/initialize/flow.d.ts.map (generated, vendored, normal file, 1 line)
@@ -0,0 +1 @@
{"version":3,"file":"flow.d.ts","sourceRoot":"","sources":["flow.js"],"names":[],"mappings":"AAeA,+BAA+B;AAC/B,mBADW,gBAAgB,CACmB;sCAVpC,sBAAsB"}
86 node_modules/micromark/dev/lib/initialize/flow.js generated vendored Normal file
@@ -0,0 +1,86 @@
/**
 * @import {
 *   InitialConstruct,
 *   Initializer,
 *   State,
 *   TokenizeContext
 * } from 'micromark-util-types'
 */

import {ok as assert} from 'devlop'
import {blankLine, content} from 'micromark-core-commonmark'
import {factorySpace} from 'micromark-factory-space'
import {markdownLineEnding} from 'micromark-util-character'
import {codes, types} from 'micromark-util-symbol'

/** @type {InitialConstruct} */
export const flow = {tokenize: initializeFlow}

/**
 * @this {TokenizeContext}
 *   Self.
 * @type {Initializer}
 *   Initializer.
 */
function initializeFlow(effects) {
  const self = this
  const initial = effects.attempt(
    // Try to parse a blank line.
    blankLine,
    atBlankEnding,
    // Try to parse initial flow (essentially, only code).
    effects.attempt(
      this.parser.constructs.flowInitial,
      afterConstruct,
      factorySpace(
        effects,
        effects.attempt(
          this.parser.constructs.flow,
          afterConstruct,
          effects.attempt(content, afterConstruct)
        ),
        types.linePrefix
      )
    )
  )

  return initial

  /** @type {State} */
  function atBlankEnding(code) {
    assert(
      code === codes.eof || markdownLineEnding(code),
      'expected eol or eof'
    )

    if (code === codes.eof) {
      effects.consume(code)
      return
    }

    effects.enter(types.lineEndingBlank)
    effects.consume(code)
    effects.exit(types.lineEndingBlank)
    self.currentConstruct = undefined
    return initial
  }

  /** @type {State} */
  function afterConstruct(code) {
    assert(
      code === codes.eof || markdownLineEnding(code),
      'expected eol or eof'
    )

    if (code === codes.eof) {
      effects.consume(code)
      return
    }

    effects.enter(types.lineEnding)
    effects.consume(code)
    effects.exit(types.lineEnding)
    self.currentConstruct = undefined
    return initial
  }
}
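The nesting of `effects.attempt` calls above encodes the precedence for each flow line: blank line first, then `flowInitial` (indented code), then the `flow` constructs, and finally `content` (paragraphs and definitions) as the fallback. A quick sketch of that order from the outside (hypothetical snippet, not part of the commit):

import {micromark} from 'micromark'

console.log(micromark('# heading'))  // a flow construct matches: '<h1>heading</h1>'
console.log(micromark('plain text')) // falls through to content: '<p>plain text</p>'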
8 node_modules/micromark/dev/lib/initialize/text.d.ts generated vendored Normal file
@@ -0,0 +1,8 @@
export namespace resolver {
    let resolveAll: Resolver;
}
export const string: InitialConstruct;
export const text: InitialConstruct;
import type { Resolver } from 'micromark-util-types';
import type { InitialConstruct } from 'micromark-util-types';
//# sourceMappingURL=text.d.ts.map
1 node_modules/micromark/dev/lib/initialize/text.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"text.d.ts","sourceRoot":"","sources":["text.js"],"names":[],"mappings":";;;AAeA,sCAAiD;AACjD,oCAA6C;8BARnC,sBAAsB;sCAAtB,sBAAsB"}
244 node_modules/micromark/dev/lib/initialize/text.js generated vendored Normal file
@@ -0,0 +1,244 @@
/**
 * @import {
 *   Code,
 *   InitialConstruct,
 *   Initializer,
 *   Resolver,
 *   State,
 *   TokenizeContext
 * } from 'micromark-util-types'
 */

import {ok as assert} from 'devlop'
import {codes, constants, types} from 'micromark-util-symbol'

export const resolver = {resolveAll: createResolver()}
export const string = initializeFactory('string')
export const text = initializeFactory('text')

/**
 * @param {'string' | 'text'} field
 *   Field.
 * @returns {InitialConstruct}
 *   Construct.
 */
function initializeFactory(field) {
  return {
    resolveAll: createResolver(
      field === 'text' ? resolveAllLineSuffixes : undefined
    ),
    tokenize: initializeText
  }

  /**
   * @this {TokenizeContext}
   *   Context.
   * @type {Initializer}
   */
  function initializeText(effects) {
    const self = this
    const constructs = this.parser.constructs[field]
    const text = effects.attempt(constructs, start, notText)

    return start

    /** @type {State} */
    function start(code) {
      return atBreak(code) ? text(code) : notText(code)
    }

    /** @type {State} */
    function notText(code) {
      if (code === codes.eof) {
        effects.consume(code)
        return
      }

      effects.enter(types.data)
      effects.consume(code)
      return data
    }

    /** @type {State} */
    function data(code) {
      if (atBreak(code)) {
        effects.exit(types.data)
        return text(code)
      }

      // Data.
      effects.consume(code)
      return data
    }

    /**
     * @param {Code} code
     *   Code.
     * @returns {boolean}
     *   Whether the code is a break.
     */
    function atBreak(code) {
      if (code === codes.eof) {
        return true
      }

      const list = constructs[code]
      let index = -1

      if (list) {
        // Always populated by defaults.
        assert(Array.isArray(list), 'expected `disable.null` to be populated')

        while (++index < list.length) {
          const item = list[index]
          if (!item.previous || item.previous.call(self, self.previous)) {
            return true
          }
        }
      }

      return false
    }
  }
}

/**
 * @param {Resolver | undefined} [extraResolver]
 *   Resolver.
 * @returns {Resolver}
 *   Resolver.
 */
function createResolver(extraResolver) {
  return resolveAllText

  /** @type {Resolver} */
  function resolveAllText(events, context) {
    let index = -1
    /** @type {number | undefined} */
    let enter

    // A rather boring computation (to merge adjacent `data` events) which
    // improves mm performance by 29%.
    while (++index <= events.length) {
      if (enter === undefined) {
        if (events[index] && events[index][1].type === types.data) {
          enter = index
          index++
        }
      } else if (!events[index] || events[index][1].type !== types.data) {
        // Don’t do anything if there is one data token.
        if (index !== enter + 2) {
          events[enter][1].end = events[index - 1][1].end
          events.splice(enter + 2, index - enter - 2)
          index = enter + 2
        }

        enter = undefined
      }
    }

    return extraResolver ? extraResolver(events, context) : events
  }
}

/**
 * A rather ugly set of instructions which again looks at chunks in the input
 * stream.
 * The reason to do this here is that it is *much* faster to parse in reverse.
 * And that we can’t hook into `null` to split the line suffix before an EOF.
 * To do: figure out if we can make this into a clean utility, or even in core.
 * As it will be useful for GFMs literal autolink extension (and maybe even
 * tables?)
 *
 * @type {Resolver}
 */
function resolveAllLineSuffixes(events, context) {
  let eventIndex = 0 // Skip first.

  while (++eventIndex <= events.length) {
    if (
      (eventIndex === events.length ||
        events[eventIndex][1].type === types.lineEnding) &&
      events[eventIndex - 1][1].type === types.data
    ) {
      const data = events[eventIndex - 1][1]
      const chunks = context.sliceStream(data)
      let index = chunks.length
      let bufferIndex = -1
      let size = 0
      /** @type {boolean | undefined} */
      let tabs

      while (index--) {
        const chunk = chunks[index]

        if (typeof chunk === 'string') {
          bufferIndex = chunk.length

          while (chunk.charCodeAt(bufferIndex - 1) === codes.space) {
            size++
            bufferIndex--
          }

          if (bufferIndex) break
          bufferIndex = -1
        }
        // Number
        else if (chunk === codes.horizontalTab) {
          tabs = true
          size++
        } else if (chunk === codes.virtualSpace) {
          // Empty
        } else {
          // Replacement character, exit.
          index++
          break
        }
      }

      // Allow final trailing whitespace.
      if (context._contentTypeTextTrailing && eventIndex === events.length) {
        size = 0
      }

      if (size) {
        const token = {
          type:
            eventIndex === events.length ||
            tabs ||
            size < constants.hardBreakPrefixSizeMin
              ? types.lineSuffix
              : types.hardBreakTrailing,
          start: {
            _bufferIndex: index
              ? bufferIndex
              : data.start._bufferIndex + bufferIndex,
            _index: data.start._index + index,
            line: data.end.line,
            column: data.end.column - size,
            offset: data.end.offset - size
          },
          end: {...data.end}
        }

        data.end = {...token.start}

        if (data.start.offset === data.end.offset) {
          Object.assign(data, token)
        } else {
          events.splice(
            eventIndex,
            0,
            ['enter', token, context],
            ['exit', token, context]
          )
          eventIndex += 2
        }
      }

      eventIndex++
    }
  }

  return events
}
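To make the `resolveAllText` merge concrete, here is a self-contained toy version (not part of the commit): it uses the same index arithmetic on simplified events whose enter/exit pairs share one token object, as they do in micromark.

function mergeData(events) {
  let index = -1
  let enter
  while (++index <= events.length) {
    if (enter === undefined) {
      if (events[index] && events[index][1].type === 'data') {
        enter = index
        index++
      }
    } else if (!events[index] || events[index][1].type !== 'data') {
      // Several adjacent data tokens: widen the first, drop the rest.
      if (index !== enter + 2) {
        events[enter][1].end = events[index - 1][1].end
        events.splice(enter + 2, index - enter - 2)
        index = enter + 2
      }
      enter = undefined
    }
  }
  return events
}

const a = {type: 'data', end: 1}
const b = {type: 'data', end: 2}
const eol = {type: 'lineEnding', end: 3}
const events = [['enter', a], ['exit', a], ['enter', b], ['exit', b], ['enter', eol], ['exit', eol]]
console.log(mergeData(events).length) // 4: one widened data pair plus the line ending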
10 node_modules/micromark/dev/lib/parse.d.ts generated vendored Normal file
@@ -0,0 +1,10 @@
/**
 * @param {ParseOptions | null | undefined} [options]
 *   Configuration (optional).
 * @returns {ParseContext}
 *   Parser.
 */
export function parse(options?: ParseOptions | null | undefined): ParseContext;
import type { ParseOptions } from 'micromark-util-types';
import type { ParseContext } from 'micromark-util-types';
//# sourceMappingURL=parse.d.ts.map
1 node_modules/micromark/dev/lib/parse.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"parse.d.ts","sourceRoot":"","sources":["parse.js"],"names":[],"mappings":"AAkBA;;;;;GAKG;AACH,gCALW,YAAY,GAAG,IAAI,GAAG,SAAS,GAE7B,YAAY,CAoCxB;kCAlDS,sBAAsB;kCAAtB,sBAAsB"}
58 node_modules/micromark/dev/lib/parse.js generated vendored Normal file
@@ -0,0 +1,58 @@
/**
 * @import {
 *   Create,
 *   FullNormalizedExtension,
 *   InitialConstruct,
 *   ParseContext,
 *   ParseOptions
 * } from 'micromark-util-types'
 */

import {combineExtensions} from 'micromark-util-combine-extensions'
import {content} from './initialize/content.js'
import {document} from './initialize/document.js'
import {flow} from './initialize/flow.js'
import {string, text} from './initialize/text.js'
import * as defaultConstructs from './constructs.js'
import {createTokenizer} from './create-tokenizer.js'

/**
 * @param {ParseOptions | null | undefined} [options]
 *   Configuration (optional).
 * @returns {ParseContext}
 *   Parser.
 */
export function parse(options) {
  const settings = options || {}
  const constructs = /** @type {FullNormalizedExtension} */ (
    combineExtensions([defaultConstructs, ...(settings.extensions || [])])
  )

  /** @type {ParseContext} */
  const parser = {
    constructs,
    content: create(content),
    defined: [],
    document: create(document),
    flow: create(flow),
    lazy: {},
    string: create(string),
    text: create(text)
  }

  return parser

  /**
   * @param {InitialConstruct} initial
   *   Construct to start with.
   * @returns {Create}
   *   Create a tokenizer.
   */
  function create(initial) {
    return creator
    /** @type {Create} */
    function creator(from) {
      return createTokenizer(parser, initial, from)
    }
  }
}
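The `create` indirection means each call such as `parse().document()` hands back a fresh tokenizer bound to this parser. A small sketch of driving it directly (hypothetical usage, not part of the commit; `parse` and `preprocess` are re-exported from the package root, as the `index.js` further down shows):

import {parse, preprocess} from 'micromark'

const tokenizer = parse().document()
// `write` returns the full event list once it sees the EOF chunk:
const events = tokenizer.write(preprocess()('*hi*', undefined, true))
console.log(events.length > 0) // true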
9 node_modules/micromark/dev/lib/postprocess.d.ts generated vendored Normal file
@@ -0,0 +1,9 @@
/**
 * @param {Array<Event>} events
 *   Events.
 * @returns {Array<Event>}
 *   Events.
 */
export function postprocess(events: Array<Event>): Array<Event>;
import type { Event } from 'micromark-util-types';
//# sourceMappingURL=postprocess.d.ts.map
1 node_modules/micromark/dev/lib/postprocess.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"postprocess.d.ts","sourceRoot":"","sources":["postprocess.js"],"names":[],"mappings":"AAMA;;;;;GAKG;AACH,oCALW,KAAK,CAAC,KAAK,CAAC,GAEV,KAAK,CAAC,KAAK,CAAC,CASxB;2BAjBuB,sBAAsB"}
19 node_modules/micromark/dev/lib/postprocess.js generated vendored Normal file
@@ -0,0 +1,19 @@
/**
 * @import {Event} from 'micromark-util-types'
 */

import {subtokenize} from 'micromark-util-subtokenize'

/**
 * @param {Array<Event>} events
 *   Events.
 * @returns {Array<Event>}
 *   Events.
 */
export function postprocess(events) {
  while (!subtokenize(events)) {
    // Empty
  }

  return events
}
13 node_modules/micromark/dev/lib/preprocess.d.ts generated vendored Normal file
@@ -0,0 +1,13 @@
/**
 * @returns {Preprocessor}
 *   Preprocess a value.
 */
export function preprocess(): Preprocessor;
/**
 * Preprocess a value.
 */
export type Preprocessor = (value: Value, encoding?: Encoding | null | undefined, end?: boolean | null | undefined) => Array<Chunk>;
import type { Value } from 'micromark-util-types';
import type { Encoding } from 'micromark-util-types';
import type { Chunk } from 'micromark-util-types';
//# sourceMappingURL=preprocess.d.ts.map
1 node_modules/micromark/dev/lib/preprocess.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"preprocess.d.ts","sourceRoot":"","sources":["preprocess.js"],"names":[],"mappings":"AAqBA;;;GAGG;AACH,8BAHa,YAAY,CAsHxB;;;;mCArIU,KAAK,aAEL,QAAQ,GAAG,IAAI,GAAG,SAAS,QAE3B,OAAO,GAAG,IAAI,GAAG,SAAS,KAExB,KAAK,CAAC,KAAK,CAAC;2BAZsB,sBAAsB;8BAAtB,sBAAsB;2BAAtB,sBAAsB"}
141 node_modules/micromark/dev/lib/preprocess.js generated vendored Normal file
@@ -0,0 +1,141 @@
/**
 * @import {Chunk, Code, Encoding, Value} from 'micromark-util-types'
 */

/**
 * @callback Preprocessor
 *   Preprocess a value.
 * @param {Value} value
 *   Value.
 * @param {Encoding | null | undefined} [encoding]
 *   Encoding when `value` is a typed array (optional).
 * @param {boolean | null | undefined} [end=false]
 *   Whether this is the last chunk (default: `false`).
 * @returns {Array<Chunk>}
 *   Chunks.
 */

import {codes, constants} from 'micromark-util-symbol'

const search = /[\0\t\n\r]/g

/**
 * @returns {Preprocessor}
 *   Preprocess a value.
 */
export function preprocess() {
  let column = 1
  let buffer = ''
  /** @type {boolean | undefined} */
  let start = true
  /** @type {boolean | undefined} */
  let atCarriageReturn

  return preprocessor

  /** @type {Preprocessor} */
  // eslint-disable-next-line complexity
  function preprocessor(value, encoding, end) {
    /** @type {Array<Chunk>} */
    const chunks = []
    /** @type {RegExpMatchArray | null} */
    let match
    /** @type {number} */
    let next
    /** @type {number} */
    let startPosition
    /** @type {number} */
    let endPosition
    /** @type {Code} */
    let code

    value =
      buffer +
      (typeof value === 'string'
        ? value.toString()
        : new TextDecoder(encoding || undefined).decode(value))

    startPosition = 0
    buffer = ''

    if (start) {
      // To do: `markdown-rs` actually parses BOMs (byte order mark).
      if (value.charCodeAt(0) === codes.byteOrderMarker) {
        startPosition++
      }

      start = undefined
    }

    while (startPosition < value.length) {
      search.lastIndex = startPosition
      match = search.exec(value)
      endPosition =
        match && match.index !== undefined ? match.index : value.length
      code = value.charCodeAt(endPosition)

      if (!match) {
        buffer = value.slice(startPosition)
        break
      }

      if (
        code === codes.lf &&
        startPosition === endPosition &&
        atCarriageReturn
      ) {
        chunks.push(codes.carriageReturnLineFeed)
        atCarriageReturn = undefined
      } else {
        if (atCarriageReturn) {
          chunks.push(codes.carriageReturn)
          atCarriageReturn = undefined
        }

        if (startPosition < endPosition) {
          chunks.push(value.slice(startPosition, endPosition))
          column += endPosition - startPosition
        }

        switch (code) {
          case codes.nul: {
            chunks.push(codes.replacementCharacter)
            column++

            break
          }

          case codes.ht: {
            next = Math.ceil(column / constants.tabSize) * constants.tabSize
            chunks.push(codes.horizontalTab)
            while (column++ < next) chunks.push(codes.virtualSpace)

            break
          }

          case codes.lf: {
            chunks.push(codes.lineFeed)
            column = 1

            break
          }

          default: {
            atCarriageReturn = true
            column = 1
          }
        }
      }

      startPosition = endPosition + 1
    }

    if (end) {
      if (atCarriageReturn) chunks.push(codes.carriageReturn)
      if (buffer) chunks.push(buffer)
      chunks.push(codes.eof)
    }

    return chunks
  }
}
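Concretely: tabs become a `horizontalTab` code padded with `virtualSpace` codes up to the next 4-column tab stop, CRLF collapses into a single `carriageReturnLineFeed` code, and `end` appends `eof`. A sketch (hypothetical snippet, not part of the commit; the numbers are the constants from `micromark-util-symbol` used throughout this diff):

import {preprocess} from 'micromark'

console.log(preprocess()('a\tb\r\n', undefined, true))
// -> ['a', -2, -1, -1, 'b', -3, null]
//     data, horizontalTab, two virtualSpaces (columns 3 and 4), data, CRLF, eof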
35 node_modules/micromark/dev/stream.d.ts generated vendored Normal file
@@ -0,0 +1,35 @@
/**
 * Create a duplex (readable and writable) stream.
 *
 * Some of the work to parse markdown can be done streaming, but in the
 * end buffering is required.
 *
 * micromark does not handle errors for you, so you must handle errors on whatever
 * streams you pipe into it.
 * As markdown does not know errors, `micromark` itself does not emit errors.
 *
 * @param {Options | null | undefined} [options]
 *   Configuration (optional).
 * @returns {MinimalDuplex}
 *   Duplex stream.
 */
export function stream(options?: Options | null | undefined): MinimalDuplex;
export type Options = import("micromark-util-types").Options;
/**
 * Function called when write was successful.
 */
export type Callback = () => undefined;
/**
 * Configuration for piping.
 */
export type PipeOptions = {
    /**
     * Whether to end the destination stream when the source stream ends.
     */
    end?: boolean | null | undefined;
};
/**
 * Duplex stream.
 */
export type MinimalDuplex = Omit<NodeJS.ReadableStream & NodeJS.WritableStream, "isPaused" | "pause" | "read" | "resume" | "setEncoding" | "unpipe" | "unshift" | "wrap">;
//# sourceMappingURL=stream.d.ts.map
1 node_modules/micromark/dev/stream.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["stream.js"],"names":[],"mappings":"AA6BA;;;;;;;;;;;;;;GAcG;AACH,iCALW,OAAO,GAAG,IAAI,GAAG,SAAS,GAExB,aAAa,CAoOzB;sBAxQY,OAAO,sBAAsB,EAAE,OAAO;;;;6BAMtC,SAAS;;;;;;;;UAKR,OAAO,GAAG,IAAI,GAAG,SAAS;;;;;4BAG3B,IAAI,CAAC,MAAM,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,EAAE,UAAU,GAAG,OAAO,GAAG,MAAM,GAAG,QAAQ,GAAG,aAAa,GAAG,QAAQ,GAAG,SAAS,GAAG,MAAM,CAAC"}
270 node_modules/micromark/dev/stream.js generated vendored Normal file
@@ -0,0 +1,270 @@
/**
 * @import {Encoding, Value} from 'micromark-util-types'
 */

/**
 * @typedef {import('micromark-util-types').Options} Options
 */

/**
 * @callback Callback
 *   Function called when write was successful.
 * @returns {undefined}
 *   Nothing.
 *
 * @typedef PipeOptions
 *   Configuration for piping.
 * @property {boolean | null | undefined} [end]
 *   Whether to end the destination stream when the source stream ends.
 *
 * @typedef {Omit<NodeJS.ReadableStream & NodeJS.WritableStream, 'isPaused' | 'pause' | 'read' | 'resume' | 'setEncoding' | 'unpipe' | 'unshift' | 'wrap'>} MinimalDuplex
 *   Duplex stream.
 */

import {EventEmitter} from 'node:events'
import {compile} from './lib/compile.js'
import {parse} from './lib/parse.js'
import {postprocess} from './lib/postprocess.js'
import {preprocess} from './lib/preprocess.js'

/**
 * Create a duplex (readable and writable) stream.
 *
 * Some of the work to parse markdown can be done streaming, but in the
 * end buffering is required.
 *
 * micromark does not handle errors for you, so you must handle errors on whatever
 * streams you pipe into it.
 * As markdown does not know errors, `micromark` itself does not emit errors.
 *
 * @param {Options | null | undefined} [options]
 *   Configuration (optional).
 * @returns {MinimalDuplex}
 *   Duplex stream.
 */
export function stream(options) {
  const prep = preprocess()
  const tokenize = parse(options).document().write
  const comp = compile(options)
  /** @type {boolean} */
  let ended

  const emitter = /** @type {MinimalDuplex} */ (new EventEmitter())
  // @ts-expect-error: fine.
  emitter.end = end
  emitter.pipe = pipe
  emitter.readable = true
  emitter.writable = true
  // @ts-expect-error: fine.
  emitter.write = write

  return emitter

  /**
   * Write a chunk into memory.
   *
   * @overload
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Encoding | null | undefined} [encoding]
   *   Character encoding to understand `chunk` as when it’s a `Uint8Array`
   *   (`string`, default: `'utf8'`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @overload
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Callback | Encoding | null | undefined} [encoding]
   *   Character encoding to understand `chunk` as when it’s a `Uint8Array`
   *   (`string`, default: `'utf8'`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   */
  function write(chunk, encoding, callback) {
    if (typeof encoding === 'function') {
      callback = encoding
      encoding = undefined
    }

    if (ended) {
      throw new Error('Did not expect `write` after `end`')
    }

    tokenize(prep(chunk || '', encoding))

    if (callback) {
      callback()
    }

    // Signal successful write.
    return true
  }

  /**
   * End the writing.
   *
   * Passes all arguments as a final `write`.
   *
   * @overload
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Encoding | null | undefined} [encoding]
   *   Character encoding to understand `chunk` as when it’s a `Uint8Array`
   *   (`string`, default: `'utf8'`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @overload
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @overload
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @param {Callback | Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Callback | Encoding | null | undefined} [encoding]
   *   Character encoding to understand `chunk` as when it’s a `Uint8Array`
   *   (`string`, default: `'utf8'`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   */
  function end(chunk, encoding, callback) {
    if (typeof chunk === 'function') {
      encoding = chunk
      chunk = undefined
    }

    if (typeof encoding === 'function') {
      callback = encoding
      encoding = undefined
    }

    write(chunk, encoding, callback)

    emitter.emit('data', comp(postprocess(tokenize(prep('', encoding, true)))))

    emitter.emit('end')
    ended = true
    return true
  }

  /**
   * Pipe the processor into a writable stream.
   *
   * Basically `Stream#pipe`, but inlined and simplified to keep the bundled
   * size down.
   * See: <https://github.com/nodejs/node/blob/43a5170/lib/internal/streams/legacy.js#L13>.
   *
   * @template {NodeJS.WritableStream} Stream
   *   Writable stream.
   * @param {Stream} destination
   *   Stream to pipe into.
   * @param {PipeOptions | null | undefined} [options]
   *   Configuration.
   * @returns {Stream}
   *   Destination stream.
   */
  function pipe(destination, options) {
    emitter.on('data', ondata)
    emitter.on('error', onerror)
    emitter.on('end', cleanup)
    emitter.on('close', cleanup)

    // If the `end` option is not supplied, `destination.end()` will be
    // called when the `end` or `close` events are received.
    // @ts-expect-error `_isStdio` is available on `std{err,out}`
    if (!destination._isStdio && (!options || options.end !== false)) {
      emitter.on('end', onend)
    }

    destination.on('error', onerror)
    destination.on('close', cleanup)

    destination.emit('pipe', emitter)

    return destination

    /**
     * End destination stream.
     *
     * @returns {undefined}
     *   Nothing.
     */
    function onend() {
      if (destination.end) {
        destination.end()
      }
    }

    /**
     * Handle data.
     *
     * @param {string} chunk
     *   Data.
     * @returns {undefined}
     *   Nothing.
     */
    function ondata(chunk) {
      if (destination.writable) {
        destination.write(chunk)
      }
    }

    /**
     * Clean listeners.
     *
     * @returns {undefined}
     *   Nothing.
     */
    function cleanup() {
      emitter.removeListener('data', ondata)
      emitter.removeListener('end', onend)
      emitter.removeListener('error', onerror)
      emitter.removeListener('end', cleanup)
      emitter.removeListener('close', cleanup)

      destination.removeListener('error', onerror)
      destination.removeListener('close', cleanup)
    }

    /**
     * Close dangling pipes and handle unheard errors.
     *
     * @param {Error | null | undefined} [error]
     *   Error, if any.
     * @returns {undefined}
     *   Nothing.
     */
    function onerror(error) {
      cleanup()

      if (!emitter.listenerCount('error')) {
        throw error // Unhandled stream error in pipe.
      }
    }
  }
}
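Typical use of this duplex, piping stdin through the parser and writing the buffered HTML out on `end` (hypothetical usage, not part of the commit):

import {stream} from 'micromark/stream'

process.stdin.pipe(stream()).pipe(process.stdout)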
82 node_modules/micromark/index.d.ts generated vendored Normal file
@@ -0,0 +1,82 @@
/**
 * Compile markdown to HTML.
 *
 * > Note: which encodings are supported depends on the engine.
 * > For info on Node.js, see:
 * > <https://nodejs.org/api/util.html#whatwg-supported-encodings>.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | null | undefined} encoding
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | Options | null | undefined} [encoding]
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 */
export function micromark(value: Value, encoding: Encoding | null | undefined, options?: Options | null | undefined): string;
/**
 * Compile markdown to HTML.
 *
 * > Note: which encodings are supported depends on the engine.
 * > For info on Node.js, see:
 * > <https://nodejs.org/api/util.html#whatwg-supported-encodings>.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | null | undefined} encoding
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | Options | null | undefined} [encoding]
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 */
export function micromark(value: Value, options?: Options | null | undefined): string;
export { compile } from "./lib/compile.js";
export { parse } from "./lib/parse.js";
export { postprocess } from "./lib/postprocess.js";
export { preprocess } from "./lib/preprocess.js";
export type Options = import("micromark-util-types").Options;
import type { Value } from 'micromark-util-types';
import type { Encoding } from 'micromark-util-types';
//# sourceMappingURL=index.d.ts.map
1 node_modules/micromark/index.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.js"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyBG,iCACQ,KAAK,YAEL,QAAQ,GAAG,IAAI,GAAG,SAAS,YAG3B,OAAO,GAAG,IAAI,GAAG,SAAS,GAExB,MAAM,CAGhB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,iCACQ,KAAK,YAEL,OAAO,GAAG,IAAI,GAAG,SAAS,GAExB,MAAM,CAGhB;;;;;sBAvCU,OAAO,sBAAsB,EAAE,OAAO;2BAJjB,sBAAsB;8BAAtB,sBAAsB"}
60 node_modules/micromark/index.js generated vendored Normal file
@@ -0,0 +1,60 @@
/**
 * @import {Encoding, Value} from 'micromark-util-types'
 */

/**
 * @typedef {import('micromark-util-types').Options} Options
 */

import { compile } from './lib/compile.js';
import { parse } from './lib/parse.js';
import { postprocess } from './lib/postprocess.js';
import { preprocess } from './lib/preprocess.js';
export { compile } from './lib/compile.js';
export { parse } from './lib/parse.js';
export { postprocess } from './lib/postprocess.js';
export { preprocess } from './lib/preprocess.js';

/**
 * Compile markdown to HTML.
 *
 * > Note: which encodings are supported depends on the engine.
 * > For info on Node.js, see:
 * > <https://nodejs.org/api/util.html#whatwg-supported-encodings>.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | null | undefined} encoding
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @overload
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 *
 * @param {Value} value
 *   Markdown to parse (`string` or `Uint8Array`).
 * @param {Encoding | Options | null | undefined} [encoding]
 *   Character encoding to understand `value` as when it’s a `Uint8Array`
 *   (`string`, default: `'utf8'`).
 * @param {Options | null | undefined} [options]
 *   Configuration.
 * @returns {string}
 *   Compiled HTML.
 */
export function micromark(value, encoding, options) {
  if (typeof encoding !== 'string') {
    options = encoding;
    encoding = undefined;
  }
  return compile(options)(postprocess(parse(options).document().write(preprocess()(value, encoding, true))));
}
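That single return line is the whole pipeline: preprocess to chunks, tokenize to events, subtokenize, then compile to HTML. Typical call (hypothetical usage, not part of the commit):

import {micromark} from 'micromark'

console.log(micromark('## Hello, *world*!'))
// -> '<h2>Hello, <em>world</em>!</h2>'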
16 node_modules/micromark/lib/compile.d.ts generated vendored Normal file
@@ -0,0 +1,16 @@
/**
 * @param {CompileOptions | null | undefined} [options]
 * @returns {Compile}
 */
export function compile(options?: CompileOptions | null | undefined): Compile;
export type Media = {
    image?: boolean | undefined;
    labelId?: string | undefined;
    label?: string | undefined;
    referenceId?: string | undefined;
    destination?: string | undefined;
    title?: string | undefined;
};
import type { CompileOptions } from 'micromark-util-types';
import type { Compile } from 'micromark-util-types';
//# sourceMappingURL=compile.d.ts.map
1 node_modules/micromark/lib/compile.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"compile.d.ts","sourceRoot":"","sources":["compile.js"],"names":[],"mappings":"AA6DA;;;GAGG;AACH,kCAHW,cAAc,GAAG,IAAI,GAAG,SAAS,GAC/B,OAAO,CAgkCnB;;YA/lCa,OAAO,GAAG,SAAS;cACnB,MAAM,GAAG,SAAS;YAClB,MAAM,GAAG,SAAS;kBAClB,MAAM,GAAG,SAAS;kBAClB,MAAM,GAAG,SAAS;YAClB,MAAM,GAAG,SAAS;;oCAVtB,sBAAsB;6BAAtB,sBAAsB"}
1060 node_modules/micromark/lib/compile.js generated vendored Normal file
File diff suppressed because it is too large
73 node_modules/micromark/lib/constructs.d.ts generated vendored Normal file
@@ -0,0 +1,73 @@
/** @satisfies {Extension['document']} */
export const document: {
    42: import("micromark-util-types").Construct;
    43: import("micromark-util-types").Construct;
    45: import("micromark-util-types").Construct;
    48: import("micromark-util-types").Construct;
    49: import("micromark-util-types").Construct;
    50: import("micromark-util-types").Construct;
    51: import("micromark-util-types").Construct;
    52: import("micromark-util-types").Construct;
    53: import("micromark-util-types").Construct;
    54: import("micromark-util-types").Construct;
    55: import("micromark-util-types").Construct;
    56: import("micromark-util-types").Construct;
    57: import("micromark-util-types").Construct;
    62: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['contentInitial']} */
export const contentInitial: {
    91: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['flowInitial']} */
export const flowInitial: {
    [-2]: import("micromark-util-types").Construct;
    [-1]: import("micromark-util-types").Construct;
    32: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['flow']} */
export const flow: {
    35: import("micromark-util-types").Construct;
    42: import("micromark-util-types").Construct;
    45: import("micromark-util-types").Construct[];
    60: import("micromark-util-types").Construct;
    61: import("micromark-util-types").Construct;
    95: import("micromark-util-types").Construct;
    96: import("micromark-util-types").Construct;
    126: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['string']} */
export const string: {
    38: import("micromark-util-types").Construct;
    92: import("micromark-util-types").Construct;
};
/** @satisfies {Extension['text']} */
export const text: {
    [-5]: import("micromark-util-types").Construct;
    [-4]: import("micromark-util-types").Construct;
    [-3]: import("micromark-util-types").Construct;
    33: import("micromark-util-types").Construct;
    38: import("micromark-util-types").Construct;
    42: import("micromark-util-types").Construct;
    60: import("micromark-util-types").Construct[];
    91: import("micromark-util-types").Construct;
    92: import("micromark-util-types").Construct[];
    93: import("micromark-util-types").Construct;
    95: import("micromark-util-types").Construct;
    96: import("micromark-util-types").Construct;
};
export namespace insideSpan {
    let _null: (import("micromark-util-types").Construct | {
        resolveAll: import("micromark-util-types").Resolver;
    })[];
    export { _null as null };
}
export namespace attentionMarkers {
    let _null_1: (42 | 95)[];
    export { _null_1 as null };
}
export namespace disable {
    let _null_2: never[];
    export { _null_2 as null };
}
//# sourceMappingURL=constructs.d.ts.map
1 node_modules/micromark/lib/constructs.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"constructs.d.ts","sourceRoot":"","sources":["constructs.js"],"names":[],"mappings":"AA6BA,yCAAyC;AACzC;;;;;;;;;;;;;;;EAeC;AAED,+CAA+C;AAC/C;;EAEC;AAED,4CAA4C;AAC5C;;;;EAIC;AAED,qCAAqC;AACrC;;;;;;;;;EASC;AAED,uCAAuC;AACvC;;;EAGC;AAED,qCAAqC;AACrC;;;;;;;;;;;;;EAaC"}
85 node_modules/micromark/lib/constructs.js generated vendored Normal file
@@ -0,0 +1,85 @@
/**
 * @import {Extension} from 'micromark-util-types'
 */

import { attention, autolink, blockQuote, characterEscape, characterReference, codeFenced, codeIndented, codeText, definition, hardBreakEscape, headingAtx, htmlFlow, htmlText, labelEnd, labelStartImage, labelStartLink, lineEnding, list, setextUnderline, thematicBreak } from 'micromark-core-commonmark';
import { resolver as resolveText } from './initialize/text.js';

/** @satisfies {Extension['document']} */
export const document = {
  [42]: list,
  [43]: list,
  [45]: list,
  [48]: list,
  [49]: list,
  [50]: list,
  [51]: list,
  [52]: list,
  [53]: list,
  [54]: list,
  [55]: list,
  [56]: list,
  [57]: list,
  [62]: blockQuote
};

/** @satisfies {Extension['contentInitial']} */
export const contentInitial = {
  [91]: definition
};

/** @satisfies {Extension['flowInitial']} */
export const flowInitial = {
  [-2]: codeIndented,
  [-1]: codeIndented,
  [32]: codeIndented
};

/** @satisfies {Extension['flow']} */
export const flow = {
  [35]: headingAtx,
  [42]: thematicBreak,
  [45]: [setextUnderline, thematicBreak],
  [60]: htmlFlow,
  [61]: setextUnderline,
  [95]: thematicBreak,
  [96]: codeFenced,
  [126]: codeFenced
};

/** @satisfies {Extension['string']} */
export const string = {
  [38]: characterReference,
  [92]: characterEscape
};

/** @satisfies {Extension['text']} */
export const text = {
  [-5]: lineEnding,
  [-4]: lineEnding,
  [-3]: lineEnding,
  [33]: labelStartImage,
  [38]: characterReference,
  [42]: attention,
  [60]: [autolink, htmlText],
  [91]: labelStartLink,
  [92]: [hardBreakEscape, characterEscape],
  [93]: labelEnd,
  [95]: attention,
  [96]: codeText
};

/** @satisfies {Extension['insideSpan']} */
export const insideSpan = {
  null: [attention, resolveText]
};

/** @satisfies {Extension['attentionMarkers']} */
export const attentionMarkers = {
  null: [42, 95]
};

/** @satisfies {Extension['disable']} */
export const disable = {
  null: []
};
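The numeric keys in these maps are the character codes that may open each construct (negative codes are the whitespace specials produced by `preprocess`). For instance (hypothetical snippet, not part of the commit):

console.log('*'.charCodeAt(0)) // 42: list (document), thematic break (flow), attention (text)
console.log('>'.charCodeAt(0)) // 62: block quote
console.log('`'.charCodeAt(0)) // 96: fenced code (flow), code text (text)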
46 node_modules/micromark/lib/create-tokenizer.d.ts generated vendored Normal file
@@ -0,0 +1,46 @@
/**
 * Create a tokenizer.
 * Tokenizers deal with one type of data (e.g., containers, flow, text).
 * The parser is the object dealing with it all.
 * `initialize` works like other constructs, except that only its `tokenize`
 * function is used, in which case it doesn’t receive an `ok` or `nok`.
 * `from` can be given to set the point before the first character, although
 * when further lines are indented, they must be set with `defineSkip`.
 *
 * @param {ParseContext} parser
 *   Parser.
 * @param {InitialConstruct} initialize
 *   Construct.
 * @param {Omit<Point, '_bufferIndex' | '_index'> | undefined} [from]
 *   Point (optional).
 * @returns {TokenizeContext}
 *   Context.
 */
export function createTokenizer(parser: ParseContext, initialize: InitialConstruct, from?: Omit<Point, "_bufferIndex" | "_index"> | undefined): TokenizeContext;
/**
 * Restore the state.
 */
export type Restore = () => undefined;
/**
 * Info.
 */
export type Info = {
    /**
     * Restore.
     */
    restore: Restore;
    /**
     * From.
     */
    from: number;
};
/**
 * Handle a successful run.
 */
export type ReturnHandle = (construct: Construct, info: Info) => undefined;
import type { ParseContext } from 'micromark-util-types';
import type { InitialConstruct } from 'micromark-util-types';
import type { Point } from 'micromark-util-types';
import type { TokenizeContext } from 'micromark-util-types';
import type { Construct } from 'micromark-util-types';
//# sourceMappingURL=create-tokenizer.d.ts.map
1 node_modules/micromark/lib/create-tokenizer.d.ts.map generated vendored Normal file
@@ -0,0 +1 @@
{"version":3,"file":"create-tokenizer.d.ts","sourceRoot":"","sources":["create-tokenizer.js"],"names":[],"mappings":"AAgDA;;;;;;;;;;;;;;;;;GAiBG;AACH,wCATW,YAAY,cAEZ,gBAAgB,SAEhB,IAAI,CAAC,KAAK,EAAE,cAAc,GAAG,QAAQ,CAAC,GAAG,SAAS,GAEhD,eAAe,CAwhB3B;;;;4BApkBY,SAAS;;;;;;;;aAKR,OAAO;;;;UAEP,MAAM;;;;;uCAKT,SAAS,QAET,IAAI,KAEF,SAAS;kCAtBZ,sBAAsB;sCAAtB,sBAAsB;2BAAtB,sBAAsB;qCAAtB,sBAAsB;+BAAtB,sBAAsB"}
611 node_modules/micromark/lib/create-tokenizer.js generated vendored Normal file
@@ -0,0 +1,611 @@
/**
 * @import {
 *   Chunk,
 *   Code,
 *   ConstructRecord,
 *   Construct,
 *   Effects,
 *   InitialConstruct,
 *   ParseContext,
 *   Point,
 *   State,
 *   TokenizeContext,
 *   Token
 * } from 'micromark-util-types'
 */

/**
 * @callback Restore
 *   Restore the state.
 * @returns {undefined}
 *   Nothing.
 *
 * @typedef Info
 *   Info.
 * @property {Restore} restore
 *   Restore.
 * @property {number} from
 *   From.
 *
 * @callback ReturnHandle
 *   Handle a successful run.
 * @param {Construct} construct
 *   Construct.
 * @param {Info} info
 *   Info.
 * @returns {undefined}
 *   Nothing.
 */

import { markdownLineEnding } from 'micromark-util-character';
import { push, splice } from 'micromark-util-chunked';
import { resolveAll } from 'micromark-util-resolve-all';
/**
 * Create a tokenizer.
 * Tokenizers deal with one type of data (e.g., containers, flow, text).
 * The parser is the object dealing with it all.
 * `initialize` works like other constructs, except that only its `tokenize`
 * function is used, in which case it doesn’t receive an `ok` or `nok`.
 * `from` can be given to set the point before the first character, although
 * when further lines are indented, they must be set with `defineSkip`.
 *
 * @param {ParseContext} parser
 *   Parser.
 * @param {InitialConstruct} initialize
 *   Construct.
 * @param {Omit<Point, '_bufferIndex' | '_index'> | undefined} [from]
 *   Point (optional).
 * @returns {TokenizeContext}
 *   Context.
 */
export function createTokenizer(parser, initialize, from) {
  /** @type {Point} */
  let point = {
    _bufferIndex: -1,
    _index: 0,
    line: from && from.line || 1,
    column: from && from.column || 1,
    offset: from && from.offset || 0
  };
  /** @type {Record<string, number>} */
  const columnStart = {};
  /** @type {Array<Construct>} */
  const resolveAllConstructs = [];
  /** @type {Array<Chunk>} */
  let chunks = [];
  /** @type {Array<Token>} */
  let stack = [];
  /** @type {boolean | undefined} */
  let consumed = true;

  /**
   * Tools used for tokenizing.
   *
   * @type {Effects}
   */
  const effects = {
    attempt: constructFactory(onsuccessfulconstruct),
    check: constructFactory(onsuccessfulcheck),
    consume,
    enter,
    exit,
    interrupt: constructFactory(onsuccessfulcheck, {
      interrupt: true
    })
  };

  /**
   * State and tools for resolving and serializing.
   *
   * @type {TokenizeContext}
   */
  const context = {
    code: null,
    containerState: {},
    defineSkip,
    events: [],
    now,
    parser,
    previous: null,
    sliceSerialize,
    sliceStream,
    write
  };

  /**
   * The state function.
   *
   * @type {State | undefined}
   */
  let state = initialize.tokenize.call(context, effects);

  /**
   * Track which character we expect to be consumed, to catch bugs.
   *
   * @type {Code}
   */
  let expectedCode;
  if (initialize.resolveAll) {
    resolveAllConstructs.push(initialize);
  }
  return context;

  /** @type {TokenizeContext['write']} */
  function write(slice) {
    chunks = push(chunks, slice);
    main();

    // Exit if we’re not done, resolve might change stuff.
    if (chunks[chunks.length - 1] !== null) {
      return [];
    }
    addResult(initialize, 0);

    // Otherwise, resolve, and exit.
    context.events = resolveAll(resolveAllConstructs, context.events, context);
    return context.events;
  }

  //
  // Tools.
  //

  /** @type {TokenizeContext['sliceSerialize']} */
  function sliceSerialize(token, expandTabs) {
    return serializeChunks(sliceStream(token), expandTabs);
  }

  /** @type {TokenizeContext['sliceStream']} */
  function sliceStream(token) {
    return sliceChunks(chunks, token);
  }

  /** @type {TokenizeContext['now']} */
  function now() {
    // This is a hot path, so we clone manually instead of `Object.assign({}, point)`
    const {
      _bufferIndex,
      _index,
      line,
      column,
      offset
    } = point;
    return {
      _bufferIndex,
      _index,
      line,
      column,
      offset
    };
  }

  /** @type {TokenizeContext['defineSkip']} */
  function defineSkip(value) {
    columnStart[value.line] = value.column;
    accountForPotentialSkip();
  }

  //
  // State management.
  //

  /**
   * Main loop (note that `_index` and `_bufferIndex` in `point` are modified by
   * `consume`).
   * Here is where we walk through the chunks, which either include strings of
   * several characters, or numerical character codes.
   * The reason to do this in a loop instead of a call is so the stack can
   * drain.
   *
   * @returns {undefined}
   *   Nothing.
   */
  function main() {
    /** @type {number} */
    let chunkIndex;
    while (point._index < chunks.length) {
      const chunk = chunks[point._index];

      // If we’re in a buffer chunk, loop through it.
      if (typeof chunk === 'string') {
        chunkIndex = point._index;
        if (point._bufferIndex < 0) {
          point._bufferIndex = 0;
        }
        while (point._index === chunkIndex && point._bufferIndex < chunk.length) {
          go(chunk.charCodeAt(point._bufferIndex));
        }
      } else {
        go(chunk);
      }
    }
  }

  /**
   * Deal with one code.
   *
   * @param {Code} code
   *   Code.
   * @returns {undefined}
   *   Nothing.
   */
  function go(code) {
    consumed = undefined;
    expectedCode = code;
    state = state(code);
  }

  /** @type {Effects['consume']} */
  function consume(code) {
    if (markdownLineEnding(code)) {
      point.line++;
      point.column = 1;
      point.offset += code === -3 ? 2 : 1;
      accountForPotentialSkip();
    } else if (code !== -1) {
      point.column++;
      point.offset++;
    }

    // Not in a string chunk.
    if (point._bufferIndex < 0) {
      point._index++;
    } else {
      point._bufferIndex++;

      // At end of string chunk.
      if (point._bufferIndex ===
      // Points w/ non-negative `_bufferIndex` reference
      // strings.
      /** @type {string} */
      chunks[point._index].length) {
        point._bufferIndex = -1;
        point._index++;
      }
    }

    // Expose the previous character.
    context.previous = code;

    // Mark as consumed.
    consumed = true;
  }

  /** @type {Effects['enter']} */
  function enter(type, fields) {
    /** @type {Token} */
    // @ts-expect-error Patch instead of assign required fields to help GC.
    const token = fields || {};
    token.type = type;
    token.start = now();
    context.events.push(['enter', token, context]);
    stack.push(token);
    return token;
  }

  /** @type {Effects['exit']} */
  function exit(type) {
    const token = stack.pop();
    token.end = now();
    context.events.push(['exit', token, context]);
    return token;
  }

  /**
   * Use results.
   *
   * @type {ReturnHandle}
   */
  function onsuccessfulconstruct(construct, info) {
    addResult(construct, info.from);
  }

  /**
   * Discard results.
   *
   * @type {ReturnHandle}
   */
  function onsuccessfulcheck(_, info) {
    info.restore();
  }

  /**
   * Factory to attempt/check/interrupt.
   *
   * @param {ReturnHandle} onreturn
   *   Callback.
   * @param {{interrupt?: boolean | undefined} | undefined} [fields]
   *   Fields.
   */
  function constructFactory(onreturn, fields) {
    return hook;

    /**
     * Handle either an object mapping codes to constructs, a list of
     * constructs, or a single construct.
     *
     * @param {Array<Construct> | ConstructRecord | Construct} constructs
     *   Constructs.
     * @param {State} returnState
     *   State.
     * @param {State | undefined} [bogusState]
     *   State.
     * @returns {State}
     *   State.
     */
    function hook(constructs, returnState, bogusState) {
      /** @type {ReadonlyArray<Construct>} */
      let listOfConstructs;
      /** @type {number} */
      let constructIndex;
      /** @type {Construct} */
      let currentConstruct;
      /** @type {Info} */
      let info;
      return Array.isArray(constructs) ? /* c8 ignore next 1 */
      handleListOfConstructs(constructs) : 'tokenize' in constructs ?
      // Looks like a construct.
      handleListOfConstructs([(/** @type {Construct} */constructs)]) : handleMapOfConstructs(constructs);

      /**
       * Handle a list of construct.
       *
       * @param {ConstructRecord} map
       *   Constructs.
       * @returns {State}
       *   State.
       */
      function handleMapOfConstructs(map) {
        return start;

        /** @type {State} */
        function start(code) {
          const left = code !== null && map[code];
          const all = code !== null && map.null;
          const list = [
          // To do: add more extension tests.
          /* c8 ignore next 2 */
          ...(Array.isArray(left) ? left : left ? [left] : []), ...(Array.isArray(all) ? all : all ? [all] : [])];
          return handleListOfConstructs(list)(code);
        }
      }

      /**
       * Handle a list of construct.
       *
       * @param {ReadonlyArray<Construct>} list
       *   Constructs.
       * @returns {State}
       *   State.
       */
      function handleListOfConstructs(list) {
        listOfConstructs = list;
        constructIndex = 0;
        if (list.length === 0) {
          return bogusState;
        }
        return handleConstruct(list[constructIndex]);
      }

      /**
       * Handle a single construct.
       *
       * @param {Construct} construct
       *   Construct.
       * @returns {State}
       *   State.
       */
      function handleConstruct(construct) {
        return start;

        /** @type {State} */
        function start(code) {
          // To do: not needed to store if there is no bogus state, probably?
          // Currently doesn’t work because `inspect` in document does a check
          // w/o a bogus, which doesn’t make sense. But it does seem to help perf
          // by not storing.
          info = store();
          currentConstruct = construct;
          if (!construct.partial) {
            context.currentConstruct = construct;
          }

          // Always populated by defaults.

          if (construct.name && context.parser.constructs.disable.null.includes(construct.name)) {
            return nok(code);
          }
          return construct.tokenize.call(
          // If we do have fields, create an object w/ `context` as its
          // prototype.
          // This allows a “live binding”, which is needed for `interrupt`.
          fields ? Object.assign(Object.create(context), fields) : context, effects, ok, nok)(code);
        }
      }

      /** @type {State} */
      function ok(code) {
        consumed = true;
        onreturn(currentConstruct, info);
        return returnState;
      }

      /** @type {State} */
      function nok(code) {
        consumed = true;
        info.restore();
        if (++constructIndex < listOfConstructs.length) {
          return handleConstruct(listOfConstructs[constructIndex]);
        }
        return bogusState;
      }
    }
  }

  /**
   * @param {Construct} construct
   *   Construct.
   * @param {number} from
   *   From.
   * @returns {undefined}
   *   Nothing.
   */
  function addResult(construct, from) {
    if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
      resolveAllConstructs.push(construct);
    }
    if (construct.resolve) {
      splice(context.events, from, context.events.length - from, construct.resolve(context.events.slice(from), context));
    }
    if (construct.resolveTo) {
      context.events = construct.resolveTo(context.events, context);
    }
  }

  /**
   * Store state.
   *
   * @returns {Info}
   *   Info.
   */
  function store() {
    const startPoint = now();
    const startPrevious = context.previous;
    const startCurrentConstruct = context.currentConstruct;
    const startEventsIndex = context.events.length;
    const startStack = Array.from(stack);
    return {
      from: startEventsIndex,
      restore
    };

    /**
|
||||
* Restore state.
|
||||
*
|
||||
* @returns {undefined}
|
||||
* Nothing.
|
||||
*/
|
||||
function restore() {
|
||||
point = startPoint;
|
||||
context.previous = startPrevious;
|
||||
context.currentConstruct = startCurrentConstruct;
|
||||
context.events.length = startEventsIndex;
|
||||
stack = startStack;
|
||||
accountForPotentialSkip();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Move the current point a bit forward in the line when it’s on a column
|
||||
* skip.
|
||||
*
|
||||
* @returns {undefined}
|
||||
* Nothing.
|
||||
*/
|
||||
function accountForPotentialSkip() {
|
||||
if (point.line in columnStart && point.column < 2) {
|
||||
point.column = columnStart[point.line];
|
||||
point.offset += columnStart[point.line] - 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the chunks from a slice of chunks in the range of a token.
|
||||
*
|
||||
* @param {ReadonlyArray<Chunk>} chunks
|
||||
* Chunks.
|
||||
* @param {Pick<Token, 'end' | 'start'>} token
|
||||
* Token.
|
||||
* @returns {Array<Chunk>}
|
||||
* Chunks.
|
||||
*/
|
||||
function sliceChunks(chunks, token) {
|
||||
const startIndex = token.start._index;
|
||||
const startBufferIndex = token.start._bufferIndex;
|
||||
const endIndex = token.end._index;
|
||||
const endBufferIndex = token.end._bufferIndex;
|
||||
/** @type {Array<Chunk>} */
|
||||
let view;
|
||||
if (startIndex === endIndex) {
|
||||
// @ts-expect-error `_bufferIndex` is used on string chunks.
|
||||
view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)];
|
||||
} else {
|
||||
view = chunks.slice(startIndex, endIndex);
|
||||
if (startBufferIndex > -1) {
|
||||
const head = view[0];
|
||||
if (typeof head === 'string') {
|
||||
view[0] = head.slice(startBufferIndex);
|
||||
/* c8 ignore next 4 -- used to be used, no longer */
|
||||
} else {
|
||||
view.shift();
|
||||
}
|
||||
}
|
||||
if (endBufferIndex > 0) {
|
||||
// @ts-expect-error `_bufferIndex` is used on string chunks.
|
||||
view.push(chunks[endIndex].slice(0, endBufferIndex));
|
||||
}
|
||||
}
|
||||
return view;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the string value of a slice of chunks.
|
||||
*
|
||||
* @param {ReadonlyArray<Chunk>} chunks
|
||||
* Chunks.
|
||||
* @param {boolean | undefined} [expandTabs=false]
|
||||
* Whether to expand tabs (default: `false`).
|
||||
* @returns {string}
|
||||
* Result.
|
||||
*/
|
||||
function serializeChunks(chunks, expandTabs) {
|
||||
let index = -1;
|
||||
/** @type {Array<string>} */
|
||||
const result = [];
|
||||
/** @type {boolean | undefined} */
|
||||
let atTab;
|
||||
while (++index < chunks.length) {
|
||||
const chunk = chunks[index];
|
||||
/** @type {string} */
|
||||
let value;
|
||||
if (typeof chunk === 'string') {
|
||||
value = chunk;
|
||||
} else switch (chunk) {
|
||||
case -5:
|
||||
{
|
||||
value = "\r";
|
||||
break;
|
||||
}
|
||||
case -4:
|
||||
{
|
||||
value = "\n";
|
||||
break;
|
||||
}
|
||||
case -3:
|
||||
{
|
||||
value = "\r" + "\n";
|
||||
break;
|
||||
}
|
||||
case -2:
|
||||
{
|
||||
value = expandTabs ? " " : "\t";
|
||||
break;
|
||||
}
|
||||
case -1:
|
||||
{
|
||||
if (!expandTabs && atTab) continue;
|
||||
value = " ";
|
||||
break;
|
||||
}
|
||||
default:
|
||||
{
|
||||
// Currently only replacement character.
|
||||
value = String.fromCharCode(chunk);
|
||||
}
|
||||
}
|
||||
atTab = chunk === -2;
|
||||
result.push(value);
|
||||
}
|
||||
return result.join('');
|
||||
}
|
4
node_modules/micromark/lib/initialize/content.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
/** @type {InitialConstruct} */
export const content: InitialConstruct;
import type { InitialConstruct } from 'micromark-util-types';
//# sourceMappingURL=content.d.ts.map
1
node_modules/micromark/lib/initialize/content.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"content.d.ts","sourceRoot":"","sources":["content.js"],"names":[],"mappings":"AAeA,+BAA+B;AAC/B,sBADW,gBAAgB,CACyB;sCAT1C,sBAAsB"}
79
node_modules/micromark/lib/initialize/content.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
/**
 * @import {
 *   InitialConstruct,
 *   Initializer,
 *   State,
 *   TokenizeContext,
 *   Token
 * } from 'micromark-util-types'
 */

import { factorySpace } from 'micromark-factory-space';
import { markdownLineEnding } from 'micromark-util-character';
/** @type {InitialConstruct} */
export const content = {
  tokenize: initializeContent
};

/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Initializer}
 *   Content.
 */
function initializeContent(effects) {
  const contentStart = effects.attempt(this.parser.constructs.contentInitial, afterContentStartConstruct, paragraphInitial);
  /** @type {Token} */
  let previous;
  return contentStart;

  /** @type {State} */
  function afterContentStartConstruct(code) {
    if (code === null) {
      effects.consume(code);
      return;
    }
    effects.enter("lineEnding");
    effects.consume(code);
    effects.exit("lineEnding");
    return factorySpace(effects, contentStart, "linePrefix");
  }

  /** @type {State} */
  function paragraphInitial(code) {
    effects.enter("paragraph");
    return lineStart(code);
  }

  /** @type {State} */
  function lineStart(code) {
    const token = effects.enter("chunkText", {
      contentType: "text",
      previous
    });
    if (previous) {
      previous.next = token;
    }
    previous = token;
    return data(code);
  }

  /** @type {State} */
  function data(code) {
    if (code === null) {
      effects.exit("chunkText");
      effects.exit("paragraph");
      effects.consume(code);
      return;
    }
    if (markdownLineEnding(code)) {
      effects.consume(code);
      effects.exit("chunkText");
      return lineStart;
    }

    // Data.
    effects.consume(code);
    return data;
  }
}
10
node_modules/micromark/lib/initialize/document.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
/** @type {InitialConstruct} */
export const document: InitialConstruct;
/**
 * Construct and its state.
 */
export type StackItem = [Construct, ContainerState];
import type { InitialConstruct } from 'micromark-util-types';
import type { Construct } from 'micromark-util-types';
import type { ContainerState } from 'micromark-util-types';
//# sourceMappingURL=document.d.ts.map
1
node_modules/micromark/lib/initialize/document.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"document.d.ts","sourceRoot":"","sources":["document.js"],"names":[],"mappings":"AAyBA,+BAA+B;AAC/B,uBADW,gBAAgB,CAC2B;;;;wBAXzC,CAAC,SAAS,EAAE,cAAc,CAAC;sCAJ9B,sBAAsB;+BAAtB,sBAAsB;oCAAtB,sBAAsB"}
362
node_modules/micromark/lib/initialize/document.js
generated
vendored
Normal file
@@ -0,0 +1,362 @@
/**
 * @import {
 *   Construct,
 *   ContainerState,
 *   InitialConstruct,
 *   Initializer,
 *   Point,
 *   State,
 *   TokenizeContext,
 *   Tokenizer,
 *   Token
 * } from 'micromark-util-types'
 */

/**
 * @typedef {[Construct, ContainerState]} StackItem
 *   Construct and its state.
 */

import { factorySpace } from 'micromark-factory-space';
import { markdownLineEnding } from 'micromark-util-character';
import { splice } from 'micromark-util-chunked';
/** @type {InitialConstruct} */
export const document = {
  tokenize: initializeDocument
};

/** @type {Construct} */
const containerConstruct = {
  tokenize: tokenizeContainer
};

/**
 * @this {TokenizeContext}
 *   Self.
 * @type {Initializer}
 *   Initializer.
 */
function initializeDocument(effects) {
  const self = this;
  /** @type {Array<StackItem>} */
  const stack = [];
  let continued = 0;
  /** @type {TokenizeContext | undefined} */
  let childFlow;
  /** @type {Token | undefined} */
  let childToken;
  /** @type {number} */
  let lineStartOffset;
  return start;

  /** @type {State} */
  function start(code) {
    // First we iterate through the open blocks, starting with the root
    // document, and descending through last children down to the last open
    // block.
    // Each block imposes a condition that the line must satisfy if the block is
    // to remain open.
    // For example, a block quote requires a `>` character.
    // A paragraph requires a non-blank line.
    // In this phase we may match all or just some of the open blocks.
    // But we cannot close unmatched blocks yet, because we may have a lazy
    // continuation line.
    if (continued < stack.length) {
      const item = stack[continued];
      self.containerState = item[1];
      return effects.attempt(item[0].continuation, documentContinue, checkNewContainers)(code);
    }

    // Done.
    return checkNewContainers(code);
  }

  /** @type {State} */
  function documentContinue(code) {
    continued++;

    // Note: this field is called `_closeFlow` but it also closes containers.
    // Perhaps a good idea to rename it but it’s already used in the wild by
    // extensions.
    if (self.containerState._closeFlow) {
      self.containerState._closeFlow = undefined;
      if (childFlow) {
        closeFlow();
      }

      // Note: this algorithm for moving events around is similar to the
      // algorithm when dealing with lazy lines in `writeToChild`.
      const indexBeforeExits = self.events.length;
      let indexBeforeFlow = indexBeforeExits;
      /** @type {Point | undefined} */
      let point;

      // Find the flow chunk.
      while (indexBeforeFlow--) {
        if (self.events[indexBeforeFlow][0] === 'exit' && self.events[indexBeforeFlow][1].type === "chunkFlow") {
          point = self.events[indexBeforeFlow][1].end;
          break;
        }
      }
      exitContainers(continued);

      // Fix positions.
      let index = indexBeforeExits;
      while (index < self.events.length) {
        self.events[index][1].end = {
          ...point
        };
        index++;
      }

      // Inject the exits earlier (they’re still also at the end).
      splice(self.events, indexBeforeFlow + 1, 0, self.events.slice(indexBeforeExits));

      // Discard the duplicate exits.
      self.events.length = index;
      return checkNewContainers(code);
    }
    return start(code);
  }

  /** @type {State} */
  function checkNewContainers(code) {
    // Next, after consuming the continuation markers for existing blocks, we
    // look for new block starts (e.g. `>` for a block quote).
    // If we encounter a new block start, we close any blocks unmatched in
    // step 1 before creating the new block as a child of the last matched
    // block.
    if (continued === stack.length) {
      // No need to `check` whether there’s a container, or `exitContainers`
      // would be moot.
      // We can instead immediately `attempt` to parse one.
      if (!childFlow) {
        return documentContinued(code);
      }

      // If we have concrete content, such as block HTML or fenced code,
      // we can’t have containers “pierce” into them, so we can immediately
      // start.
      if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
        return flowStart(code);
      }

      // If we do have flow, it could still be a blank line,
      // but we’d be interrupting it w/ a new container if there’s a current
      // construct.
      // To do: next major: remove `_gfmTableDynamicInterruptHack` (no longer
      // needed in micromark-extension-gfm-table@1.0.6).
      self.interrupt = Boolean(childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack);
    }

    // Check if there is a new container.
    self.containerState = {};
    return effects.check(containerConstruct, thereIsANewContainer, thereIsNoNewContainer)(code);
  }

  /** @type {State} */
  function thereIsANewContainer(code) {
    if (childFlow) closeFlow();
    exitContainers(continued);
    return documentContinued(code);
  }

  /** @type {State} */
  function thereIsNoNewContainer(code) {
    self.parser.lazy[self.now().line] = continued !== stack.length;
    lineStartOffset = self.now().offset;
    return flowStart(code);
  }

  /** @type {State} */
  function documentContinued(code) {
    // Try new containers.
    self.containerState = {};
    return effects.attempt(containerConstruct, containerContinue, flowStart)(code);
  }

  /** @type {State} */
  function containerContinue(code) {
    continued++;
    stack.push([self.currentConstruct, self.containerState]);
    // Try another.
    return documentContinued(code);
  }

  /** @type {State} */
  function flowStart(code) {
    if (code === null) {
      if (childFlow) closeFlow();
      exitContainers(0);
      effects.consume(code);
      return;
    }
    childFlow = childFlow || self.parser.flow(self.now());
    effects.enter("chunkFlow", {
      _tokenizer: childFlow,
      contentType: "flow",
      previous: childToken
    });
    return flowContinue(code);
  }

  /** @type {State} */
  function flowContinue(code) {
    if (code === null) {
      writeToChild(effects.exit("chunkFlow"), true);
      exitContainers(0);
      effects.consume(code);
      return;
    }
    if (markdownLineEnding(code)) {
      effects.consume(code);
      writeToChild(effects.exit("chunkFlow"));
      // Get ready for the next line.
      continued = 0;
      self.interrupt = undefined;
      return start;
    }
    effects.consume(code);
    return flowContinue;
  }

  /**
   * @param {Token} token
   *   Token.
   * @param {boolean | undefined} [endOfFile]
   *   Whether the token is at the end of the file (default: `false`).
   * @returns {undefined}
   *   Nothing.
   */
  function writeToChild(token, endOfFile) {
    const stream = self.sliceStream(token);
    if (endOfFile) stream.push(null);
    token.previous = childToken;
    if (childToken) childToken.next = token;
    childToken = token;
    childFlow.defineSkip(token.start);
    childFlow.write(stream);

    // Alright, so we just added a lazy line:
    //
    // ```markdown
    // > a
    // b.
    //
    // Or:
    //
    // > ~~~c
    // d
    //
    // Or:
    //
    // > | e |
    // f
    // ```
    //
    // The construct in the second example (fenced code) does not accept lazy
    // lines, so it marked itself as done at the end of its first line, and
    // then the content construct parses `d`.
    // Most constructs in markdown match on the first line: if the first line
    // forms a construct, a non-lazy line can’t “unmake” it.
    //
    // The construct in the third example is potentially a GFM table, and
    // those are *weird*.
    // It *could* be a table, from the first line, if the following line
    // matches a condition.
    // In this case, that second line is lazy, which “unmakes” the first line
    // and turns the whole into one content block.
    //
    // We’ve now parsed the non-lazy and the lazy line, and can figure out
    // whether the lazy line started a new flow block.
    // If it did, we exit the current containers between the two flow blocks.
    if (self.parser.lazy[token.start.line]) {
      let index = childFlow.events.length;
      while (index--) {
        if (
        // The token starts before the line ending…
        childFlow.events[index][1].start.offset < lineStartOffset && (
        // …and either is not ended yet…
        !childFlow.events[index][1].end ||
        // …or ends after it.
        childFlow.events[index][1].end.offset > lineStartOffset)) {
          // Exit: there’s still something open, which means it’s a lazy line
          // part of something.
          return;
        }
      }

      // Note: this algorithm for moving events around is similar to the
      // algorithm when closing flow in `documentContinue`.
      const indexBeforeExits = self.events.length;
      let indexBeforeFlow = indexBeforeExits;
      /** @type {boolean | undefined} */
      let seen;
      /** @type {Point | undefined} */
      let point;

      // Find the previous chunk (the one before the lazy line).
      while (indexBeforeFlow--) {
        if (self.events[indexBeforeFlow][0] === 'exit' && self.events[indexBeforeFlow][1].type === "chunkFlow") {
          if (seen) {
            point = self.events[indexBeforeFlow][1].end;
            break;
          }
          seen = true;
        }
      }
      exitContainers(continued);

      // Fix positions.
      index = indexBeforeExits;
      while (index < self.events.length) {
        self.events[index][1].end = {
          ...point
        };
        index++;
      }

      // Inject the exits earlier (they’re still also at the end).
      splice(self.events, indexBeforeFlow + 1, 0, self.events.slice(indexBeforeExits));

      // Discard the duplicate exits.
      self.events.length = index;
    }
  }

  /**
   * @param {number} size
   *   Size.
   * @returns {undefined}
   *   Nothing.
   */
  function exitContainers(size) {
    let index = stack.length;

    // Exit open containers.
    while (index-- > size) {
      const entry = stack[index];
      self.containerState = entry[1];
      entry[0].exit.call(self, effects);
    }
    stack.length = size;
  }
  function closeFlow() {
    childFlow.write([null]);
    childToken = undefined;
    childFlow = undefined;
    self.containerState._closeFlow = undefined;
  }
}

/**
 * @this {TokenizeContext}
 *   Context.
 * @type {Tokenizer}
 *   Tokenizer.
 */
function tokenizeContainer(effects, ok, nok) {
  // Always populated by defaults.

  return factorySpace(effects, effects.attempt(this.parser.constructs.document, ok, nok), "linePrefix", this.parser.constructs.disable.null.includes('codeIndented') ? undefined : 4);
}
4
node_modules/micromark/lib/initialize/flow.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
/** @type {InitialConstruct} */
export const flow: InitialConstruct;
import type { InitialConstruct } from 'micromark-util-types';
//# sourceMappingURL=flow.d.ts.map
1
node_modules/micromark/lib/initialize/flow.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"flow.d.ts","sourceRoot":"","sources":["flow.js"],"names":[],"mappings":"AAeA,+BAA+B;AAC/B,mBADW,gBAAgB,CACmB;sCAVpC,sBAAsB"}
58
node_modules/micromark/lib/initialize/flow.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
/**
 * @import {
 *   InitialConstruct,
 *   Initializer,
 *   State,
 *   TokenizeContext
 * } from 'micromark-util-types'
 */

import { blankLine, content } from 'micromark-core-commonmark';
import { factorySpace } from 'micromark-factory-space';
import { markdownLineEnding } from 'micromark-util-character';
/** @type {InitialConstruct} */
export const flow = {
  tokenize: initializeFlow
};

/**
 * @this {TokenizeContext}
 *   Self.
 * @type {Initializer}
 *   Initializer.
 */
function initializeFlow(effects) {
  const self = this;
  const initial = effects.attempt(
  // Try to parse a blank line.
  blankLine, atBlankEnding,
  // Try to parse initial flow (essentially, only code).
  effects.attempt(this.parser.constructs.flowInitial, afterConstruct, factorySpace(effects, effects.attempt(this.parser.constructs.flow, afterConstruct, effects.attempt(content, afterConstruct)), "linePrefix")));
  return initial;

  /** @type {State} */
  function atBlankEnding(code) {
    if (code === null) {
      effects.consume(code);
      return;
    }
    effects.enter("lineEndingBlank");
    effects.consume(code);
    effects.exit("lineEndingBlank");
    self.currentConstruct = undefined;
    return initial;
  }

  /** @type {State} */
  function afterConstruct(code) {
    if (code === null) {
      effects.consume(code);
      return;
    }
    effects.enter("lineEnding");
    effects.consume(code);
    effects.exit("lineEnding");
    self.currentConstruct = undefined;
    return initial;
  }
}
8
node_modules/micromark/lib/initialize/text.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
export namespace resolver {
  let resolveAll: Resolver;
}
export const string: InitialConstruct;
export const text: InitialConstruct;
import type { Resolver } from 'micromark-util-types';
import type { InitialConstruct } from 'micromark-util-types';
//# sourceMappingURL=text.d.ts.map
1
node_modules/micromark/lib/initialize/text.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"text.d.ts","sourceRoot":"","sources":["text.js"],"names":[],"mappings":";;;AAeA,sCAAiD;AACjD,oCAA6C;8BARnC,sBAAsB;sCAAtB,sBAAsB"}
212
node_modules/micromark/lib/initialize/text.js
generated
vendored
Normal file
@@ -0,0 +1,212 @@
/**
 * @import {
 *   Code,
 *   InitialConstruct,
 *   Initializer,
 *   Resolver,
 *   State,
 *   TokenizeContext
 * } from 'micromark-util-types'
 */

export const resolver = {
  resolveAll: createResolver()
};
export const string = initializeFactory('string');
export const text = initializeFactory('text');

/**
 * @param {'string' | 'text'} field
 *   Field.
 * @returns {InitialConstruct}
 *   Construct.
 */
function initializeFactory(field) {
  return {
    resolveAll: createResolver(field === 'text' ? resolveAllLineSuffixes : undefined),
    tokenize: initializeText
  };

  /**
   * @this {TokenizeContext}
   *   Context.
   * @type {Initializer}
   */
  function initializeText(effects) {
    const self = this;
    const constructs = this.parser.constructs[field];
    const text = effects.attempt(constructs, start, notText);
    return start;

    /** @type {State} */
    function start(code) {
      return atBreak(code) ? text(code) : notText(code);
    }

    /** @type {State} */
    function notText(code) {
      if (code === null) {
        effects.consume(code);
        return;
      }
      effects.enter("data");
      effects.consume(code);
      return data;
    }

    /** @type {State} */
    function data(code) {
      if (atBreak(code)) {
        effects.exit("data");
        return text(code);
      }

      // Data.
      effects.consume(code);
      return data;
    }

    /**
     * @param {Code} code
     *   Code.
     * @returns {boolean}
     *   Whether the code is a break.
     */
    function atBreak(code) {
      if (code === null) {
        return true;
      }
      const list = constructs[code];
      let index = -1;
      if (list) {
        // Always populated by defaults.

        while (++index < list.length) {
          const item = list[index];
          if (!item.previous || item.previous.call(self, self.previous)) {
            return true;
          }
        }
      }
      return false;
    }
  }
}

/**
 * @param {Resolver | undefined} [extraResolver]
 *   Resolver.
 * @returns {Resolver}
 *   Resolver.
 */
function createResolver(extraResolver) {
  return resolveAllText;

  /** @type {Resolver} */
  function resolveAllText(events, context) {
    let index = -1;
    /** @type {number | undefined} */
    let enter;

    // A rather boring computation (to merge adjacent `data` events) which
    // improves micromark performance by 29%.
    while (++index <= events.length) {
      if (enter === undefined) {
        if (events[index] && events[index][1].type === "data") {
          enter = index;
          index++;
        }
      } else if (!events[index] || events[index][1].type !== "data") {
        // Don’t do anything if there is one data token.
        if (index !== enter + 2) {
          events[enter][1].end = events[index - 1][1].end;
          events.splice(enter + 2, index - enter - 2);
          index = enter + 2;
        }
        enter = undefined;
      }
    }
    return extraResolver ? extraResolver(events, context) : events;
  }
}

/**
 * A rather ugly set of instructions which again looks at chunks in the input
 * stream.
 * The reason to do this here is that it is *much* faster to parse in reverse.
 * And that we can’t hook into `null` to split the line suffix before an EOF.
 * To do: figure out if we can make this into a clean utility, or even in core.
 * As it will be useful for GFM’s literal autolink extension (and maybe even
 * tables?)
 *
 * @type {Resolver}
 */
function resolveAllLineSuffixes(events, context) {
  let eventIndex = 0; // Skip first.

  while (++eventIndex <= events.length) {
    if ((eventIndex === events.length || events[eventIndex][1].type === "lineEnding") && events[eventIndex - 1][1].type === "data") {
      const data = events[eventIndex - 1][1];
      const chunks = context.sliceStream(data);
      let index = chunks.length;
      let bufferIndex = -1;
      let size = 0;
      /** @type {boolean | undefined} */
      let tabs;
      while (index--) {
        const chunk = chunks[index];
        if (typeof chunk === 'string') {
          bufferIndex = chunk.length;
          while (chunk.charCodeAt(bufferIndex - 1) === 32) {
            size++;
            bufferIndex--;
          }
          if (bufferIndex) break;
          bufferIndex = -1;
        }
        // Number
        else if (chunk === -2) {
          tabs = true;
          size++;
        } else if (chunk === -1) {
          // Empty
        } else {
          // Replacement character, exit.
          index++;
          break;
        }
      }

      // Allow final trailing whitespace.
      if (context._contentTypeTextTrailing && eventIndex === events.length) {
        size = 0;
      }
      if (size) {
        const token = {
          type: eventIndex === events.length || tabs || size < 2 ? "lineSuffix" : "hardBreakTrailing",
          start: {
            _bufferIndex: index ? bufferIndex : data.start._bufferIndex + bufferIndex,
            _index: data.start._index + index,
            line: data.end.line,
            column: data.end.column - size,
            offset: data.end.offset - size
          },
          end: {
            ...data.end
          }
        };
        data.end = {
          ...token.start
        };
        if (data.start.offset === data.end.offset) {
          Object.assign(data, token);
        } else {
          events.splice(eventIndex, 0, ['enter', token, context], ['exit', token, context]);
          eventIndex += 2;
        }
      }
      eventIndex++;
    }
  }
  return events;
}
10
node_modules/micromark/lib/parse.d.ts
generated
vendored
Normal file
@@ -0,0 +1,10 @@
/**
 * @param {ParseOptions | null | undefined} [options]
 *   Configuration (optional).
 * @returns {ParseContext}
 *   Parser.
 */
export function parse(options?: ParseOptions | null | undefined): ParseContext;
import type { ParseOptions } from 'micromark-util-types';
import type { ParseContext } from 'micromark-util-types';
//# sourceMappingURL=parse.d.ts.map
1
node_modules/micromark/lib/parse.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"parse.d.ts","sourceRoot":"","sources":["parse.js"],"names":[],"mappings":"AAkBA;;;;;GAKG;AACH,gCALW,YAAY,GAAG,IAAI,GAAG,SAAS,GAE7B,YAAY,CAoCxB;kCAlDS,sBAAsB;kCAAtB,sBAAsB"}
56
node_modules/micromark/lib/parse.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
/**
 * @import {
 *   Create,
 *   FullNormalizedExtension,
 *   InitialConstruct,
 *   ParseContext,
 *   ParseOptions
 * } from 'micromark-util-types'
 */

import { combineExtensions } from 'micromark-util-combine-extensions';
import { content } from './initialize/content.js';
import { document } from './initialize/document.js';
import { flow } from './initialize/flow.js';
import { string, text } from './initialize/text.js';
import * as defaultConstructs from './constructs.js';
import { createTokenizer } from './create-tokenizer.js';

/**
 * @param {ParseOptions | null | undefined} [options]
 *   Configuration (optional).
 * @returns {ParseContext}
 *   Parser.
 */
export function parse(options) {
  const settings = options || {};
  const constructs = /** @type {FullNormalizedExtension} */
  combineExtensions([defaultConstructs, ...(settings.extensions || [])]);

  /** @type {ParseContext} */
  const parser = {
    constructs,
    content: create(content),
    defined: [],
    document: create(document),
    flow: create(flow),
    lazy: {},
    string: create(string),
    text: create(text)
  };
  return parser;

  /**
   * @param {InitialConstruct} initial
   *   Construct to start with.
   * @returns {Create}
   *   Create a tokenizer.
   */
  function create(initial) {
    return creator;
    /** @type {Create} */
    function creator(from) {
      return createTokenizer(parser, initial, from);
    }
  }
}
9
node_modules/micromark/lib/postprocess.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
/**
 * @param {Array<Event>} events
 *   Events.
 * @returns {Array<Event>}
 *   Events.
 */
export function postprocess(events: Array<Event>): Array<Event>;
import type { Event } from 'micromark-util-types';
//# sourceMappingURL=postprocess.d.ts.map
1
node_modules/micromark/lib/postprocess.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"postprocess.d.ts","sourceRoot":"","sources":["postprocess.js"],"names":[],"mappings":"AAMA;;;;;GAKG;AACH,oCALW,KAAK,CAAC,KAAK,CAAC,GAEV,KAAK,CAAC,KAAK,CAAC,CASxB;2BAjBuB,sBAAsB"}
18
node_modules/micromark/lib/postprocess.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
/**
 * @import {Event} from 'micromark-util-types'
 */

import { subtokenize } from 'micromark-util-subtokenize';

/**
 * @param {Array<Event>} events
 *   Events.
 * @returns {Array<Event>}
 *   Events.
 */
export function postprocess(events) {
  while (!subtokenize(events)) {
    // Empty
  }
  return events;
}
13
node_modules/micromark/lib/preprocess.d.ts
generated
vendored
Normal file
@@ -0,0 +1,13 @@
/**
 * @returns {Preprocessor}
 *   Preprocess a value.
 */
export function preprocess(): Preprocessor;
/**
 * Preprocess a value.
 */
export type Preprocessor = (value: Value, encoding?: Encoding | null | undefined, end?: boolean | null | undefined) => Array<Chunk>;
import type { Value } from 'micromark-util-types';
import type { Encoding } from 'micromark-util-types';
import type { Chunk } from 'micromark-util-types';
//# sourceMappingURL=preprocess.d.ts.map
1
node_modules/micromark/lib/preprocess.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"preprocess.d.ts","sourceRoot":"","sources":["preprocess.js"],"names":[],"mappings":"AAqBA;;;GAGG;AACH,8BAHa,YAAY,CAsHxB;;;;mCArIU,KAAK,aAEL,QAAQ,GAAG,IAAI,GAAG,SAAS,QAE3B,OAAO,GAAG,IAAI,GAAG,SAAS,KAExB,KAAK,CAAC,KAAK,CAAC;2BAZsB,sBAAsB;8BAAtB,sBAAsB;2BAAtB,sBAAsB"}
115
node_modules/micromark/lib/preprocess.js
generated
vendored
Normal file
@@ -0,0 +1,115 @@
/**
 * @import {Chunk, Code, Encoding, Value} from 'micromark-util-types'
 */

/**
 * @callback Preprocessor
 *   Preprocess a value.
 * @param {Value} value
 *   Value.
 * @param {Encoding | null | undefined} [encoding]
 *   Encoding when `value` is a typed array (optional).
 * @param {boolean | null | undefined} [end=false]
 *   Whether this is the last chunk (default: `false`).
 * @returns {Array<Chunk>}
 *   Chunks.
 */

const search = /[\0\t\n\r]/g;

/**
 * @returns {Preprocessor}
 *   Preprocess a value.
 */
export function preprocess() {
  let column = 1;
  let buffer = '';
  /** @type {boolean | undefined} */
  let start = true;
  /** @type {boolean | undefined} */
  let atCarriageReturn;
  return preprocessor;

  /** @type {Preprocessor} */
  // eslint-disable-next-line complexity
  function preprocessor(value, encoding, end) {
    /** @type {Array<Chunk>} */
    const chunks = [];
    /** @type {RegExpMatchArray | null} */
    let match;
    /** @type {number} */
    let next;
    /** @type {number} */
    let startPosition;
    /** @type {number} */
    let endPosition;
    /** @type {Code} */
    let code;
    value = buffer + (typeof value === 'string' ? value.toString() : new TextDecoder(encoding || undefined).decode(value));
    startPosition = 0;
    buffer = '';
    if (start) {
      // To do: `markdown-rs` actually parses BOMs (byte order mark).
      if (value.charCodeAt(0) === 65279) {
        startPosition++;
      }
      start = undefined;
    }
    while (startPosition < value.length) {
      search.lastIndex = startPosition;
      match = search.exec(value);
      endPosition = match && match.index !== undefined ? match.index : value.length;
      code = value.charCodeAt(endPosition);
      if (!match) {
        buffer = value.slice(startPosition);
        break;
      }
      if (code === 10 && startPosition === endPosition && atCarriageReturn) {
        chunks.push(-3);
        atCarriageReturn = undefined;
      } else {
        if (atCarriageReturn) {
          chunks.push(-5);
          atCarriageReturn = undefined;
        }
        if (startPosition < endPosition) {
          chunks.push(value.slice(startPosition, endPosition));
          column += endPosition - startPosition;
        }
        switch (code) {
          case 0:
            {
              chunks.push(65533);
              column++;
              break;
            }
          case 9:
            {
              next = Math.ceil(column / 4) * 4;
              chunks.push(-2);
              while (column++ < next) chunks.push(-1);
              break;
            }
          case 10:
            {
              chunks.push(-4);
              column = 1;
              break;
            }
          default:
            {
              atCarriageReturn = true;
              column = 1;
            }
        }
      }
      startPosition = endPosition + 1;
    }
    if (end) {
      if (atCarriageReturn) chunks.push(-5);
      if (buffer) chunks.push(buffer);
      chunks.push(null);
    }
    return chunks;
  }
}
22
node_modules/micromark/license
generated
vendored
Normal file
@@ -0,0 +1,22 @@
(The MIT License)

Copyright (c) Titus Wormer <tituswormer@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
100
node_modules/micromark/package.json
generated
vendored
Normal file
@@ -0,0 +1,100 @@
{
  "name": "micromark",
  "version": "4.0.2",
  "description": "small commonmark compliant markdown parser with positional info and concrete tokens",
  "license": "MIT",
  "keywords": [
    "commonmark",
    "compiler",
    "gfm",
    "html",
    "lexer",
    "markdown",
    "markup",
    "md",
    "unified",
    "parse",
    "parser",
    "plugin",
    "process",
    "remark",
    "render",
    "renderer",
    "token",
    "tokenizer"
  ],
  "repository": "https://github.com/micromark/micromark/tree/main/packages/micromark",
  "bugs": "https://github.com/micromark/micromark/issues",
  "funding": [
    {
      "type": "GitHub Sponsors",
      "url": "https://github.com/sponsors/unifiedjs"
    },
    {
      "type": "OpenCollective",
      "url": "https://opencollective.com/unified"
    }
  ],
  "author": "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)",
  "contributors": [
    "Titus Wormer <tituswormer@gmail.com> (https://wooorm.com)"
  ],
  "sideEffects": false,
  "type": "module",
  "files": [
    "dev/",
    "lib/",
    "index.d.ts.map",
    "index.d.ts",
    "index.js",
    "stream.d.ts.map",
    "stream.d.ts",
    "stream.js"
  ],
  "exports": {
    ".": {
      "development": "./dev/index.js",
      "default": "./index.js"
    },
    "./stream": {
      "development": "./dev/stream.js",
      "default": "./stream.js"
    }
  },
  "dependencies": {
    "@types/debug": "^4.0.0",
    "debug": "^4.0.0",
    "decode-named-character-reference": "^1.0.0",
    "devlop": "^1.0.0",
    "micromark-core-commonmark": "^2.0.0",
    "micromark-factory-space": "^2.0.0",
    "micromark-util-character": "^2.0.0",
    "micromark-util-chunked": "^2.0.0",
    "micromark-util-combine-extensions": "^2.0.0",
    "micromark-util-decode-numeric-character-reference": "^2.0.0",
    "micromark-util-encode": "^2.0.0",
    "micromark-util-normalize-identifier": "^2.0.0",
    "micromark-util-resolve-all": "^2.0.0",
    "micromark-util-sanitize-uri": "^2.0.0",
    "micromark-util-subtokenize": "^2.0.0",
    "micromark-util-symbol": "^2.0.0",
    "micromark-util-types": "^2.0.0"
  },
  "scripts": {
    "build": "micromark-build"
  },
  "xo": {
    "envs": [
      "shared-node-browser"
    ],
    "prettier": true,
    "rules": {
      "logical-assignment-operators": "off",
      "max-depth": "off",
      "unicorn/no-this-assignment": "off",
      "unicorn/prefer-at": "off",
      "unicorn/prefer-code-point": "off",
      "unicorn/prefer-event-target": "off"
    }
  }
}
488
node_modules/micromark/readme.md
generated
vendored
Normal file
@@ -0,0 +1,488 @@
# micromark

[![Build][build-badge]][build]
[![Coverage][coverage-badge]][coverage]
[![Downloads][downloads-badge]][downloads]
[![Size][bundle-size-badge]][bundle-size]
[![Sponsors][sponsors-badge]][opencollective]
[![Backers][backers-badge]][opencollective]
[![Chat][chat-badge]][chat]

Markdown parser.

> **Note**: this is the `micromark` package from the micromark monorepo.
> See the [monorepo readme][micromark] for more on the project.
> See this readme for how to use it.

## Feature highlights

<!-- Note: this section has to be in sync with the monorepo readme. -->

* [x] **[compliant][commonmark]** (100% to CommonMark)
* [x] **[extensions][]** (100% [GFM][], 100% [MDX.js][mdxjs], [directives][],
  [frontmatter][], [math][])
* [x] **[safe][security]** (by default)
* [x] **[robust][test]** (±2k tests, 100% coverage, fuzz testing)
* [x] **[small][size-debug]** (smallest CM parser at ±14kb)

## Contents

* [When should I use this?](#when-should-i-use-this)
* [What is this?](#what-is-this)
* [Install](#install)
* [Use](#use)
* [API](#api)
  * [`micromark(value[, encoding][, options])`](#micromarkvalue-encoding-options)
  * [`stream(options?)`](#streamoptions)
  * [`Options`](#options)
* [Types](#types)
* [Compatibility](#compatibility)
* [Security](#security)
* [Contribute](#contribute)
* [Sponsor](#sponsor)
* [License](#license)

## When should I use this?

<!-- Note: this section has to be in sync with the monorepo readme. -->

* If you *just* want to turn markdown into HTML (with maybe a few extensions)
* If you want to do *really complex things* with markdown

See [§ Comparison][comparison] for more info.

## What is this?

<!-- Note: this section has to be in sync with the monorepo readme. -->

`micromark` is an open source markdown parser written in JavaScript.
It’s implemented as a state machine that emits concrete tokens, so that every
byte is accounted for, with positional info.
It then compiles those tokens directly to HTML, but other tools can take the
data and for example build an AST, which is easier to work with
([`mdast-util-from-markdown`][mdast-util-to-markdown]).

While most markdown parsers work towards compliancy with CommonMark (or GFM),
this project goes further by following how the reference parsers (`cmark`,
`cmark-gfm`) work, which is confirmed with thousands of extra tests.

Other than CommonMark and GFM, micromark also supports common extensions to
markdown such as MDX, math, and frontmatter.

These npm packages have a sibling project in Rust:
[`markdown-rs`][markdown-rs].

* to learn markdown, see this [cheatsheet and tutorial][cheat]
* for more about us, see [`unifiedjs.com`][site]
* for questions, see [Discussions][chat]
* to help, see [contribute][] and [sponsor][] below

## Install

<!-- Note: this section has to be in sync with the monorepo readme. -->

This package is [ESM only][esm].
In Node.js (version 16+), install with [npm][]:

```sh
npm install micromark
```

In Deno with [`esm.sh`][esmsh]:

```js
import {micromark} from 'https://esm.sh/micromark@4'
```

In browsers with [`esm.sh`][esmsh]:

```html
<script type="module">
  import {micromark} from 'https://esm.sh/micromark@4?bundle'
</script>
```

## Use

<!-- Note: this section has to be in sync with the `micromark` readme. -->

Typical use (buffering):

```js
import {micromark} from 'micromark'

console.log(micromark('## Hello, *world*!'))
```

Yields:

```html
<h2>Hello, <em>world</em>!</h2>
```

You can pass extensions (in this case [`micromark-extension-gfm`][gfm]):

```js
import {micromark} from 'micromark'
import {gfmHtml, gfm} from 'micromark-extension-gfm'

const value = '* [x] contact@example.com ~~strikethrough~~'

const result = micromark(value, {
  extensions: [gfm()],
  htmlExtensions: [gfmHtml()]
})

console.log(result)
```

Yields:

```html
<ul>
<li><input checked="" disabled="" type="checkbox"> <a href="mailto:contact@example.com">contact@example.com</a> <del>strikethrough</del></li>
</ul>
```

Streaming interface:

```js
import {createReadStream} from 'node:fs'
import {stream} from 'micromark/stream'

createReadStream('example.md')
  .on('error', handleError)
  .pipe(stream())
  .pipe(process.stdout)

function handleError(error) {
  // Handle your error here!
  throw error
}
```

## API

`micromark` core has two entries in its export map: `micromark` and
`micromark/stream`.

`micromark` exports the identifier [`micromark`][api-micromark].
`micromark/stream` exports the identifier [`stream`][api-stream].
There are no default exports.

The export map supports the [`development` condition][development].
Run `node --conditions development module.js` to get instrumented dev code.
Without this condition, production code is loaded.
See [§ Size & debug][size-debug] for more info.

### `micromark(value[, encoding][, options])`

Compile markdown to HTML.

> Note: which encodings are supported depends on the engine.
> For info on Node.js, see *[WHATWG supported encodings][encoding]*.

###### Parameters

* `value` (`string` or [`Uint8Array`][uint8-array])
  — markdown to parse
* `encoding` (`string`, default: `'utf8'`)
  — [character encoding][encoding] to understand `value` as when it’s a
  [`Uint8Array`][uint8-array]
* `options` ([`Options`][api-options], optional)
  — configuration

###### Returns

Compiled HTML (`string`).
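When `value` is a `Uint8Array`, pass the encoding it uses as the second
argument (a small sketch; the output in the comment is what we’d expect from
the defaults):

```js
import {micromark} from 'micromark'

// Bytes instead of a string; `'utf8'` is also what’s assumed when the
// encoding is omitted.
const bytes = new TextEncoder().encode('# hi')

console.log(micromark(bytes, 'utf8'))
// => '<h1>hi</h1>'
```
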
### `stream(options?)`

Create a duplex (readable and writable) stream.

Some of the work to parse markdown can be done streaming, but in the
end buffering is required.

micromark does not handle errors for you, so you must handle errors on whatever
streams you pipe into it.
As markdown does not know errors, `micromark` itself does not emit errors.

###### Parameters

* `options` ([`Options`][api-options], optional)
  — configuration

###### Returns

Duplex stream.

### `Options`

Configuration (TypeScript type).

##### Fields

###### `allowDangerousHtml`

Whether to allow (dangerous) HTML (`boolean`, default: `false`).

The default is `false`, which still parses the HTML according to CommonMark
but shows the HTML as text instead of as elements.

Pass `true` for trusted content to get actual HTML elements.
See [§ Security][security].
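For example (a small sketch; the outputs in the comments are what we’d expect
given the behavior described above):

```js
import {micromark} from 'micromark'

// By default, raw HTML is escaped and shown as text:
console.log(micromark('Hi, <i>venus</i>!'))
// => '<p>Hi, &lt;i&gt;venus&lt;/i&gt;!</p>'

// For trusted content only, keep the actual elements:
console.log(micromark('Hi, <i>venus</i>!', {allowDangerousHtml: true}))
// => '<p>Hi, <i>venus</i>!</p>'
```
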
###### `allowDangerousProtocol`

Whether to allow dangerous protocols in links and images (`boolean`, default:
`false`).

The default is `false`, which drops URLs in links and images that use dangerous
protocols.

Pass `true` for trusted content to support all protocols.

URLs that have no protocol (which means it’s relative to the current page, such
as `./some/page.html`) and URLs that have a safe protocol (for images: `http`,
`https`; for links: `http`, `https`, `irc`, `ircs`, `mailto`, `xmpp`), are
safe.
All other URLs are dangerous and dropped.
See [§ Security][security].
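A sketch of the difference (the empty `href` on the dropped URL is what we’d
expect from the default sanitizing):

```js
import {micromark} from 'micromark'

// Dangerous protocol: the URL is dropped by default:
console.log(micromark('[click](javascript:alert(1))'))
// => '<p><a href="">click</a></p>'

// For trusted content only, all protocols are kept:
console.log(micromark('[click](javascript:alert(1))', {allowDangerousProtocol: true}))
// => '<p><a href="javascript:alert(1)">click</a></p>'
```
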
###### `defaultLineEnding`

Default line ending to use when compiling to HTML, for line endings not in
`value` (`'\r'`, `'\n'`, or `'\r\n'`; default: first line ending or `'\n'`).

Generally, `micromark` copies line endings (`\r`, `\n`, `\r\n`) in the markdown
document over to the compiled HTML.
In some cases, such as `> a`, CommonMark requires that extra line endings are
added: `<blockquote>\n<p>a</p>\n</blockquote>`.

To create that line ending, the document is checked for the first line ending
that is used.
If there is no line ending, `defaultLineEnding` is used.
If that isn’t configured, `\n` is used.
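To illustrate (a sketch; `'> a'` contains no line ending of its own, so the
configured default is used for the endings CommonMark requires):

```js
import {micromark} from 'micromark'

// All generated line endings use `\r\n`:
console.log(micromark('> a', {defaultLineEnding: '\r\n'}))
// => '<blockquote>\r\n<p>a</p>\r\n</blockquote>'
```
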
###### `extensions`
|
||||
|
||||
Array of syntax extensions (`Array<SyntaxExtension>`, default: `[]`).
|
||||
See [§ Extensions][extensions].
|
||||
|
||||
###### `htmlExtensions`
|
||||
|
||||
Array of syntax extensions (`Array<HtmlExtension>`, default: `[]`).
|
||||
See [§ Extensions][extensions].
|
||||
|
||||
## Types
|
||||
|
||||
This package is fully typed with [TypeScript][].
|
||||
It exports the additional type [`Options`][api-options].
|
||||
|
||||
## Compatibility
|
||||
|
||||
Projects maintained by the unified collective are compatible with maintained
|
||||
versions of Node.js.
|
||||
|
||||
When we cut a new major release, we drop support for unmaintained versions of
|
||||
Node.
|
||||
This means we try to keep the current release line, `micromark@4`, compatible
|
||||
with Node.js 16.
|
||||
|
||||
## Security
|
||||
|
||||
This package is safe.
|
||||
See [`security.md`][securitymd] in [`micromark/.github`][health] for how to
|
||||
submit a security report.
|
||||
|
||||
## Contribute
|
||||
|
||||
See [`contributing.md`][contributing] in [`micromark/.github`][health] for ways
|
||||
to get started.
|
||||
See [`support.md`][support] for ways to get help.
|
||||
|
||||
This project has a [code of conduct][coc].
|
||||
By interacting with this repository, organisation, or community you agree to
|
||||
abide by its terms.
|
||||
|
||||
## Sponsor
|
||||
|
||||
<!-- Note: this section has to be in sync with the monorepo readme. -->
|
||||
|
||||
Support this effort and give back by sponsoring on [OpenCollective][]!
|
||||
|
||||
<table>
<tr valign="middle">
<td width="100%" align="center" colspan="10">
<br>
<a href="https://www.salesforce.com">Salesforce</a> 🏅<br><br>
<a href="https://www.salesforce.com"><img src="https://images.opencollective.com/salesforce/ca8f997/logo/512.png" width="256"></a>
</td>
</tr>
<tr valign="middle">
<td width="20%" align="center" rowspan="2" colspan="2">
<a href="https://vercel.com">Vercel</a><br><br>
<a href="https://vercel.com"><img src="https://avatars1.githubusercontent.com/u/14985020?s=256&v=4" width="128"></a>
</td>
<td width="20%" align="center" rowspan="2" colspan="2">
<a href="https://motif.land">Motif</a><br><br>
<a href="https://motif.land"><img src="https://avatars1.githubusercontent.com/u/74457950?s=256&v=4" width="128"></a>
</td>
<td width="20%" align="center" rowspan="2" colspan="2">
<a href="https://www.hashicorp.com">HashiCorp</a><br><br>
<a href="https://www.hashicorp.com"><img src="https://avatars1.githubusercontent.com/u/761456?s=256&v=4" width="128"></a>
</td>
<td width="20%" align="center" rowspan="2" colspan="2">
<a href="https://www.gitbook.com">GitBook</a><br><br>
<a href="https://www.gitbook.com"><img src="https://avatars1.githubusercontent.com/u/7111340?s=256&v=4" width="128"></a>
</td>
<td width="20%" align="center" rowspan="2" colspan="2">
<a href="https://www.gatsbyjs.org">Gatsby</a><br><br>
<a href="https://www.gatsbyjs.org"><img src="https://avatars1.githubusercontent.com/u/12551863?s=256&v=4" width="128"></a>
</td>
</tr>
<tr valign="middle">
</tr>
<tr valign="middle">
<td width="20%" align="center" rowspan="2" colspan="2">
<a href="https://www.netlify.com">Netlify</a><br><br>
<!--OC has a sharper image-->
<a href="https://www.netlify.com"><img src="https://images.opencollective.com/netlify/4087de2/logo/256.png" width="128"></a>
</td>
<td width="10%" align="center">
<a href="https://www.coinbase.com">Coinbase</a><br><br>
<a href="https://www.coinbase.com"><img src="https://avatars1.githubusercontent.com/u/1885080?s=256&v=4" width="64"></a>
</td>
<td width="10%" align="center">
<a href="https://themeisle.com">ThemeIsle</a><br><br>
<a href="https://themeisle.com"><img src="https://avatars1.githubusercontent.com/u/58979018?s=128&v=4" width="64"></a>
</td>
<td width="10%" align="center">
<a href="https://expo.io">Expo</a><br><br>
<a href="https://expo.io"><img src="https://avatars1.githubusercontent.com/u/12504344?s=128&v=4" width="64"></a>
</td>
<td width="10%" align="center">
<a href="https://boostnote.io">Boost Note</a><br><br>
<a href="https://boostnote.io"><img src="https://images.opencollective.com/boosthub/6318083/logo/128.png" width="64"></a>
</td>
<td width="10%" align="center">
<a href="https://markdown.space">Markdown Space</a><br><br>
<a href="https://markdown.space"><img src="https://images.opencollective.com/markdown-space/e1038ed/logo/128.png" width="64"></a>
</td>
<td width="10%" align="center">
<a href="https://www.holloway.com">Holloway</a><br><br>
<a href="https://www.holloway.com"><img src="https://avatars1.githubusercontent.com/u/35904294?s=128&v=4" width="64"></a>
</td>
<td width="10%"></td>
<td width="10%"></td>
</tr>
<tr valign="middle">
<td width="100%" align="center" colspan="8">
<br>
<a href="https://opencollective.com/unified"><strong>You?</strong></a>
<br><br>
</td>
</tr>
</table>

## License

[MIT][license] © [Titus Wormer][author]

<!-- Definitions -->

[api-micromark]: #micromarkvalue-encoding-options

[api-options]: #options

[api-stream]: #streamoptions

[author]: https://wooorm.com

[backers-badge]: https://opencollective.com/unified/backers/badge.svg

[build]: https://github.com/micromark/micromark/actions

[build-badge]: https://github.com/micromark/micromark/workflows/main/badge.svg

[bundle-size]: https://bundlejs.com/?q=micromark

[bundle-size-badge]: https://img.shields.io/badge/dynamic/json?label=minzipped%20size&query=$.size.compressedSize&url=https://deno.bundlejs.com/?q=micromark

[chat]: https://github.com/micromark/micromark/discussions

[chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg

[cheat]: https://commonmark.org/help/

[coc]: https://github.com/micromark/.github/blob/main/code-of-conduct.md

[commonmark]: https://commonmark.org

[comparison]: https://github.com/micromark/micromark#comparison

[contribute]: #contribute

[contributing]: https://github.com/micromark/.github/blob/main/contributing.md

[coverage]: https://codecov.io/github/micromark/micromark

[coverage-badge]: https://img.shields.io/codecov/c/github/micromark/micromark.svg

[development]: https://nodejs.org/api/packages.html#packages_resolving_user_conditions

[directives]: https://github.com/micromark/micromark-extension-directive

[downloads]: https://www.npmjs.com/package/micromark

[downloads-badge]: https://img.shields.io/npm/dm/micromark.svg

[encoding]: https://nodejs.org/api/util.html#whatwg-supported-encodings

[esm]: https://gist.github.com/sindresorhus/a39789f98801d908bbc7ff3ecc99d99c

[esmsh]: https://esm.sh

[extensions]: https://github.com/micromark/micromark#extensions

[frontmatter]: https://github.com/micromark/micromark-extension-frontmatter

[gfm]: https://github.com/micromark/micromark-extension-gfm

[health]: https://github.com/micromark/.github

[license]: https://github.com/micromark/micromark/blob/main/license

[markdown-rs]: https://github.com/wooorm/markdown-rs

[math]: https://github.com/micromark/micromark-extension-math

[mdast-util-to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown

[mdxjs]: https://github.com/micromark/micromark-extension-mdxjs

[micromark]: https://github.com/micromark/micromark

[npm]: https://docs.npmjs.com/cli/install

[opencollective]: https://opencollective.com/unified

[security]: #security

[securitymd]: https://github.com/micromark/.github/blob/main/security.md

[site]: https://unifiedjs.com

[size-debug]: https://github.com/micromark/micromark#size--debug

[sponsor]: #sponsor

[sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg

[support]: https://github.com/micromark/.github/blob/main/support.md

[test]: https://github.com/micromark/micromark#test

[typescript]: https://www.typescriptlang.org

[uint8-array]: https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array
35
node_modules/micromark/stream.d.ts
generated
vendored
Normal file
@@ -0,0 +1,35 @@
/**
 * Create a duplex (readable and writable) stream.
 *
 * Some of the work to parse markdown can be done streaming, but in the
 * end buffering is required.
 *
 * micromark does not handle errors for you, so you must handle errors on whatever
 * streams you pipe into it.
 * As markdown does not know errors, `micromark` itself does not emit errors.
 *
 * @param {Options | null | undefined} [options]
 *   Configuration (optional).
 * @returns {MinimalDuplex}
 *   Duplex stream.
 */
export function stream(options?: Options | null | undefined): MinimalDuplex;
export type Options = import("micromark-util-types").Options;
/**
 * Function called when write was successful.
 */
export type Callback = () => undefined;
/**
 * Configuration for piping.
 */
export type PipeOptions = {
    /**
     * Whether to end the destination stream when the source stream ends.
     */
    end?: boolean | null | undefined;
};
/**
 * Duplex stream.
 */
export type MinimalDuplex = Omit<NodeJS.ReadableStream & NodeJS.WritableStream, "isPaused" | "pause" | "read" | "resume" | "setEncoding" | "unpipe" | "unshift" | "wrap">;
//# sourceMappingURL=stream.d.ts.map
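
A minimal sketch of driving this declared API directly (illustrative, not part
of the vendored file; the exact output depends on the input):

```js
import {stream} from 'micromark/stream'

const s = stream()
s.pipe(process.stdout)
s.write('*hi*') // Chunks are buffered; parsing work happens incrementally.
s.end('!')      // Final chunk: compiles and emits the HTML, then ends.
// stdout receives something like: '<p><em>hi</em>!</p>'
```
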
1
node_modules/micromark/stream.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"stream.d.ts","sourceRoot":"","sources":["stream.js"],"names":[],"mappings":"AA6BA;;;;;;;;;;;;;;GAcG;AACH,iCALW,OAAO,GAAG,IAAI,GAAG,SAAS,GAExB,aAAa,CAoOzB;sBAxQY,OAAO,sBAAsB,EAAE,OAAO;;;;6BAMtC,SAAS;;;;;;;;UAKR,OAAO,GAAG,IAAI,GAAG,SAAS;;;;;4BAG3B,IAAI,CAAC,MAAM,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,EAAE,UAAU,GAAG,OAAO,GAAG,MAAM,GAAG,QAAQ,GAAG,aAAa,GAAG,QAAQ,GAAG,SAAS,GAAG,MAAM,CAAC"}
256
node_modules/micromark/stream.js
generated
vendored
Normal file
@@ -0,0 +1,256 @@
/**
 * @import {Encoding, Value} from 'micromark-util-types'
 */

/**
 * @typedef {import('micromark-util-types').Options} Options
 */

/**
 * @callback Callback
 *   Function called when write was successful.
 * @returns {undefined}
 *   Nothing.
 *
 * @typedef PipeOptions
 *   Configuration for piping.
 * @property {boolean | null | undefined} [end]
 *   Whether to end the destination stream when the source stream ends.
 *
 * @typedef {Omit<NodeJS.ReadableStream & NodeJS.WritableStream, 'isPaused' | 'pause' | 'read' | 'resume' | 'setEncoding' | 'unpipe' | 'unshift' | 'wrap'>} MinimalDuplex
 *   Duplex stream.
 */

import { EventEmitter } from 'node:events';
import { compile } from './lib/compile.js';
import { parse } from './lib/parse.js';
import { postprocess } from './lib/postprocess.js';
import { preprocess } from './lib/preprocess.js';

/**
 * Create a duplex (readable and writable) stream.
 *
 * Some of the work to parse markdown can be done streaming, but in the
 * end buffering is required.
 *
 * micromark does not handle errors for you, so you must handle errors on whatever
 * streams you pipe into it.
 * As markdown does not know errors, `micromark` itself does not emit errors.
 *
 * @param {Options | null | undefined} [options]
 *   Configuration (optional).
 * @returns {MinimalDuplex}
 *   Duplex stream.
 */
export function stream(options) {
  const prep = preprocess();
  const tokenize = parse(options).document().write;
  const comp = compile(options);
  /** @type {boolean} */
  let ended;
  const emitter = /** @type {MinimalDuplex} */ new EventEmitter();
  // @ts-expect-error: fine.
  emitter.end = end;
  emitter.pipe = pipe;
  emitter.readable = true;
  emitter.writable = true;
  // @ts-expect-error: fine.
  emitter.write = write;
  return emitter;

  /**
   * Write a chunk into memory.
   *
   * @overload
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Encoding | null | undefined} [encoding]
   *   Character encoding to understand `chunk` as when it’s a `Uint8Array`
   *   (`string`, default: `'utf8'`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @overload
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Callback | Encoding | null | undefined} [encoding]
   *   Character encoding to understand `chunk` as when it’s a `Uint8Array`
   *   (`string`, default: `'utf8'`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   */
  function write(chunk, encoding, callback) {
    if (typeof encoding === 'function') {
      callback = encoding;
      encoding = undefined;
    }
    if (ended) {
      throw new Error('Did not expect `write` after `end`');
    }
    tokenize(prep(chunk || '', encoding));
    if (callback) {
      callback();
    }

    // Signal successful write.
    return true;
  }

  /**
   * End the writing.
   *
   * Passes all arguments as a final `write`.
   *
   * @overload
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Encoding | null | undefined} [encoding]
   *   Character encoding to understand `chunk` as when it’s a `Uint8Array`
   *   (`string`, default: `'utf8'`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @overload
   * @param {Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @overload
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   *
   * @param {Callback | Value | null | undefined} [chunk]
   *   Slice of markdown to parse (`string` or `Uint8Array`).
   * @param {Callback | Encoding | null | undefined} [encoding]
   *   Character encoding to understand `chunk` as when it’s a `Uint8Array`
   *   (`string`, default: `'utf8'`).
   * @param {Callback | null | undefined} [callback]
   *   Function called when write was successful.
   * @returns {boolean}
   *   Whether write was successful.
   */
  function end(chunk, encoding, callback) {
    if (typeof chunk === 'function') {
      encoding = chunk;
      chunk = undefined;
    }
    if (typeof encoding === 'function') {
      callback = encoding;
      encoding = undefined;
    }
    write(chunk, encoding, callback);
    emitter.emit('data', comp(postprocess(tokenize(prep('', encoding, true)))));
    emitter.emit('end');
    ended = true;
    return true;
  }
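
  // Illustrative note (not part of the vendored source): `write` and `end`
  // shuffle their arguments, so `end()`, `end(chunk)`, `end(callback)`, and
  // `end(chunk, encoding, callback)` are all valid; `end` performs a final
  // `write`, then emits all compiled HTML as a single `data` event.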

  /**
   * Pipe the processor into a writable stream.
   *
   * Basically `Stream#pipe`, but inlined and simplified to keep the bundled
   * size down.
   * See: <https://github.com/nodejs/node/blob/43a5170/lib/internal/streams/legacy.js#L13>.
   *
   * @template {NodeJS.WritableStream} Stream
   *   Writable stream.
   * @param {Stream} destination
   *   Stream to pipe into.
   * @param {PipeOptions | null | undefined} [options]
   *   Configuration.
   * @returns {Stream}
   *   Destination stream.
   */
  function pipe(destination, options) {
    emitter.on('data', ondata);
    emitter.on('error', onerror);
    emitter.on('end', cleanup);
    emitter.on('close', cleanup);

    // If the `end` option is not supplied, `destination.end()` will be
    // called when the `end` or `close` events are received.
    // @ts-expect-error `_isStdio` is available on `std{err,out}`
    if (!destination._isStdio && (!options || options.end !== false)) {
      emitter.on('end', onend);
    }
    destination.on('error', onerror);
    destination.on('close', cleanup);
    destination.emit('pipe', emitter);
    return destination;

    /**
     * End destination stream.
     *
     * @returns {undefined}
     *   Nothing.
     */
    function onend() {
      if (destination.end) {
        destination.end();
      }
    }

    /**
     * Handle data.
     *
     * @param {string} chunk
     *   Data.
     * @returns {undefined}
     *   Nothing.
     */
    function ondata(chunk) {
      if (destination.writable) {
        destination.write(chunk);
      }
    }

    /**
     * Clean listeners.
     *
     * @returns {undefined}
     *   Nothing.
     */
    function cleanup() {
      emitter.removeListener('data', ondata);
      emitter.removeListener('end', onend);
      emitter.removeListener('error', onerror);
      emitter.removeListener('end', cleanup);
      emitter.removeListener('close', cleanup);
      destination.removeListener('error', onerror);
      destination.removeListener('close', cleanup);
    }

    /**
     * Close dangling pipes and handle unheard errors.
     *
     * @param {Error | null | undefined} [error]
     *   Error, if any.
     * @returns {undefined}
     *   Nothing.
     */
    function onerror(error) {
      cleanup();
      if (!emitter.listenerCount('error')) {
        throw error; // Unhandled stream error in pipe.
      }
    }
  }
}