Start over, this time in Astro

This commit is contained in:
becarta
2025-05-23 10:34:43 +02:00
parent fe932180cb
commit f40db0f5c9
5933 changed files with 0 additions and 1720591 deletions


@@ -1,12 +0,0 @@
'use strict';

const fontFace = {
    parse: {
        prelude: null,
        block() {
            return this.Block(true);
        }
    }
};

module.exports = fontFace;
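
A minimal usage sketch of the config above, assuming the standard css-tree package and its public parse/walk API (not code from the deleted tree): prelude: null means @font-face takes no prelude, and Block(true) parses the body as a style block of declarations.

const csstree = require('css-tree');

const ast = csstree.parse('@font-face { font-family: "Inter"; src: url(inter.woff2); }');

csstree.walk(ast, (node) => {
    if (node.type === 'Atrule' && node.name === 'font-face') {
        console.log(node.prelude);                                    // null (no prelude for @font-face)
        console.log(node.block.children.toArray().map(n => n.type));  // e.g. [ 'Declaration', 'Declaration' ]
    }
});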


@@ -1,37 +0,0 @@
'use strict';

const types = require('../../tokenizer/types.cjs');

const importAtrule = {
    parse: {
        prelude() {
            const children = this.createList();

            this.skipSC();

            switch (this.tokenType) {
                case types.String:
                    children.push(this.String());
                    break;

                case types.Url:
                case types.Function:
                    children.push(this.Url());
                    break;

                default:
                    this.error('String or url() is expected');
            }

            if (this.lookupNonWSType(0) === types.Ident ||
                this.lookupNonWSType(0) === types.LeftParenthesis) {
                children.push(this.MediaQueryList());
            }

            return children;
        },
        block: null
    }
};

module.exports = importAtrule;
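
A similar sketch for the prelude parser above (again assuming the public css-tree API): the prelude accepts either a quoted string or a url(), optionally followed by a media query list; anything else raises the 'String or url() is expected' error.

const csstree = require('css-tree');

// url() form plus a trailing media query list; a quoted string would also be accepted.
const ast = csstree.parse("@import url('print.css') print;");

csstree.walk(ast, (node) => {
    if (node.type === 'Atrule' && node.name === 'import') {
        // With the prelude() above: a Url node followed by a MediaQueryList node
        console.log(node.prelude.children.toArray().map(n => n.type));  // [ 'Url', 'MediaQueryList' ]
    }
});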


@@ -1,19 +0,0 @@
'use strict';

const fontFace = require('./font-face.cjs');
const _import = require('./import.cjs');
const media = require('./media.cjs');
const nest = require('./nest.cjs');
const page = require('./page.cjs');
const supports = require('./supports.cjs');

const atrule = {
    'font-face': fontFace,
    'import': _import,
    media,
    nest,
    page,
    supports
};

module.exports = atrule;
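
This module is the at-rule dispatch table: each key is an at-keyword name without the '@', and each value supplies the prelude/block consumers used when that at-rule is parsed (css-tree lowercases the name before the lookup; names missing from the map are still parsed, just without a specialized prelude/block handler). A rough sketch of the lookup, where lookupAtruleConfig is a hypothetical illustration rather than css-tree internals and atrule is the map exported above:

// `atrule` is the object exported above; `name` would come from an at-keyword token.
function lookupAtruleConfig(atrule, name) {
    const key = name.toLowerCase();
    return Object.hasOwn(atrule, key) ? atrule[key].parse : null;
}

lookupAtruleConfig(atrule, 'MEDIA');      // the media parse config shown below
lookupAtruleConfig(atrule, 'keyframes');  // null (not in this map)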


@@ -1,16 +0,0 @@
'use strict';
const media = {
parse: {
prelude() {
return this.createSingleNodeList(
this.MediaQueryList()
);
},
block(isStyleBlock = false) {
return this.Block(isStyleBlock);
}
}
};
module.exports = media;


@@ -1,16 +0,0 @@
'use strict';
const nest = {
parse: {
prelude() {
return this.createSingleNodeList(
this.SelectorList()
);
},
block() {
return this.Block(true);
}
}
};
module.exports = nest;


@@ -1,16 +0,0 @@
'use strict';
const page = {
parse: {
prelude() {
return this.createSingleNodeList(
this.SelectorList()
);
},
block() {
return this.Block(true);
}
}
};
module.exports = page;


@@ -1,77 +0,0 @@
'use strict';

const types = require('../../tokenizer/types.cjs');

function consumeRaw() {
    return this.createSingleNodeList(
        this.Raw(this.tokenIndex, null, false)
    );
}

function parentheses() {
    this.skipSC();

    if (this.tokenType === types.Ident &&
        this.lookupNonWSType(1) === types.Colon) {
        return this.createSingleNodeList(
            this.Declaration()
        );
    }

    return readSequence.call(this);
}

function readSequence() {
    const children = this.createList();
    let child;

    this.skipSC();

    scan:
    while (!this.eof) {
        switch (this.tokenType) {
            case types.Comment:
            case types.WhiteSpace:
                this.next();
                continue;

            case types.Function:
                child = this.Function(consumeRaw, this.scope.AtrulePrelude);
                break;

            case types.Ident:
                child = this.Identifier();
                break;

            case types.LeftParenthesis:
                child = this.Parentheses(parentheses, this.scope.AtrulePrelude);
                break;

            default:
                break scan;
        }

        children.push(child);
    }

    return children;
}

const supports = {
    parse: {
        prelude() {
            const children = readSequence.call(this);

            if (this.getFirstListNode(children) === null) {
                this.error('Condition is expected');
            }

            return children;
        },
        block(isStyleBlock = false) {
            return this.Block(isStyleBlock);
        }
    }
};

module.exports = supports;
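
A final sketch (assuming the public css-tree API, not the deleted tree): with the config above, readSequence() turns an @supports condition into a flat list of Parentheses, Identifier and Function nodes, and the Ident-plus-Colon lookahead in parentheses() parses '(display: grid)' as a Declaration nested inside the Parentheses.

const csstree = require('css-tree');

const ast = csstree.parse('@supports (display: grid) and (gap: 1rem) { .grid { display: grid } }');

csstree.walk(ast, (node) => {
    if (node.type === 'Atrule' && node.name === 'supports') {
        // Flat prelude sequence produced by readSequence()
        console.log(node.prelude.children.toArray().map(n => n.type));  // [ 'Parentheses', 'Identifier', 'Parentheses' ]
    }
});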