first commit
This commit is contained in:
27
build/node_modules/css-tree/lib/convertor/create.js
generated
vendored
Normal file
27
build/node_modules/css-tree/lib/convertor/create.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
var List = require('../utils/list');
|
||||
|
||||
module.exports = function createConvertors(walker) {
|
||||
var walk = walker.walk;
|
||||
var walkUp = walker.walkUp;
|
||||
|
||||
return {
|
||||
fromPlainObject: function(ast) {
|
||||
walk(ast, function(node) {
|
||||
if (node.children && node.children instanceof List === false) {
|
||||
node.children = new List().fromArray(node.children);
|
||||
}
|
||||
});
|
||||
|
||||
return ast;
|
||||
},
|
||||
toPlainObject: function(ast) {
|
||||
walkUp(ast, function(node) {
|
||||
if (node.children && node.children instanceof List) {
|
||||
node.children = node.children.toArray();
|
||||
}
|
||||
});
|
||||
|
||||
return ast;
|
||||
}
|
||||
};
|
||||
};
|
||||
3
build/node_modules/css-tree/lib/convertor/index.js
generated
vendored
Normal file
3
build/node_modules/css-tree/lib/convertor/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
var createConvertor = require('./create');
|
||||
|
||||
module.exports = createConvertor(require('../walker'));
|
||||
161
build/node_modules/css-tree/lib/generator/create.js
generated
vendored
Normal file
161
build/node_modules/css-tree/lib/generator/create.js
generated
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
'use strict';
|
||||
|
||||
var sourceMapGenerator = require('./sourceMap');
|
||||
var hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
var noop = function() {};
|
||||
|
||||
// Emit every child of `node` in order, delegating each list item to
// this.generate(). Must be called with a generator context as `this`.
function each(processChunk, node) {
    var children = node.children;

    for (var item = children.head; item !== null; item = item.next) {
        this.generate(processChunk, item.data, item, children);
    }
}
|
||||
|
||||
// Emit every child of `node`, inserting a ',' chunk between consecutive
// children. Must be called with a generator context as `this`.
function eachComma(processChunk, node) {
    var children = node.children;

    for (var item = children.head; item !== null; item = item.next) {
        // every item except the head is preceded by a comma
        if (item.prev) {
            processChunk(',');
        }

        this.generate(processChunk, item.data, item, children);
    }
}
|
||||
|
||||
// Builds a translate(node[, fn]) function over a map of per-node-type
// generate handlers. With no `fn`, chunks are joined into one string;
// with `fn`, each chunk is passed to the caller as produced.
function createGenerator(types) {
    var context = {
        // Dispatch a node to its type handler; unknown types are a hard error.
        generate: function(processChunk, node, item, list) {
            if (hasOwnProperty.call(types, node.type) === false) {
                throw new Error('Unknown node type: ' + node.type);
            }

            types[node.type].call(this, processChunk, node, item, list);
        },
        each: each,
        eachComma: eachComma
    };

    return function(node, fn) {
        if (typeof fn === 'function') {
            // caller consumes chunks itself
            context.generate(fn, node);
            return;
        }

        // default generator concats all chunks in a single string
        var chunks = [];
        context.generate(function(chunk) {
            chunks.push(chunk);
        }, node);

        return chunks.join('');
    };
}
|
||||
|
||||
// Builds a generator that first renders `node` into a nested markup tree of
// { node, value } wrappers (value = list of string chunks and sub-wrappers),
// then serializes it depth-first while reporting the generated line/column
// to the enter/leave callbacks for every wrapper.
function createMarkupGenerator(types) {
    var context = {
        // Render one node into a { node, value } wrapper via its type handler.
        generate: function(processChunk, node, item, list) {
            if (hasOwnProperty.call(types, node.type) === false) {
                throw new Error('Unknown node type: ' + node.type);
            }

            var nodeChunks = [];
            types[node.type].call(this, function(chunk) {
                nodeChunks.push(chunk);
            }, node, item, list);
            processChunk({
                node: node,
                value: nodeChunks
            });
        },
        each: each,
        eachComma: eachComma
    };

    return function(node, enter, leave) {
        var line = 1;
        var column = 0;

        // Advance line/column counters over `str` and pass it through.
        function updatePos(str) {
            for (var i = 0; i < str.length; i++) {
                if (str.charCodeAt(i) === 10) { // \n
                    line++;
                    column = 0;
                } else {
                    column++;
                }
            }

            return str;
        }

        // Depth-first serialization of a markup wrapper into `result`.
        function walk(wrapper, result) {
            var value = wrapper.value;

            enter(wrapper.node, result, line, column);

            if (typeof value === 'string') {
                result += updatePos(value);
            } else {
                for (var i = 0; i < value.length; i++) {
                    if (typeof value[i] === 'string') {
                        result += updatePos(value[i]);
                    } else {
                        result = walk(value[i], result);
                    }
                }
            }

            leave(wrapper.node, result, line, column);

            return result;
        }

        if (typeof enter !== 'function') {
            enter = noop;
        }
        if (typeof leave !== 'function') {
            leave = noop;
        }

        var roots = [];
        context.generate(function() {
            roots.push.apply(roots, arguments);
        }, node);

        // a single root wrapper is produced for the top-level node
        return walk(roots[0], '');
    };
}
|
||||
|
||||
// Collect a { nodeType: generateFn } map from a syntax config's node section.
// Returns an empty map when the config defines no nodes.
function getTypesFromConfig(config) {
    var result = {};

    if (config.node) {
        for (var type in config.node) {
            result[type] = config.node[type].generate;
        }
    }

    return result;
}
|
||||
|
||||
module.exports = function(config) {
|
||||
var types = getTypesFromConfig(config);
|
||||
var markupGenerator = createMarkupGenerator(types);
|
||||
|
||||
return {
|
||||
translate: createGenerator(types),
|
||||
translateWithSourceMap: function(node) {
|
||||
return sourceMapGenerator(markupGenerator, node);
|
||||
},
|
||||
translateMarkup: markupGenerator
|
||||
};
|
||||
};
|
||||
|
||||
module.exports.createGenerator = createGenerator;
|
||||
module.exports.createMarkupGenerator = createMarkupGenerator;
|
||||
module.exports.sourceMap = require('./sourceMap');
|
||||
4
build/node_modules/css-tree/lib/generator/index.js
generated
vendored
Normal file
4
build/node_modules/css-tree/lib/generator/index.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
var createGenerator = require('./create');
|
||||
var config = require('../syntax/config/parser');
|
||||
|
||||
module.exports = createGenerator(config);
|
||||
78
build/node_modules/css-tree/lib/generator/sourceMap.js
generated
vendored
Normal file
78
build/node_modules/css-tree/lib/generator/sourceMap.js
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
'use strict';
|
||||
|
||||
var SourceMapGenerator = require('source-map').SourceMapGenerator;
|
||||
var trackNodes = {
|
||||
Atrule: true,
|
||||
Selector: true,
|
||||
Declaration: true
|
||||
};
|
||||
|
||||
module.exports = function generateSourceMap(generator, ast) {
|
||||
var map = new SourceMapGenerator();
|
||||
var generated = {
|
||||
line: 1,
|
||||
column: 0
|
||||
};
|
||||
var original = {
|
||||
line: 0, // should be zero to add first mapping
|
||||
column: 0
|
||||
};
|
||||
var sourceMappingActive = false;
|
||||
var activatedGenerated = {
|
||||
line: 1,
|
||||
column: 0
|
||||
};
|
||||
var activatedMapping = {
|
||||
generated: activatedGenerated
|
||||
};
|
||||
|
||||
var css = generator(ast, function(node, buffer, line, column) {
|
||||
if (!node.loc ||
|
||||
!node.loc.start ||
|
||||
!trackNodes.hasOwnProperty(node.type)) {
|
||||
return;
|
||||
}
|
||||
|
||||
var nodeLine = node.loc.start.line;
|
||||
var nodeColumn = node.loc.start.column - 1;
|
||||
|
||||
if (original.line !== nodeLine ||
|
||||
original.column !== nodeColumn) {
|
||||
original.line = nodeLine;
|
||||
original.column = nodeColumn;
|
||||
|
||||
generated.line = line;
|
||||
generated.column = column;
|
||||
|
||||
if (sourceMappingActive) {
|
||||
sourceMappingActive = false;
|
||||
if (generated.line !== activatedGenerated.line ||
|
||||
generated.column !== activatedGenerated.column) {
|
||||
map.addMapping(activatedMapping);
|
||||
}
|
||||
}
|
||||
|
||||
sourceMappingActive = true;
|
||||
map.addMapping({
|
||||
source: node.loc.source,
|
||||
original: original,
|
||||
generated: generated
|
||||
});
|
||||
}
|
||||
|
||||
}, function(node, buffer, line, column) {
|
||||
if (sourceMappingActive && trackNodes.hasOwnProperty(node.type)) {
|
||||
activatedGenerated.line = line;
|
||||
activatedGenerated.column = column;
|
||||
}
|
||||
});
|
||||
|
||||
if (sourceMappingActive) {
|
||||
map.addMapping(activatedMapping);
|
||||
}
|
||||
|
||||
return {
|
||||
css: css,
|
||||
map: map
|
||||
};
|
||||
};
|
||||
3
build/node_modules/css-tree/lib/index.js
generated
vendored
Normal file
3
build/node_modules/css-tree/lib/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = require('./syntax');
|
||||
347
build/node_modules/css-tree/lib/lexer/Lexer.js
generated
vendored
Normal file
347
build/node_modules/css-tree/lib/lexer/Lexer.js
generated
vendored
Normal file
@@ -0,0 +1,347 @@
|
||||
'use strict';
|
||||
|
||||
var SyntaxReferenceError = require('./error').SyntaxReferenceError;
|
||||
var MatchError = require('./error').MatchError;
|
||||
var names = require('../utils/names');
|
||||
var generic = require('./generic');
|
||||
var parse = require('./grammar/parse');
|
||||
var translate = require('./grammar/translate');
|
||||
var walk = require('./grammar/walk');
|
||||
var match = require('./match');
|
||||
var trace = require('./trace');
|
||||
var search = require('./search');
|
||||
var getStructureFromConfig = require('./structure').getStructureFromConfig;
|
||||
var cssWideKeywords = parse('inherit | initial | unset');
|
||||
var cssWideKeywordsWithExpression = parse('inherit | initial | unset | <expression>');
|
||||
|
||||
// Serialize a properties/types descriptor map for dump(): keeps only entries
// that define a syntax, as a grammar AST when syntaxAsAst is truthy,
// otherwise as a translated grammar string.
function dumpMapSyntax(map, syntaxAsAst) {
    var dump = {};

    for (var name in map) {
        var syntax = map[name].syntax;

        if (syntax) {
            dump[name] = syntaxAsAst ? syntax : translate(syntax);
        }
    }

    return dump;
}
|
||||
|
||||
// Extract the AST node stored in a list item; passes falsy items through.
function unwrapNode(item) {
    return item ? item.data : item;
}
|
||||
|
||||
// Check whether a value AST contains a var() function anywhere inside.
// Must be invoked with a lexer as `this` (uses this.syntax.walk).
function valueHasVar(value) {
    var found = false;

    this.syntax.walk(value, function(node) {
        if (node.type === 'Function' && node.name.toLowerCase() === 'var') {
            found = true;
        }
    });

    return found;
}
|
||||
|
||||
// True for the old-IE \0 / \9 value hacks, which parse as Identifier nodes
// whose name starts with a backslash followed by 0 or 9.
function isHack(node) {
    if (node.type !== 'Identifier') {
        return false;
    }

    return /^\\[09]/.test(node.name);
}
|
||||
|
||||
// White space, comments and old-IE hacks may be safely ignored when they
// trail a matched value; anything else means the match is incomplete.
function isNextMayToBeIgnored(cursor) {
    for (; cursor !== null; cursor = cursor.next) {
        var type = cursor.data.type;

        if (type !== 'WhiteSpace' &&
            type !== 'Comment' &&
            !isHack(cursor.data)) {
            return false;
        }
    }

    return true;
}
|
||||
|
||||
// Wrap a match tree (or null) and an error (or null) into the public
// match-result shape, attaching the trace inspection helpers.
function buildMatchResult(match, error) {
    return {
        matched: match,
        error: error,
        getTrace: trace.getTrace,
        isType: trace.isType,
        isProperty: trace.isProperty,
        isKeyword: trace.isKeyword
    };
}
|
||||
|
||||
// Match a Value AST node against a property/type descriptor. Tries the
// CSS-wide keyword syntax first, then the descriptor's own syntax, and
// wraps the outcome via buildMatchResult().
function matchSyntax(lexer, syntax, value) {
    if (!value || value.type !== 'Value') {
        return buildMatchResult(null, new Error('Not a Value node'));
    }

    // values containing var() can't be matched reliably
    if (valueHasVar.call(lexer, value)) {
        return buildMatchResult(null, new Error('Matching for a value with var() is not supported'));
    }

    var result = match(lexer, lexer.valueCommonSyntax, value.children.head);

    if (!result.match) {
        result = syntax.match(value.children.head);
        if (!result.match) {
            return buildMatchResult(null, new MatchError('Mismatch', lexer, syntax.syntax, value, result.badNode || unwrapNode(result.next) || value));
        }
    }

    // enhance top-level match wrapper
    if (result.match.type === 'ASTNode') {
        result.match = {
            syntax: {
                type: syntax.type,
                name: syntax.name
            },
            match: [result.match]
        };
    } else if (result.match.syntax.type === 'Group') {
        result.match.syntax = {
            type: syntax.type,
            name: syntax.name
        };
    }

    // trailing nodes are allowed only when ignorable (spaces/comments/hacks)
    if (result.next && !isNextMayToBeIgnored(result.next)) {
        return buildMatchResult(null, new MatchError('Uncomplete match', lexer, syntax.syntax, value, result.badNode || unwrapNode(result.next) || value));
    }

    return buildMatchResult(result.match, null);
}
|
||||
|
||||
// A Lexer holds property/type descriptors built from a syntax config and
// provides syntax matching and validation over css-tree ASTs.
var Lexer = function(config, syntax, structure) {
    this.valueCommonSyntax = cssWideKeywords;
    this.syntax = syntax;
    this.generic = false;
    this.properties = {};
    this.types = {};
    this.structure = structure || getStructureFromConfig(config);

    if (!config) {
        return;
    }

    var name;

    if (config.generic) {
        this.generic = true;
        for (name in generic) {
            this.addType_(name, generic[name]);
        }
    }

    if (config.types) {
        for (name in config.types) {
            this.addType_(name, config.types[name]);
        }
    }

    if (config.properties) {
        for (name in config.properties) {
            this.addProperty_(name, config.properties[name]);
        }
    }
};
|
||||
|
||||
Lexer.prototype = {
    structure: {},

    // Walk `ast` and check each node against the structure definition.
    // Returns an array of { node, message } warnings, or false when clean.
    checkStructure: function(ast) {
        var structure = this.structure;
        var warnings = [];

        function collectWarning(node, message) {
            warnings.push({
                node: node,
                message: message
            });
        }

        this.syntax.walk(ast, function(node) {
            if (structure.hasOwnProperty(node.type)) {
                structure[node.type].check(node, collectWarning);
            } else {
                collectWarning(node, 'Unknown node type `' + node.type + '`');
            }
        });

        return warnings.length ? warnings : false;
    },

    // Build a descriptor for a property or type. `syntax` may be a grammar
    // string (parsed lazily on first access), a grammar AST, or a custom
    // match function.
    createDescriptor: function(syntax, type, name) {
        var self = this;
        var descriptor = {
            type: type,
            name: name,
            syntax: null,
            match: null
        };

        if (typeof syntax === 'function') {
            // convert syntax to pseudo syntax node
            // NOTE: that's not a part of match result tree
            syntax = {
                type: 'ASTNode',
                match: syntax
            };

            descriptor.match = function(item) {
                return match(self, syntax, item);
            };

            return descriptor;
        }

        if (typeof syntax === 'string') {
            // lazy parsing on first access; the getter replaces itself
            // with a plain value property holding the parsed grammar
            Object.defineProperty(descriptor, 'syntax', {
                get: function() {
                    Object.defineProperty(descriptor, 'syntax', {
                        value: parse(syntax)
                    });

                    return descriptor.syntax;
                }
            });
        } else {
            descriptor.syntax = syntax;
        }

        descriptor.match = function(item) {
            return match(self, descriptor.syntax, item);
        };

        return descriptor;
    },
    addProperty_: function(name, syntax) {
        this.properties[name] = this.createDescriptor(syntax, 'Property', name);
    },
    addType_: function(name, syntax) {
        this.types[name] = this.createDescriptor(syntax, 'Type', name);

        // once expression() is known, widen the common value syntax
        if (syntax === generic.expression) {
            this.valueCommonSyntax = cssWideKeywordsWithExpression;
        }
    },

    // Match a Declaration node's value against its property syntax.
    matchDeclaration: function(node) {
        if (node.type !== 'Declaration') {
            return buildMatchResult(null, new Error('Not a Declaration node'));
        }

        return this.matchProperty(node.property, node.value);
    },
    // Match `value` against the syntax of `propertyName`; a vendor-prefixed
    // name falls back to the unprefixed property when no prefixed entry exists.
    matchProperty: function(propertyName, value) {
        var property = names.property(propertyName);

        // don't match syntax for a custom property
        if (property.custom) {
            return buildMatchResult(null, new Error('Lexer matching doesn\'t applicable for custom properties'));
        }

        var propertySyntax = property.vendor
            ? this.getProperty(property.vendor + property.name) || this.getProperty(property.name)
            : this.getProperty(property.name);

        if (!propertySyntax) {
            return buildMatchResult(null, new SyntaxReferenceError('Unknown property', propertyName));
        }

        return matchSyntax(this, propertySyntax, value);
    },
    // Match `value` against a named type.
    matchType: function(typeName, value) {
        var typeSyntax = this.getType(typeName);

        if (!typeSyntax) {
            return buildMatchResult(null, new SyntaxReferenceError('Unknown type', typeName));
        }

        return matchSyntax(this, typeSyntax, value);
    },

    // Fragment search helpers (delegate to the search module).
    findValueFragments: function(propertyName, value, type, name) {
        return search.matchFragments(this, value, this.matchProperty(propertyName, value), type, name);
    },
    findDeclarationValueFragments: function(declaration, type, name) {
        return search.matchFragments(this, declaration.value, this.matchDeclaration(declaration), type, name);
    },
    findAllFragments: function(ast, type, name) {
        var result = [];

        this.syntax.walkDeclarations(ast, function(declaration) {
            result.push.apply(result, this.findDeclarationValueFragments(declaration, type, name));
        }.bind(this));

        return result;
    },

    getProperty: function(name) {
        return this.properties.hasOwnProperty(name) ? this.properties[name] : null;
    },
    getType: function(name) {
        return this.types.hasOwnProperty(name) ? this.types[name] : null;
    },

    // Find descriptors whose syntax refers to unknown or broken types or
    // properties. Returns { types, properties } name lists, or null when ok.
    validate: function() {
        function checkDescriptor(lexer, name, broken, descriptor) {
            if (broken.hasOwnProperty(name)) {
                return broken[name];
            }

            broken[name] = false; // mark in-progress so cycles don't recurse forever
            if (descriptor.syntax !== null) {
                walk(descriptor.syntax, function(node) {
                    if (node.type !== 'Type' && node.type !== 'Property') {
                        return;
                    }

                    var map = node.type === 'Type' ? lexer.types : lexer.properties;
                    var brokenMap = node.type === 'Type' ? brokenTypes : brokenProperties;

                    // unknown reference, or reference to a broken descriptor
                    if (!map.hasOwnProperty(node.name) || checkDescriptor(lexer, node.name, brokenMap, map[node.name])) {
                        broken[name] = true;
                    }
                }, this);
            }
        }

        var brokenTypes = {};
        var brokenProperties = {};

        for (var key in this.types) {
            checkDescriptor(this, key, brokenTypes, this.types[key]);
        }

        for (var key in this.properties) {
            checkDescriptor(this, key, brokenProperties, this.properties[key]);
        }

        brokenTypes = Object.keys(brokenTypes).filter(function(name) {
            return brokenTypes[name];
        });
        brokenProperties = Object.keys(brokenProperties).filter(function(name) {
            return brokenProperties[name];
        });

        if (brokenTypes.length || brokenProperties.length) {
            return {
                types: brokenTypes,
                properties: brokenProperties
            };
        }

        return null;
    },
    // Serialize the lexer definition; syntaxes as strings unless syntaxAsAst.
    dump: function(syntaxAsAst) {
        return {
            generic: this.generic,
            types: dumpMapSyntax(this.types, syntaxAsAst),
            properties: dumpMapSyntax(this.properties, syntaxAsAst)
        };
    },
    toString: function() {
        return JSON.stringify(this.dump());
    }
};
|
||||
|
||||
module.exports = Lexer;
|
||||
62
build/node_modules/css-tree/lib/lexer/error.js
generated
vendored
Normal file
62
build/node_modules/css-tree/lib/lexer/error.js
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
'use strict';
|
||||
|
||||
var createCustomError = require('../utils/createCustomError');
|
||||
var translateGrammar = require('./grammar/translate');
|
||||
|
||||
// Pick an { offset, line, column } triple from node.loc[point]
// ('start' or 'end'), or null when the node carries no location info.
function getLocation(node, point) {
    var loc = node && node.loc && node.loc[point];

    if (!loc) {
        return null;
    }

    return {
        offset: loc.offset,
        line: loc.line,
        column: loc.column
    };
}
|
||||
|
||||
// Error for a reference to an unknown property or type name.
var SyntaxReferenceError = function(type, referenceName) {
    var error = createCustomError(
        'SyntaxReferenceError',
        type + ' `' + referenceName + '`'
    );

    error.reference = referenceName;

    return error;
};
|
||||
|
||||
// Build a SyntaxMatchError describing where `badNode` failed to match
// `syntax` inside `value`. The value is serialized to locate the
// character offset of the mismatch for the caret diagram in the message.
var MatchError = function(message, lexer, syntax, value, badNode) {
    var error = createCustomError('SyntaxMatchError', message);
    var start = getLocation(badNode, 'start');
    var end = getLocation(badNode, 'end');
    var mismatchOffset = -1;
    var css = lexer.syntax.translateMarkup(value, function(node, buffer) {
        // the mismatch offset is the output length when the bad node is entered
        if (node === badNode) {
            mismatchOffset = buffer.length;
        }
    });

    // bad node never reached → point past the end of the value
    if (mismatchOffset === -1) {
        mismatchOffset = css.length;
    }

    error.rawMessage = message;
    error.syntax = syntax ? translateGrammar(syntax) : '<generic>';
    error.css = css;
    error.mismatchOffset = mismatchOffset;
    error.loc = {
        source: (badNode && badNode.loc && badNode.loc.source) || '<unknown>',
        start: start,
        end: end
    };
    error.line = start ? start.line : undefined;
    error.column = start ? start.column : undefined;
    error.offset = start ? start.offset : undefined;
    error.message = message + '\n' +
        ' syntax: ' + error.syntax + '\n' +
        ' value: ' + (error.css || '<empty string>') + '\n' +
        ' --------' + new Array(error.mismatchOffset + 1).join('-') + '^';

    return error;
};
|
||||
|
||||
module.exports = {
|
||||
SyntaxReferenceError: SyntaxReferenceError,
|
||||
MatchError: MatchError
|
||||
};
|
||||
221
build/node_modules/css-tree/lib/lexer/generic.js
generated
vendored
Normal file
221
build/node_modules/css-tree/lib/lexer/generic.js
generated
vendored
Normal file
@@ -0,0 +1,221 @@
|
||||
'use strict';
|
||||
|
||||
var names = require('../utils/names.js');
|
||||
|
||||
// https://www.w3.org/TR/css-values-3/#lengths
var LENGTH = {
    // absolute length units
    'px': true,
    'mm': true,
    'cm': true,
    'in': true,
    'pt': true,
    'pc': true,
    'q': true,

    // relative length units
    'em': true,
    'ex': true,
    'ch': true,
    'rem': true,

    // viewport-percentage lengths
    'vh': true,
    'vw': true,
    'vmin': true,
    'vmax': true,
    'vm': true
};

// https://www.w3.org/TR/css-values-3/#angles
var ANGLE = {
    'deg': true,
    'grad': true,
    'rad': true,
    'turn': true
};

// https://www.w3.org/TR/css-values-3/#time
var TIME = {
    's': true,
    'ms': true
};

// https://www.w3.org/TR/css-values-3/#frequency
var FREQUENCY = {
    'hz': true,
    'khz': true
};

// https://www.w3.org/TR/css-values-3/#resolution (https://drafts.csswg.org/css-values/#resolution)
var RESOLUTION = {
    'dpi': true,
    'dpcm': true,
    'dppx': true,
    'x': true // https://github.com/w3c/csswg-drafts/issues/461
};

// https://drafts.csswg.org/css-grid/#fr-unit
var FLEX = {
    'fr': true
};

// https://www.w3.org/TR/css3-speech/#mixing-props-voice-volume
var DECIBEL = {
    'db': true
};

// https://www.w3.org/TR/css3-speech/#voice-props-voice-pitch
var SEMITONES = {
    'st': true
};
|
||||
|
||||
// calc() can be used wherever <length>, <frequency>, <angle>, <time>,
// <percentage>, <number>, or <integer> values are allowed.
// https://drafts.csswg.org/css-values/#calc-notation
function isCalc(node) {
    if (node.data.type !== 'Function') {
        return false;
    }

    var keyword = names.keyword(node.data.name);

    if (keyword.name !== 'calc') {
        return false;
    }

    // there were some prefixed implementations
    switch (keyword.vendor) {
        case '':
        case '-moz-':
        case '-webkit-':
            return true;
    }

    return false;
}
|
||||
|
||||
// Build a checker that accepts exactly one AST node type.
function astNode(expectedType) {
    return function(node) {
        return node.data.type === expectedType;
    };
}
|
||||
|
||||
// Build a checker for a Dimension restricted to the given unit table;
// calc() is always accepted.
function dimension(type) {
    return function(node) {
        if (isCalc(node)) {
            return true;
        }

        return node.data.type === 'Dimension' && type.hasOwnProperty(node.data.unit.toLowerCase());
    };
}
|
||||
|
||||
// Like dimension(), but additionally accepts a unitless zero
// (e.g. `0` as a <length> or <angle>); calc() is always accepted.
function zeroUnitlessDimension(type) {
    return function(node) {
        if (isCalc(node)) {
            return true;
        }

        if (node.data.type === 'Dimension') {
            return type.hasOwnProperty(node.data.unit.toLowerCase());
        }

        return node.data.type === 'Number' && Number(node.data.value) === 0;
    };
}
|
||||
|
||||
// attr() function node; the name is matched case-insensitively.
function attr(node) {
    if (node.data.type !== 'Function') {
        return false;
    }

    return node.data.name.toLowerCase() === 'attr';
}
|
||||
|
||||
// <number>: any Number node, or calc().
function number(node) {
    if (isCalc(node)) {
        return true;
    }

    return node.data.type === 'Number';
}
|
||||
|
||||
// Number in the closed range [0, 1].
// NOTE(review): a calc() node passes the type test but has no `value`, so
// Number(undefined) is NaN and the range check fails — matches upstream.
function numberZeroOne(node) {
    if (!isCalc(node) && node.data.type !== 'Number') {
        return false;
    }

    var value = Number(node.data.value);

    return value >= 0 && value <= 1;
}
|
||||
|
||||
// Number greater than or equal to 1.
// NOTE(review): as with numberZeroOne, calc() fails the range check because
// it has no `value` — matches upstream behavior.
function numberOneOrGreater(node) {
    if (!isCalc(node) && node.data.type !== 'Number') {
        return false;
    }

    return Number(node.data.value) >= 1;
}
|
||||
|
||||
// <integer>: a Number with no decimal point, or calc().
// TODO: fail on 10e-2
function integer(node) {
    if (isCalc(node)) {
        return true;
    }

    return node.data.type === 'Number' && node.data.value.indexOf('.') === -1;
}
|
||||
|
||||
// Non-negative <integer>: no decimal point and no leading minus, or calc().
// TODO: fail on 10e-2
function positiveInteger(node) {
    if (isCalc(node)) {
        return true;
    }

    return node.data.type === 'Number' &&
        node.data.value.indexOf('.') === -1 &&
        node.data.value.charAt(0) !== '-';
}
|
||||
|
||||
// <percentage>: a Percentage node, or calc().
function percentage(node) {
    if (isCalc(node)) {
        return true;
    }

    return node.data.type === 'Percentage';
}
|
||||
|
||||
// <hex-color>: a HexColor node whose value is exactly 3, 4, 6 or 8 hex digits.
function hexColor(node) {
    if (node.data.type !== 'HexColor') {
        return false;
    }

    var hex = node.data.value;

    if (!/^[0-9a-fA-F]{3,8}$/.test(hex)) {
        return false;
    }

    // 5- and 7-digit values are not valid colors
    return hex.length === 3 || hex.length === 4 || hex.length === 6 || hex.length === 8;
}
|
||||
|
||||
// Old-IE expression(...) function; name matched case-insensitively.
function expression(node) {
    if (node.data.type !== 'Function') {
        return false;
    }

    return node.data.name.toLowerCase() === 'expression';
}
|
||||
|
||||
// https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident
// https://drafts.csswg.org/css-values-4/#identifier-value
function customIdent(node) {
    if (node.data.type !== 'Identifier') {
        return false;
    }

    var name = node.data.name.toLowerCase();

    // § 3.2. Author-defined Identifiers: the <custom-ident> type
    // The CSS-wide keywords are not valid <custom-ident>s, and
    // `default` is reserved as well.
    switch (name) {
        case 'unset':
        case 'initial':
        case 'inherit':
        case 'default':
            return false;
    }

    // TODO: ignore property specific keywords (as described https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident)

    return true;
}
|
||||
|
||||
module.exports = {
|
||||
'angle': zeroUnitlessDimension(ANGLE),
|
||||
'attr()': attr,
|
||||
'custom-ident': customIdent,
|
||||
'decibel': dimension(DECIBEL),
|
||||
'dimension': astNode('Dimension'),
|
||||
'frequency': dimension(FREQUENCY),
|
||||
'flex': dimension(FLEX),
|
||||
'hex-color': hexColor,
|
||||
'id-selector': astNode('IdSelector'), // element( <id-selector> )
|
||||
'ident': astNode('Identifier'),
|
||||
'integer': integer,
|
||||
'length': zeroUnitlessDimension(LENGTH),
|
||||
'number': number,
|
||||
'number-zero-one': numberZeroOne,
|
||||
'number-one-or-greater': numberOneOrGreater,
|
||||
'percentage': percentage,
|
||||
'positive-integer': positiveInteger,
|
||||
'resolution': dimension(RESOLUTION),
|
||||
'semitones': dimension(SEMITONES),
|
||||
'string': astNode('String'),
|
||||
'time': dimension(TIME),
|
||||
'unicode-range': astNode('UnicodeRange'),
|
||||
'url': astNode('Url'),
|
||||
|
||||
// old IE stuff
|
||||
'progid': astNode('Raw'),
|
||||
'expression': expression
|
||||
};
|
||||
20
build/node_modules/css-tree/lib/lexer/grammar/error.js
generated
vendored
Normal file
20
build/node_modules/css-tree/lib/lexer/grammar/error.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
'use strict';
|
||||
|
||||
var createCustomError = require('../../utils/createCustomError');
|
||||
|
||||
// Error raised when a grammar definition string fails to parse; carries the
// raw message, the offending syntax string, and the failure offset, and
// renders a caret diagram pointing at the offset.
var SyntaxParseError = function(message, syntaxStr, offset) {
    var error = createCustomError('SyntaxParseError', message);

    error.rawMessage = message;
    error.syntax = syntaxStr;
    error.offset = offset;
    error.message = error.rawMessage + '\n' +
        ' ' + error.syntax + '\n' +
        '--' + new Array((error.offset || error.syntax.length) + 1).join('-') + '^';

    return error;
};
|
||||
|
||||
module.exports = {
|
||||
SyntaxParseError: SyntaxParseError
|
||||
};
|
||||
6
build/node_modules/css-tree/lib/lexer/grammar/index.js
generated
vendored
Normal file
6
build/node_modules/css-tree/lib/lexer/grammar/index.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
module.exports = {
|
||||
SyntaxParseError: require('./error').SyntaxParseError,
|
||||
parse: require('./parse'),
|
||||
translate: require('./translate'),
|
||||
walk: require('./walk')
|
||||
};
|
||||
503
build/node_modules/css-tree/lib/lexer/grammar/parse.js
generated
vendored
Normal file
503
build/node_modules/css-tree/lib/lexer/grammar/parse.js
generated
vendored
Normal file
@@ -0,0 +1,503 @@
|
||||
'use strict';
|
||||
|
||||
var SyntaxParseError = require('./error').SyntaxParseError;
|
||||
|
||||
// character codes used by the grammar tokenizer
var TAB = 9;
var N = 10;
var F = 12;
var R = 13;
var SPACE = 32;
var EXCLAMATIONMARK = 33;    // !
var NUMBERSIGN = 35;         // #
var PERCENTSIGN = 37;        // %
var AMPERSAND = 38;          // &
var APOSTROPHE = 39;         // '
var LEFTPARENTHESIS = 40;    // (
var RIGHTPARENTHESIS = 41;   // )
var ASTERISK = 42;           // *
var PLUSSIGN = 43;           // +
var COMMA = 44;              // ,
var SOLIDUS = 47;            // /
var LESSTHANSIGN = 60;       // <
var GREATERTHANSIGN = 62;    // >
var QUESTIONMARK = 63;       // ?
var LEFTSQUAREBRACKET = 91;  // [
var RIGHTSQUAREBRACKET = 93; // ]
var LEFTCURLYBRACKET = 123;  // {
var VERTICALLINE = 124;      // |
var RIGHTCURLYBRACKET = 125; // }

// binding strength of grammar combinators, weakest (juxtaposition) first
var COMBINATOR_PRECEDENCE = {
    ' ': 1,
    '&&': 2,
    '||': 3,
    '|': 4
};

// multiplier presets; max === 0 means "unbounded"
var MULTIPLIER_DEFAULT = {
    comma: false,
    min: 1,
    max: 1
};
var MULTIPLIER_ZERO_OR_MORE = { // *
    comma: false,
    min: 0,
    max: 0
};
var MULTIPLIER_ONE_OR_MORE = { // +
    comma: false,
    min: 1,
    max: 0
};
var MULTIPLIER_ONE_OR_MORE_COMMA_SEPARATED = { // #
    comma: true,
    min: 1,
    max: 0
};
var MULTIPLIER_ZERO_OR_ONE = { // ?
    comma: false,
    min: 0,
    max: 1
};

// lookup table: NAME_CHAR[code] === 1 when code is a keyword char [a-zA-Z0-9-]
var NAME_CHAR = (function() {
    var array = typeof Uint32Array === 'function' ? new Uint32Array(128) : new Array(128);
    for (var i = 0; i < 128; i++) {
        array[i] = /[a-zA-Z0-9\-]/.test(String.fromCharCode(i)) ? 1 : 0;
    }
    return array;
})();
|
||||
|
||||
// Minimal cursor over a grammar definition string.
var Tokenizer = function(str) {
    this.str = str;
    this.pos = 0;
};
Tokenizer.prototype = {
    // char code at the current position; 0 past end of input
    charCode: function() {
        if (this.pos < this.str.length) {
            return this.str.charCodeAt(this.pos);
        }

        return 0;
    },
    // char code right after the current position; 0 when out of bounds
    nextCharCode: function() {
        if (this.pos + 1 < this.str.length) {
            return this.str.charCodeAt(this.pos + 1);
        }

        return 0;
    },
    // consume and return the substring from the current position up to `end`
    substringToPos: function(end) {
        return this.str.substring(this.pos, this.pos = end);
    },
    // consume a single expected char or raise a parse error
    eat: function(code) {
        if (this.charCode() !== code) {
            error(this, this.pos, 'Expect `' + String.fromCharCode(code) + '`');
        }

        this.pos++;
    }
};
|
||||
|
||||
// Consume a run of whitespace. Scanning starts at pos + 1 — the char at the
// current position is assumed to be whitespace already (checked by caller).
function scanSpaces(tokenizer) {
    var end = tokenizer.pos + 1;

    while (end < tokenizer.str.length) {
        var code = tokenizer.str.charCodeAt(end);

        if (code !== R && code !== N && code !== F && code !== SPACE && code !== TAB) {
            break;
        }

        end++;
    }

    return tokenizer.substringToPos(end);
}
|
||||
|
||||
// Consume a keyword: the longest run of [a-zA-Z0-9-] characters
// (per the NAME_CHAR table). Errors out when no name character is
// present at the cursor.
function scanWord(tokenizer) {
    var str = tokenizer.str;
    var end = tokenizer.pos;

    while (end < str.length) {
        var code = str.charCodeAt(end);

        // stop at the first non-ASCII or non-name character
        if (code >= 128 || NAME_CHAR[code] === 0) {
            break;
        }

        end++;
    }

    if (end === tokenizer.pos) {
        error(tokenizer, tokenizer.pos, 'Expect a keyword');
    }

    return tokenizer.substringToPos(end);
}
// Consume a run of decimal digits and return it as a string.
// Errors out when no digit is present at the cursor.
function scanNumber(tokenizer) {
    var str = tokenizer.str;
    var end = tokenizer.pos;

    while (end < str.length) {
        var code = str.charCodeAt(end);

        // 48..57 are '0'..'9'
        if (code < 48 || code > 57) {
            break;
        }

        end++;
    }

    if (end === tokenizer.pos) {
        error(tokenizer, tokenizer.pos, 'Expect a number');
    }

    return tokenizer.substringToPos(end);
}
// Consume a single-quoted string literal, including both quotes.
// Errors out when the closing quote is missing.
function scanString(tokenizer) {
    var closingQuote = tokenizer.str.indexOf('\'', tokenizer.pos + 1);

    if (closingQuote === -1) {
        error(tokenizer, tokenizer.str.length, 'Expect a quote');
    }

    return tokenizer.substringToPos(closingQuote + 1);
}
// Parse a `{min}` / `{min,}` / `{min,max}` range that follows a term.
// `comma` marks the #{...} (comma-separated) form. A missing max
// (`{2,}`) is encoded as max: 0, i.e. unbounded.
function readMultiplierRange(tokenizer, comma) {
    tokenizer.eat(LEFTCURLYBRACKET);

    var min = scanNumber(tokenizer);
    var max = null;

    if (tokenizer.charCode() === COMMA) {
        tokenizer.pos++;

        // `{2,}` leaves the upper bound open
        if (tokenizer.charCode() !== RIGHTCURLYBRACKET) {
            max = scanNumber(tokenizer);
        }
    } else {
        // `{2}` means exactly min occurrences
        max = min;
    }

    tokenizer.eat(RIGHTCURLYBRACKET);

    return {
        comma: comma,
        min: Number(min),
        max: max ? Number(max) : 0
    };
}
// Read an optional multiplier after a term. Returns one of the shared
// MULTIPLIER_* singletons (callers compare them by identity) or a
// fresh range object for the {m,n} forms.
function readMultiplier(tokenizer) {
    var code = tokenizer.charCode();

    if (code === ASTERISK) {
        tokenizer.pos++;
        return MULTIPLIER_ZERO_OR_MORE;
    }

    if (code === PLUSSIGN) {
        tokenizer.pos++;
        return MULTIPLIER_ONE_OR_MORE;
    }

    if (code === QUESTIONMARK) {
        tokenizer.pos++;
        return MULTIPLIER_ZERO_OR_ONE;
    }

    if (code === NUMBERSIGN) {
        tokenizer.pos++;

        // bare `#` is one-or-more comma-separated; `#{m,n}` carries a range
        if (tokenizer.charCode() !== LEFTCURLYBRACKET) {
            return MULTIPLIER_ONE_OR_MORE_COMMA_SEPARATED;
        }

        return readMultiplierRange(tokenizer, true);
    }

    if (code === LEFTCURLYBRACKET) {
        return readMultiplierRange(tokenizer, false);
    }

    return MULTIPLIER_DEFAULT;
}
// Read an optional multiplier after `node`; when one is present, wrap
// the node into a single-term group carrying it, otherwise return the
// node untouched.
function maybeMultiplied(tokenizer, node) {
    var multiplier = readMultiplier(tokenizer);

    if (multiplier === MULTIPLIER_DEFAULT) {
        return node;
    }

    return {
        type: 'Group',
        terms: [node],
        combinator: '|', // `|` combinator is simplest in implementation (and therefore faster)
        disallowEmpty: false,
        multiplier: multiplier,
        explicit: false
    };
}
// Parse a <'property'> reference (with optional trailing multiplier).
function readProperty(tokenizer) {
    tokenizer.eat(LESSTHANSIGN);
    tokenizer.eat(APOSTROPHE);

    var name = scanWord(tokenizer);

    tokenizer.eat(APOSTROPHE);
    tokenizer.eat(GREATERTHANSIGN);

    return maybeMultiplied(tokenizer, {
        type: 'Property',
        name: name
    });
}
// Parse a <type> reference (with optional trailing multiplier).
// A trailing `()` pair inside the brackets becomes part of the name.
function readType(tokenizer) {
    tokenizer.eat(LESSTHANSIGN);

    var name = scanWord(tokenizer);

    if (tokenizer.charCode() === LEFTPARENTHESIS &&
        tokenizer.nextCharCode() === RIGHTPARENTHESIS) {
        tokenizer.pos += 2;
        name += '()';
    }

    tokenizer.eat(GREATERTHANSIGN);

    return maybeMultiplied(tokenizer, {
        type: 'Type',
        name: name
    });
}
// Parse an identifier: when followed by `(` it is a function whose
// arguments form a nested implicit group, otherwise a plain keyword.
// Either result may carry a trailing multiplier.
function readKeywordOrFunction(tokenizer) {
    var name = scanWord(tokenizer);

    if (tokenizer.charCode() !== LEFTPARENTHESIS) {
        return maybeMultiplied(tokenizer, {
            type: 'Keyword',
            name: name
        });
    }

    tokenizer.pos++; // skip '('
    var children = readImplicitGroup(tokenizer);
    tokenizer.eat(RIGHTPARENTHESIS);

    return maybeMultiplied(tokenizer, {
        type: 'Function',
        name: name,
        children: children
    });
}
// Rebuild a flat term list (terms interleaved with Combinator tokens)
// into nested Group nodes by combinator precedence: each pass takes
// the tightest-binding combinator present, strips its Combinator
// tokens and wraps every run it joined into a subgroup. Mutates
// `terms` in place and returns the combinator left for the outer
// group (undefined when no combinator occurred — the caller falls
// back to ' ').
function regroupTerms(terms, combinators) {
    // Wrap a run of terms into an implicit single-occurrence group.
    function createGroup(terms, combinator) {
        return {
            type: 'Group',
            terms: terms,
            combinator: combinator,
            disallowEmpty: false,
            multiplier: MULTIPLIER_DEFAULT,
            explicit: false
        };
    }

    // tightest-binding combinators first (' ' < && < || < |)
    combinators = Object.keys(combinators).sort(function(a, b) {
        return COMBINATOR_PRECEDENCE[a] - COMBINATOR_PRECEDENCE[b];
    });

    while (combinators.length > 0) {
        var combinator = combinators.shift();

        // subgroupStart is the index where the current run of terms
        // joined by `combinator` begins, or -1 when no run is open.
        // FIX: was initialized to 0, which disagrees with the `=== -1`
        // sentinel checks below and would treat a never-started run as
        // starting at index 0.
        for (var i = 0, subgroupStart = -1; i < terms.length; i++) {
            var term = terms[i];

            if (term.type === 'Combinator') {
                if (term.value === combinator) {
                    // open a run at the term before this combinator,
                    // then drop the combinator token itself
                    if (subgroupStart === -1) {
                        subgroupStart = i - 1;
                    }
                    terms.splice(i, 1);
                    i--;
                } else {
                    // a different combinator ends the current run;
                    // wrap the run when it spans more than one term
                    if (subgroupStart !== -1 && i - subgroupStart > 1) {
                        terms.splice(
                            subgroupStart,
                            i - subgroupStart,
                            createGroup(terms.slice(subgroupStart, i), combinator)
                        );
                        i = subgroupStart + 1;
                    }
                    subgroupStart = -1;
                }
            }
        }

        // wrap a run that reaches the end of the list, but only when
        // more combinator passes follow (otherwise the outer group
        // itself represents the run)
        if (subgroupStart !== -1 && combinators.length) {
            terms.splice(
                subgroupStart,
                i - subgroupStart,
                createGroup(terms.slice(subgroupStart, i), combinator)
            );
        }
    }

    return combinator;
}
// Read a sequence of terms up to end of input or an unbalanced close
// bracket (peek() returns undefined there). Adjacent terms get an
// implicit ' ' combinator inserted between them; regroupTerms() then
// nests the flat list by combinator precedence.
function readImplicitGroup(tokenizer) {
    var terms = [];
    var combinators = {}; // set of combinator values seen in this group
    var token;
    var prevToken = null;
    var prevTokenPos = tokenizer.pos;

    // NOTE: assignment in condition — loop ends when peek() yields
    // no token
    while (token = peek(tokenizer)) {
        if (token.type !== 'Spaces') {
            if (token.type === 'Combinator') {
                // check for combinator in group beginning and double combinator sequence
                if (prevToken === null || prevToken.type === 'Combinator') {
                    error(tokenizer, prevTokenPos, 'Unexpected combinator');
                }

                combinators[token.value] = true;
            } else if (prevToken !== null && prevToken.type !== 'Combinator') {
                // two terms with no explicit combinator between them:
                // insert the implicit juxtaposition combinator (a b)
                combinators[' '] = true;
                terms.push({
                    type: 'Combinator',
                    value: ' '
                });
            }

            terms.push(token);
            prevToken = token;
            prevTokenPos = tokenizer.pos;
        }
    }

    // check for combinator in group ending
    if (prevToken !== null && prevToken.type === 'Combinator') {
        error(tokenizer, tokenizer.pos - prevTokenPos, 'Unexpected combinator');
    }

    return {
        type: 'Group',
        terms: terms,
        // regroupTerms returns undefined when no combinator occurred
        combinator: regroupTerms(terms, combinators) || ' ',
        disallowEmpty: false,
        multiplier: MULTIPLIER_DEFAULT,
        explicit: false
    };
}
// Parse an explicit [ ... ] group with an optional multiplier and an
// optional trailing `!` (disallow-empty) flag.
function readGroup(tokenizer) {
    tokenizer.eat(LEFTSQUAREBRACKET);
    var result = readImplicitGroup(tokenizer);
    tokenizer.eat(RIGHTSQUAREBRACKET);

    result.explicit = true;
    result.multiplier = readMultiplier(tokenizer);

    // `[ ... ]!` — the group must match at least one term
    if (tokenizer.charCode() === EXCLAMATIONMARK) {
        tokenizer.pos++;
        result.disallowEmpty = true;
    }

    return result;
}
// Produce the next token at the cursor: a term node (Keyword/Function,
// Group, Type, Property, Parentheses, String) or a marker token
// (Combinator, Comma, Slash, Percent, Spaces). Returns undefined at
// end of input or on any unexpected character, which terminates
// readImplicitGroup's loop.
function peek(tokenizer) {
    var code = tokenizer.charCode();

    // a name character starts a keyword or a function
    if (code < 128 && NAME_CHAR[code] === 1) {
        return readKeywordOrFunction(tokenizer);
    }

    switch (code) {
        case LEFTSQUAREBRACKET:
            return readGroup(tokenizer);

        case LESSTHANSIGN:
            // <'property'> vs <type>
            if (tokenizer.nextCharCode() === APOSTROPHE) {
                return readProperty(tokenizer);
            } else {
                return readType(tokenizer);
            }

        case VERTICALLINE:
            // single `|` or double `||` combinator
            return {
                type: 'Combinator',
                value: tokenizer.substringToPos(tokenizer.nextCharCode() === VERTICALLINE ? tokenizer.pos + 2 : tokenizer.pos + 1)
            };

        case AMPERSAND:
            // `&&` — a lone `&` is a parse error (eat reports it)
            tokenizer.pos++;
            tokenizer.eat(AMPERSAND);
            return {
                type: 'Combinator',
                value: '&&'
            };

        case COMMA:
            tokenizer.pos++;
            return {
                type: 'Comma',
                value: ','
            };

        case SOLIDUS:
            tokenizer.pos++;
            return {
                type: 'Slash',
                value: '/'
            };

        case PERCENTSIGN: // looks like exception, needs for attr()'s <type-or-unit>
            tokenizer.pos++;
            return {
                type: 'Percent',
                value: '%'
            };

        case LEFTPARENTHESIS:
            // parenthesized subgroup
            tokenizer.pos++;
            var children = readImplicitGroup(tokenizer);
            tokenizer.eat(RIGHTPARENTHESIS);

            return {
                type: 'Parentheses',
                children: children
            };

        case APOSTROPHE:
            return {
                type: 'String',
                value: scanString(tokenizer)
            };

        case SPACE:
        case TAB:
        case N:
        case R:
        case F:
            return {
                type: 'Spaces',
                value: scanSpaces(tokenizer)
            };
    }
}
// Abort parsing by throwing a SyntaxParseError at the given position
// within the syntax string.
function error(tokenizer, pos, msg) {
    var message = msg || 'Unexpected input';

    throw new SyntaxParseError(message, tokenizer.str, pos);
}
// Parse a CSS value-definition syntax string into a grammar AST
// (a tree of Group/Keyword/Type/Property/... nodes).
function parse(str) {
    var tokenizer = new Tokenizer(str);
    var result = readImplicitGroup(tokenizer);

    // the whole input must be consumed
    if (tokenizer.pos !== str.length) {
        error(tokenizer, tokenizer.pos);
    }

    // reduce a redundant implicit group wrapping a single group term
    if (result.terms.length === 1 && result.terms[0].type === 'Group') {
        return result.terms[0];
    }

    return result;
}
// Warm up parse() to eliminate code branches that never execute and to
// avoid soft deoptimizations (insufficient type feedback); the sample
// exercises every token kind the grammar supports.
parse('[a&&<b>#|<\'c\'>*||e(){2,} f{2} /,(% g#{1,2})]!');

module.exports = parse;
106
build/node_modules/css-tree/lib/lexer/grammar/translate.js
generated
vendored
Normal file
106
build/node_modules/css-tree/lib/lexer/grammar/translate.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
'use strict';
|
||||
|
||||
// True when `node` exists and has the given type; a falsy `node` is
// returned as-is (so the result is only guaranteed truthy/falsy, not
// strictly boolean).
function isNodeType(node, type) {
    if (node) {
        return node.type === type;
    }

    return node;
}
// Render a multiplier object back to value-definition notation:
// '' (exactly one), '*', '+', '?', '#', '{n}', '{n,}', '{n,m}' and the
// '#{...}' comma-separated range forms. max: 0 means unbounded.
function serializeMultiplier(multiplier) {
    var min = multiplier.min;
    var max = multiplier.max;

    if (min === 0 && max === 0) {
        return '*';
    }

    if (min === 0 && max === 1) {
        return '?';
    }

    if (min === 1 && max === 0) {
        return multiplier.comma ? '#' : '+';
    }

    if (min === 1 && max === 1) {
        return '';
    }

    // explicit range: {n}, {n,}, {n,m}, optionally prefixed with '#'
    var range = '{' + min;

    if (min !== max) {
        range += ',' + (max !== 0 ? max : '');
    }

    range += '}';

    return (multiplier.comma ? '#' : '') + range;
}
// Render a group's terms joined by its combinator. Explicit groups
// (or any group when forceBraces is set) are wrapped in brackets; no
// blank is emitted between `[` and a leading Comma term.
function translateSequence(node, forceBraces, decorate) {
    var bracketed = node.explicit || forceBraces;
    var separator = node.combinator === ' ' ? ' ' : ' ' + node.combinator + ' ';
    var parts = [];

    for (var i = 0; i < node.terms.length; i++) {
        parts.push(translate(node.terms[i], forceBraces, decorate));
    }

    var result = parts.join(separator);

    if (bracketed) {
        var open = isNodeType(node.terms[0], 'Comma') ? '[' : '[ ';
        result = open + result + ' ]';
    }

    return result;
}
// Render a parenthesized group; an empty group collapses to `()`.
function translateParentheses(group, forceBraces, decorate) {
    if (group.terms.length === 0) {
        return '()';
    }

    return '( ' + translateSequence(group, forceBraces, decorate) + ' )';
}
// Serialize a grammar AST node back to value-definition syntax.
// `forceBraces` brackets every group (not only explicit ones);
// `decorate`, when a function, is applied to every node's rendered
// text as `decorate(result, node)`.
function translate(node, forceBraces, decorate) {
    var result;

    switch (node.type) {
        case 'Group':
            result =
                translateSequence(node, forceBraces, decorate) +
                (node.disallowEmpty ? '!' : '') +
                serializeMultiplier(node.multiplier);
            break;

        case 'Keyword':
            result = node.name;
            break;

        case 'Function':
            result = node.name + translateParentheses(node.children, forceBraces, decorate);
            break;

        case 'Parentheses': // replace for seq('(' seq(...node.children) ')')
            result = translateParentheses(node.children, forceBraces, decorate);
            break;

        case 'Type':
            result = '<' + node.name + '>';
            break;

        case 'Property':
            result = '<\'' + node.name + '\'>';
            break;

        case 'Combinator': // remove?
        case 'Slash': // replace for String? '/'
        case 'Percent': // replace for String? '%'
        case 'String':
        case 'Comma':
            // tokens that carry their own textual value
            result = node.value;
            break;

        default:
            throw new Error('Unknown node type `' + node.type + '`');
    }

    if (typeof decorate === 'function') {
        result = decorate(result, node);
    }

    return result;
}

module.exports = translate;
31
build/node_modules/css-tree/lib/lexer/grammar/walk.js
generated
vendored
Normal file
31
build/node_modules/css-tree/lib/lexer/grammar/walk.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
'use strict';
|
||||
|
||||
module.exports = function walk(node, fn, context) {
|
||||
switch (node.type) {
|
||||
case 'Group':
|
||||
node.terms.forEach(function(term) {
|
||||
walk(term, fn, context);
|
||||
});
|
||||
break;
|
||||
|
||||
case 'Function':
|
||||
case 'Parentheses':
|
||||
walk(node.children, fn, context);
|
||||
break;
|
||||
|
||||
case 'Keyword':
|
||||
case 'Type':
|
||||
case 'Property':
|
||||
case 'Combinator':
|
||||
case 'Comma':
|
||||
case 'Slash':
|
||||
case 'String':
|
||||
case 'Percent':
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error('Unknown type: ' + node.type);
|
||||
}
|
||||
|
||||
fn.call(context, node);
|
||||
};
|
||||
6
build/node_modules/css-tree/lib/lexer/index.js
generated
vendored
Normal file
6
build/node_modules/css-tree/lib/lexer/index.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
'use strict';

// Public lexer API: the Lexer class plus the value-definition grammar
// toolkit (parse/translate/walk) from ./grammar.
module.exports = {
    Lexer: require('./Lexer'),
    grammar: require('./grammar')
};
515
build/node_modules/css-tree/lib/lexer/match.js
generated
vendored
Normal file
515
build/node_modules/css-tree/lib/lexer/match.js
generated
vendored
Normal file
@@ -0,0 +1,515 @@
|
||||
'use strict';

var names = require('../utils/names');
// Fallback multiplier for syntax nodes that carry none: exactly one
// occurrence, not comma-separated. (max: 0 would mean unbounded —
// see matchGroup, which maps it to Infinity.)
var MULTIPLIER_DEFAULT = {
    comma: false,
    min: 1,
    max: 1
};
// Advance a List item cursor past WhiteSpace and Comment nodes;
// returns the first significant item, or null when the list ends.
function skipSpaces(node) {
    while (node !== null) {
        var type = node.data.type;

        if (type !== 'WhiteSpace' && type !== 'Comment') {
            break;
        }

        node = node.next;
    }

    return node;
}
// Append a match to the buffer. Group matches are inlined — their
// sub-matches are spliced in — so Group wrappers never appear in the
// flattened results.
function putResult(buffer, match) {
    var type = match.type || match.syntax.type;

    if (type === 'Group') {
        for (var i = 0; i < match.match.length; i++) {
            buffer.push(match.match[i]);
        }
    } else {
        buffer.push(match);
    }
}
// toJSON helper attached to match nodes: flattens the syntax
// reference down to its type and name.
function matchToJSON() {
    var syntax = this.syntax;

    return {
        type: syntax.type,
        name: syntax.name,
        match: this.match,
        node: this.node
    };
}
// Normalize a matcher result record. A bad node wins over everything
// else (the other fields are dropped); otherwise pack the last visited
// node, the continuation cursor and the match payload.
function buildMatchNode(badNode, lastNode, next, match) {
    var result = {
        badNode: null,
        lastNode: null,
        next: null,
        match: null
    };

    if (badNode) {
        result.badNode = badNode;
    } else {
        result.lastNode = lastNode;
        result.next = next;
        result.match = match;
    }

    return result;
}
// Match a Group syntax node against the AST list starting at item
// `node`, honoring the group's combinator ('|', ' ', '&&', '||') and
// multiplier. The group body is repeated up to multiplier.max times
// (max: 0 means unbounded) and at least multiplier.min repetitions are
// required for a match; for comma-separated multipliers a ',' Operator
// node must appear between repetitions. On any mismatch the cursor is
// recovered and the labeled `mismatch` loop is exited. Returns a
// record from buildMatchNode().
function matchGroup(lexer, syntaxNode, node) {
    var result = [];
    var buffer;
    var multiplier = syntaxNode.multiplier || MULTIPLIER_DEFAULT;
    var min = multiplier.min;
    var max = multiplier.max === 0 ? Infinity : multiplier.max;
    var lastCommaTermCount; // result length right after the last comma
    var lastComma;          // list item of the last matched comma
    var matchCount = 0;
    var lastNode = null;    // deepest node reached before a mismatch
    var badNode = null;     // node that makes the whole match invalid

    mismatch:
    while (matchCount < max) {
        node = skipSpaces(node);
        buffer = []; // matches for the current repetition only

        switch (syntaxNode.combinator) {
            case '|':
                // single choice: first matching term wins
                for (var i = 0; i < syntaxNode.terms.length; i++) {
                    var term = syntaxNode.terms[i];
                    var res = matchSyntax(lexer, term, node);

                    if (res.match) {
                        putResult(buffer, res.match);
                        node = res.next;
                        break; // continue matching
                    } else if (res.badNode) {
                        badNode = res.badNode;
                        break mismatch;
                    } else if (res.lastNode) {
                        lastNode = res.lastNode;
                    }
                }

                if (buffer.length === 0) {
                    break mismatch; // nothing found -> stop matching
                }

                break;

            case ' ':
                // juxtaposition: every term must match, in order
                var beforeMatchNode = node;
                var lastMatchedTerm = null;
                var hasTailMatch = false; // a non-comma term matched non-empty
                var commaMissed = false;  // a required comma failed to match

                for (var i = 0; i < syntaxNode.terms.length; i++) {
                    var term = syntaxNode.terms[i];
                    var res = matchSyntax(lexer, term, node);

                    if (res.match) {
                        // a comma may not follow "nothing" (unless it is the first term)
                        if (term.type === 'Comma' && i !== 0 && !hasTailMatch) {
                            // recover cursor to state before last match and stop matching
                            lastNode = node && node.data;
                            node = beforeMatchNode;
                            break mismatch;
                        }

                        // non-empty match (res.next will refer to another node)
                        if (res.next !== node) {
                            // match should be preceded by a comma
                            if (commaMissed) {
                                lastNode = node && node.data;
                                node = beforeMatchNode;
                                break mismatch;
                            }

                            hasTailMatch = term.type !== 'Comma';
                            lastMatchedTerm = term;
                        }

                        putResult(buffer, res.match);
                        node = skipSpaces(res.next);
                    } else if (res.badNode) {
                        badNode = res.badNode;
                        break mismatch;
                    } else {
                        if (res.lastNode) {
                            lastNode = res.lastNode;
                        }

                        // it's ok when comma doesn't match when no matches yet
                        // but only if comma is not first or last term
                        if (term.type === 'Comma' && i !== 0 && i !== syntaxNode.terms.length - 1) {
                            if (hasTailMatch) {
                                commaMissed = true;
                            }
                            continue;
                        }

                        // recover cursor to state before last match and stop matching
                        lastNode = res.lastNode || (node && node.data);
                        node = beforeMatchNode;
                        break mismatch;
                    }
                }

                // don't allow empty match when [ ]!
                if (!lastMatchedTerm && syntaxNode.disallowEmpty) {
                    // empty match but shouldn't
                    // recover cursor to state before last match and stop matching
                    lastNode = node && node.data;
                    node = beforeMatchNode;
                    break mismatch;
                }

                // don't allow comma at the end but only if last term isn't a comma
                // (`term` still holds the last term from the loop above)
                if (lastMatchedTerm && lastMatchedTerm.type === 'Comma' && term.type !== 'Comma') {
                    lastNode = node && node.data;
                    node = beforeMatchNode;
                    break mismatch;
                }

                break;

            case '&&':
                // all terms must match, in any order
                var beforeMatchNode = node;
                var lastMatchedTerm = null;
                var terms = syntaxNode.terms.slice(); // remaining terms

                while (terms.length) {
                    var wasMatch = false;
                    var emptyMatched = 0;

                    for (var i = 0; i < terms.length; i++) {
                        var term = terms[i];
                        var res = matchSyntax(lexer, term, node);

                        if (res.match) {
                            // non-empty match (res.next will refer to another node)
                            if (res.next !== node) {
                                lastMatchedTerm = term;
                            } else {
                                emptyMatched++;
                                continue;
                            }

                            wasMatch = true;
                            terms.splice(i--, 1); // term consumed
                            putResult(buffer, res.match);
                            node = skipSpaces(res.next);
                            break;
                        } else if (res.badNode) {
                            badNode = res.badNode;
                            break mismatch;
                        } else if (res.lastNode) {
                            lastNode = res.lastNode;
                        }
                    }

                    if (!wasMatch) {
                        // terms left, but they all are optional
                        if (emptyMatched === terms.length) {
                            break;
                        }

                        // not ok
                        lastNode = node && node.data;
                        node = beforeMatchNode;
                        break mismatch;
                    }
                }

                if (!lastMatchedTerm && syntaxNode.disallowEmpty) { // don't allow empty match when [ ]!
                    // empty match but shouldn't
                    // recover cursor to state before last match and stop matching
                    lastNode = node && node.data;
                    node = beforeMatchNode;
                    break mismatch;
                }

                break;

            case '||':
                // one or more of the terms must match, in any order
                var beforeMatchNode = node;
                var lastMatchedTerm = null;
                var terms = syntaxNode.terms.slice(); // remaining terms

                while (terms.length) {
                    var wasMatch = false;
                    var emptyMatched = 0;

                    for (var i = 0; i < terms.length; i++) {
                        var term = terms[i];
                        var res = matchSyntax(lexer, term, node);

                        if (res.match) {
                            // non-empty match (res.next will refer to another node)
                            if (res.next !== node) {
                                lastMatchedTerm = term;
                            } else {
                                emptyMatched++;
                                continue;
                            }

                            wasMatch = true;
                            terms.splice(i--, 1); // term consumed
                            putResult(buffer, res.match);
                            node = skipSpaces(res.next);
                            break;
                        } else if (res.badNode) {
                            badNode = res.badNode;
                            break mismatch;
                        } else if (res.lastNode) {
                            lastNode = res.lastNode;
                        }
                    }

                    if (!wasMatch) {
                        break;
                    }
                }

                // don't allow empty match
                // (`emptyMatched` holds the value from the last while-iteration)
                if (!lastMatchedTerm && (emptyMatched !== terms.length || syntaxNode.disallowEmpty)) {
                    // empty match but shouldn't
                    // recover cursor to state before last match and stop matching
                    lastNode = node && node.data;
                    node = beforeMatchNode;
                    break mismatch;
                }

                break;
        }

        // flush buffer
        result.push.apply(result, buffer);
        matchCount++;

        if (!node) {
            break;
        }

        if (multiplier.comma) {
            if (lastComma && lastCommaTermCount === result.length) {
                // nothing match after comma
                break mismatch;
            }

            node = skipSpaces(node);
            if (node !== null && node.data.type === 'Operator' && node.data.value === ',') {
                // record the separator comma itself as a match
                result.push({
                    syntax: syntaxNode,
                    match: [{
                        type: 'ASTNode',
                        node: node.data,
                        childrenMatch: null
                    }]
                });
                lastCommaTermCount = result.length;
                lastComma = node;
                node = node.next;
            } else {
                lastNode = node !== null ? node.data : null;
                break mismatch;
            }
        }
    }

    if (lastComma && lastCommaTermCount === result.length) {
        // nothing match after comma
        node = lastComma;
        result.pop();
    }

    return buildMatchNode(badNode, lastNode, node, matchCount < min ? null : {
        syntax: syntaxNode,
        match: result,
        toJSON: matchToJSON
    });
}
// Match a CSS value AST (a List item cursor `node`) against one
// grammar syntax node, dispatching on the syntax node's type.
// Returns a record from buildMatchNode():
// { badNode, lastNode, next, match }.
function matchSyntax(lexer, syntaxNode, node) {
    var badNode = null;
    var lastNode = null;
    var match = null;

    switch (syntaxNode.type) {
        case 'Group':
            return matchGroup(lexer, syntaxNode, node);

        case 'Function':
            // expect a function node
            if (!node || node.data.type !== 'Function') {
                break;
            }

            var keyword = names.keyword(node.data.name);
            var name = syntaxNode.name.toLowerCase();

            // check function name with vendor consideration
            if (name !== keyword.vendor + keyword.name) {
                break;
            }

            // arguments must match the function's children syntax completely
            var res = matchSyntax(lexer, syntaxNode.children, node.data.children.head);
            if (!res.match || res.next) {
                badNode = res.badNode || res.lastNode || (res.next ? res.next.data : null) || node.data;
                break;
            }

            match = [{
                type: 'ASTNode',
                node: node.data,
                childrenMatch: res.match.match
            }];

            // Use node.next instead of res.next here since syntax is matching
            // for the internal list and it should be completely matched (res.next
            // is null at this point). Therefore the function is matched and we
            // are going to the next node
            node = node.next;
            break;

        case 'Parentheses':
            if (!node || node.data.type !== 'Parentheses') {
                break;
            }

            // the parenthesized content must match completely
            var res = matchSyntax(lexer, syntaxNode.children, node.data.children.head);
            if (!res.match || res.next) {
                badNode = res.badNode || res.lastNode || (res.next ? res.next.data : null) || node.data; // TODO: case when res.next === null
                break;
            }

            match = [{
                type: 'ASTNode',
                node: node.data,
                childrenMatch: res.match.match
            }];

            node = res.next;
            break;

        case 'Type':
            // delegate to the named type's matcher registered on the lexer
            var typeSyntax = lexer.getType(syntaxNode.name);
            if (!typeSyntax) {
                throw new Error('Unknown syntax type `' + syntaxNode.name + '`');
            }

            var res = typeSyntax.match(node);
            if (!res.match) {
                badNode = res && res.badNode; // TODO: case when res.next === null
                lastNode = (res && res.lastNode) || (node && node.data);
                break;
            }

            node = res.next;
            putResult(match = [], res.match);
            if (match.length === 0) {
                match = null;
            }
            break;

        case 'Property':
            // delegate to the named property's matcher registered on the lexer
            var propertySyntax = lexer.getProperty(syntaxNode.name);
            if (!propertySyntax) {
                throw new Error('Unknown property `' + syntaxNode.name + '`');
            }

            var res = propertySyntax.match(node);
            if (!res.match) {
                badNode = res && res.badNode; // TODO: case when res.next === null
                lastNode = (res && res.lastNode) || (node && node.data);
                break;
            }

            node = res.next;
            putResult(match = [], res.match);
            if (match.length === 0) {
                match = null;
            }
            break;

        case 'Keyword':
            if (!node) {
                break;
            }

            if (node.data.type === 'Identifier') {
                var keyword = names.keyword(node.data.name);
                var keywordName = keyword.name;
                var name = syntaxNode.name.toLowerCase();

                // drop \0 and \9 hack from keyword name
                if (keywordName.indexOf('\\') !== -1) {
                    keywordName = keywordName.replace(/\\[09].*$/, '');
                }

                // compare with vendor prefix taken into account
                if (name !== keyword.vendor + keywordName) {
                    break;
                }
            } else {
                // keyword may be a number (e.g. font-weight: 400 )
                if (node.data.type !== 'Number' || node.data.value !== syntaxNode.name) {
                    break;
                }
            }

            match = [{
                type: 'ASTNode',
                node: node.data,
                childrenMatch: null
            }];
            node = node.next;
            break;

        case 'Slash':
        case 'Comma':
            // expect an Operator node with the exact same value ('/' or ',')
            if (!node || node.data.type !== 'Operator' || node.data.value !== syntaxNode.value) {
                break;
            }

            match = [{
                type: 'ASTNode',
                node: node.data,
                childrenMatch: null
            }];
            node = node.next;
            break;

        case 'String':
            if (!node || node.data.type !== 'String') {
                break;
            }

            match = [{
                type: 'ASTNode',
                node: node.data,
                childrenMatch: null
            }];
            node = node.next;
            break;

        case 'ASTNode':
            // generic-node syntax carries its own match predicate
            if (node && syntaxNode.match(node)) {
                match = {
                    type: 'ASTNode',
                    node: node.data,
                    childrenMatch: null
                };
                node = node.next;
            }
            return buildMatchNode(badNode, lastNode, node, match);

        default:
            throw new Error('Not implemented yet node type: ' + syntaxNode.type);
    }

    return buildMatchNode(badNode, lastNode, node, match === null ? null : {
        syntax: syntaxNode,
        match: match,
        toJSON: matchToJSON
    });

};

module.exports = matchSyntax;
84
build/node_modules/css-tree/lib/lexer/search.js
generated
vendored
Normal file
84
build/node_modules/css-tree/lib/lexer/search.js
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
var List = require('../utils/list');
|
||||
|
||||
// Leftmost AST node covered by a match tree, or null when the match
// covers nothing.
function getFirstMatchNode(matchNode) {
    if (matchNode.type === 'ASTNode') {
        return matchNode.node;
    }

    if (matchNode.match.length === 0) {
        return null;
    }

    return getFirstMatchNode(matchNode.match[0]);
}
// Rightmost AST node covered by a match tree, or null when the match
// covers nothing.
function getLastMatchNode(matchNode) {
    if (matchNode.type === 'ASTNode') {
        return matchNode.node;
    }

    var count = matchNode.match.length;

    if (count === 0) {
        return null;
    }

    return getLastMatchNode(matchNode.match[count - 1]);
}
// Collect the AST fragments that a successful match attributed to a
// given syntax type/name pair. Each fragment records its parent list,
// a combined source location (when both end nodes carry one) and the
// covered nodes copied into a new List.
function matchFragments(lexer, ast, match, type, name) {
    function findFragments(matchNode) {
        // ASTNode leaves carry no syntax reference — nothing to inspect
        if (matchNode.type === 'ASTNode') {
            return;
        }

        if (matchNode.syntax.type === type &&
            matchNode.syntax.name === name) {
            // boundary AST nodes of this match subtree
            var start = getFirstMatchNode(matchNode);
            var end = getLastMatchNode(matchNode);

            // locate `start` in the AST to learn its list item and parent list
            lexer.syntax.walk(ast, function(node, item, list) {
                if (node === start) {
                    var nodes = new List();
                    var loc = null;

                    // copy the sibling run from start through end
                    do {
                        nodes.appendData(item.data);

                        if (item.data === end) {
                            break;
                        }

                        item = item.next;
                    } while (item !== null);

                    if (start.loc !== null && end.loc !== null) {
                        loc = {
                            source: start.loc.source,
                            start: start.loc.start,
                            end: end.loc.end
                        };
                    }

                    fragments.push({
                        parent: list,
                        loc: loc,
                        nodes: nodes
                    });
                }
            });
        }

        // recurse into nested matches
        matchNode.match.forEach(findFragments);
    }

    var fragments = [];

    if (match.matched !== null) {
        findFragments(match.matched);
    }

    return fragments;
}

module.exports = {
    matchFragments: matchFragments
};
163
build/node_modules/css-tree/lib/lexer/structure.js
generated
vendored
Normal file
163
build/node_modules/css-tree/lib/lexer/structure.js
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
var List = require('../utils/list');
// cached reference, used to probe arbitrary node objects safely
var hasOwnProperty = Object.prototype.hasOwnProperty;
// True for a non-negative integer. Written out long-hand, equivalent
// to: Number.isInteger(value) && value >= 0
function isValidNumber(value) {
    if (typeof value !== 'number' || !isFinite(value)) {
        return false;
    }

    // integral and non-negative
    return Math.floor(value) === value && value >= 0;
}

// A location point is valid when it exists and its offset, line and
// column are all non-negative integers.
function isValidLocation(loc) {
    if (!loc) {
        return false;
    }

    return (
        isValidNumber(loc.offset) &&
        isValidNumber(loc.line) &&
        isValidNumber(loc.column)
    );
}
// Build a structure validator for one node type. The returned
// checkNode(node, warn) reports, via warn(node, message): non-object
// nodes, a wrong `type` value, malformed `loc` objects, field values
// matching none of the kinds allowed by `fields`, unknown fields, and
// fields declared in `fields` but missing from the node.
function createNodeStructureChecker(type, fields) {
    return function checkNode(node, warn) {
        // only plain objects qualify as AST nodes
        if (!node || node.constructor !== Object) {
            return warn(node, 'Type of node should be an Object');
        }

        for (var key in node) {
            var valid = true;

            if (hasOwnProperty.call(node, key) === false) {
                continue;
            }

            if (key === 'type') {
                if (node.type !== type) {
                    warn(node, 'Wrong node type `' + node.type + '`, expected `' + type + '`');
                }
            } else if (key === 'loc') {
                if (node.loc === null) {
                    continue;
                } else if (node.loc && node.loc.constructor === Object) {
                    // narrow the reported key to the offending sub-field
                    if (typeof node.loc.source !== 'string') {
                        key += '.source';
                    } else if (!isValidLocation(node.loc.start)) {
                        key += '.start';
                    } else if (!isValidLocation(node.loc.end)) {
                        key += '.end';
                    } else {
                        continue;
                    }
                }

                // reached only for a malformed loc (or a non-object one)
                valid = false;
            } else if (fields.hasOwnProperty(key)) {
                // `valid` is re-declared here (var hoisting): it starts
                // false and flips when any allowed kind matches
                for (var i = 0, valid = false; !valid && i < fields[key].length; i++) {
                    var fieldType = fields[key][i];

                    switch (fieldType) {
                        case String:
                            valid = typeof node[key] === 'string';
                            break;

                        case Boolean:
                            valid = typeof node[key] === 'boolean';
                            break;

                        case null:
                            valid = node[key] === null;
                            break;

                        default:
                            if (typeof fieldType === 'string') {
                                // a node-type name: expect a node of that type
                                valid = node[key] && node[key].type === fieldType;
                            } else if (Array.isArray(fieldType)) {
                                // an array spec: expect a List of children
                                valid = node[key] instanceof List;
                            }
                    }
                }
            } else {
                warn(node, 'Unknown field `' + key + '` for ' + type + ' node type');
            }

            if (!valid) {
                warn(node, 'Bad value for `' + type + '.' + key + '`');
            }
        }

        // every declared field must be present on the node
        for (var key in fields) {
            if (hasOwnProperty.call(fields, key) &&
                hasOwnProperty.call(node, key) === false) {
                warn(node, 'Field `' + type + '.' + key + '` is missed');
            }
        }
    };
}
// Builds the structure descriptor for one node type: a human-readable
// `docs` map (field name -> type notation) and a `check` function that
// validates node instances against the declared field types.
// Throws when the structure definition contains an unsupported value.
function processStructure(name, nodeType) {
    var structure = nodeType.structure;
    var fields = {
        type: String,
        loc: true
    };
    var docs = {
        type: '"' + name + '"'
    };

    for (var key in structure) {
        if (!hasOwnProperty.call(structure, key)) {
            continue;
        }

        // normalize the declaration to an array of allowed types
        var fieldTypes = Array.isArray(structure[key])
            ? structure[key].slice()
            : [structure[key]];
        var docsTypes = [];

        fields[key] = fieldTypes;

        for (var i = 0; i < fieldTypes.length; i++) {
            var fieldType = fieldTypes[i];

            if (fieldType === String || fieldType === Boolean) {
                docsTypes.push(fieldType.name);
            } else if (fieldType === null) {
                docsTypes.push('null');
            } else if (typeof fieldType === 'string') {
                // a node type name, e.g. 'Identifier'
                docsTypes.push('<' + fieldType + '>');
            } else if (Array.isArray(fieldType)) {
                docsTypes.push('List'); // TODO: use type enum
            } else {
                throw new Error('Wrong value `' + fieldType + '` in `' + name + '.' + key + '` structure definition');
            }
        }

        docs[key] = docsTypes.join(' | ');
    }

    return {
        docs: docs,
        check: createNodeStructureChecker(name, fields)
    };
}
|
||||
|
||||
module.exports = {
    // Builds a map of node-type name -> { docs, check } descriptors from
    // a syntax config. Every node type must declare a `structure`.
    getStructureFromConfig: function(config) {
        var structure = {};

        if (config.node) {
            for (var name in config.node) {
                if (hasOwnProperty.call(config.node, name)) {
                    var nodeType = config.node[name];

                    if (nodeType.structure) {
                        structure[name] = processStructure(name, nodeType);
                    } else {
                        throw new Error('Missed `structure` field in `' + name + '` node type definition');
                    }
                }
            }
        }

        return structure;
    }
};
|
||||
76
build/node_modules/css-tree/lib/lexer/trace.js
generated
vendored
Normal file
76
build/node_modules/css-tree/lib/lexer/trace.js
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
// Collects the chain of grammar syntaxes (Type/Property/Keyword) that
// lead to the given AST node within this match result. Returns an array
// ordered outermost-first, or null when the node wasn't matched.
// Invoked with a match result object as `this`.
function getTrace(node) {
    var result = null;

    function hasMatch(matchNode) {
        if (matchNode.type === 'ASTNode') {
            if (matchNode.node === node) {
                result = [];
                return true;
            }

            if (matchNode.childrenMatch) {
                // use for-loop for better perfomance
                for (var i = 0; i < matchNode.childrenMatch.length; i++) {
                    if (hasMatch(matchNode.childrenMatch[i])) {
                        return true;
                    }
                }
            }

            return false;
        }

        // use for-loop for better perfomance
        for (var i = 0; i < matchNode.match.length; i++) {
            if (!hasMatch(matchNode.match[i])) {
                continue;
            }

        var syntaxType = matchNode.syntax.type;
            if (syntaxType === 'Type' ||
                syntaxType === 'Property' ||
                syntaxType === 'Keyword') {
                // prepend so the outermost syntax ends up first
                result.unshift(matchNode.syntax);
            }

            return true;
        }

        return false;
    }

    if (this.matched !== null) {
        hasMatch(this.matched);
    }

    return result;
}
|
||||
|
||||
// True when any syntax in the node's trace satisfies `fn`; false when
// the node has no trace within the given match result.
function testNode(match, node, fn) {
    var trace = getTrace.call(match, node);

    return trace !== null && trace.some(fn);
}
|
||||
|
||||
// True when the node was matched through the <type> grammar with the
// given name. Invoked with a match result as `this`.
function isType(node, type) {
    return testNode(this, node, function(matchNode) {
        return matchNode.name === type && matchNode.type === 'Type';
    });
}
|
||||
|
||||
// True when the node was matched through the <'property'> grammar with
// the given name. Invoked with a match result as `this`.
function isProperty(node, property) {
    return testNode(this, node, function(matchNode) {
        return matchNode.name === property && matchNode.type === 'Property';
    });
}
|
||||
|
||||
// True when the node was matched as a keyword. Invoked with a match
// result as `this`.
function isKeyword(node) {
    return testNode(this, node, function(matchNode) {
        return matchNode.type === 'Keyword';
    });
}
|
||||
|
||||
// Helpers for inspecting a lexer match result; all are meant to be
// invoked with a match result object as `this`.
module.exports = {
    getTrace: getTrace,
    isType: isType,
    isProperty: isProperty,
    isKeyword: isKeyword
};
|
||||
158
build/node_modules/css-tree/lib/parser/create.js
generated
vendored
Normal file
158
build/node_modules/css-tree/lib/parser/create.js
generated
vendored
Normal file
@@ -0,0 +1,158 @@
|
||||
'use strict';
|
||||
|
||||
var Tokenizer = require('../tokenizer');
|
||||
var sequence = require('./sequence');
|
||||
var noop = function() {};
|
||||
|
||||
// Produces a parse-context delegate that invokes the parser method with
// the given name on the current parser instance (`this`).
function createParseContext(name) {
    return function() {
        var method = this[name];
        return method.call(this);
    };
}
|
||||
|
||||
// Flattens a raw syntax config into the shape the parser consumes:
// parse contexts (strings refer to parser method names), value scopes,
// at-rule/pseudo parse handlers, and per-node parse functions (the
// latter are attached directly under the node name).
function processConfig(config) {
    var parserConfig = {
        context: {},
        scope: {},
        atrule: {},
        pseudo: {}
    };
    var name;

    if (config.parseContext) {
        for (name in config.parseContext) {
            var context = config.parseContext[name];

            if (typeof context === 'function') {
                parserConfig.context[name] = context;
            } else if (typeof context === 'string') {
                // a string names a parser method to delegate to
                parserConfig.context[name] = createParseContext(context);
            }
        }
    }

    if (config.scope) {
        for (name in config.scope) {
            parserConfig.scope[name] = config.scope[name];
        }
    }

    if (config.atrule) {
        for (name in config.atrule) {
            if (config.atrule[name].parse) {
                parserConfig.atrule[name] = config.atrule[name].parse;
            }
        }
    }

    if (config.pseudo) {
        for (name in config.pseudo) {
            if (config.pseudo[name].parse) {
                parserConfig.pseudo[name] = config.pseudo[name].parse;
            }
        }
    }

    if (config.node) {
        for (name in config.node) {
            parserConfig[name] = config.node[name].parse;
        }
    }

    return parserConfig;
}
|
||||
|
||||
// Builds a parse function bound to the given syntax config. The returned
// function parses a CSS source string into an AST.
module.exports = function createParser(config) {
    var parser = {
        scanner: new Tokenizer(),
        filename: '<unknown>',
        needPositions: false,
        tolerant: false,
        onParseError: noop,
        parseAtrulePrelude: true,
        parseRulePrelude: true,
        parseValue: true,
        parseCustomProperty: false,

        readSequence: sequence,

        // In tolerant mode a failed consumer is replaced by a fallback
        // node (built from the token where parsing started) and the error
        // is reported via onParseError; otherwise errors propagate.
        tolerantParse: function(consumer, fallback) {
            if (this.tolerant) {
                var start = this.scanner.currentToken;

                try {
                    return consumer.call(this);
                } catch (e) {
                    var fallbackNode = fallback.call(this, start);
                    this.onParseError(e, fallbackNode);
                    return fallbackNode;
                }
            } else {
                return consumer.call(this);
            }
        },

        // Location range for [start, end) offsets, or null when position
        // tracking is disabled.
        getLocation: function(start, end) {
            if (this.needPositions) {
                return this.scanner.getLocationRange(
                    start,
                    end,
                    this.filename
                );
            }

            return null;
        },
        // Location range spanning a node list; falls back to the current
        // token start when the list is empty.
        getLocationFromList: function(list) {
            if (this.needPositions) {
                return this.scanner.getLocationRange(
                    list.head !== null ? list.first().loc.start.offset - this.scanner.startOffset : this.scanner.tokenStart,
                    list.head !== null ? list.last().loc.end.offset - this.scanner.startOffset : this.scanner.tokenStart,
                    this.filename
                );
            }

            return null;
        }
    };

    // merge the processed config (contexts, scopes, node parsers) into
    // the parser instance
    config = processConfig(config || {});
    for (var key in config) {
        parser[key] = config[key];
    }

    return function(source, options) {
        options = options || {};

        var context = options.context || 'default';
        var ast;

        parser.scanner.setSource(source, options.offset, options.line, options.column);
        parser.filename = options.filename || '<unknown>';
        parser.needPositions = Boolean(options.positions);
        parser.tolerant = Boolean(options.tolerant);
        parser.onParseError = typeof options.onParseError === 'function' ? options.onParseError : noop;
        parser.parseAtrulePrelude = 'parseAtrulePrelude' in options ? Boolean(options.parseAtrulePrelude) : true;
        parser.parseRulePrelude = 'parseRulePrelude' in options ? Boolean(options.parseRulePrelude) : true;
        parser.parseValue = 'parseValue' in options ? Boolean(options.parseValue) : true;
        parser.parseCustomProperty = 'parseCustomProperty' in options ? Boolean(options.parseCustomProperty) : false;

        if (!parser.context.hasOwnProperty(context)) {
            throw new Error('Unknown context `' + context + '`');
        }

        ast = parser.context[context].call(parser, options);

        // the whole input must be consumed
        if (!parser.scanner.eof) {
            parser.scanner.error();
        }

        // console.log(JSON.stringify(ast, null, 4));
        return ast;
    };
};
|
||||
4
build/node_modules/css-tree/lib/parser/index.js
generated
vendored
Normal file
4
build/node_modules/css-tree/lib/parser/index.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
var createParser = require('./create');
var config = require('../syntax/config/parser');

// Default CSS parser built from the standard syntax config.
module.exports = createParser(config);
|
||||
55
build/node_modules/css-tree/lib/parser/sequence.js
generated
vendored
Normal file
55
build/node_modules/css-tree/lib/parser/sequence.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
var List = require('../utils/list');
|
||||
var TYPE = require('../tokenizer').TYPE;
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
|
||||
// Reads a sequence of nodes using the recognizer's getNode() until it
// returns undefined or the input ends. Comments are dropped; whitespace
// is kept only between produced nodes, subject to the ignoreWS /
// ignoreWSAfter flags the recognizer may set on the shared context.
module.exports = function readSequence(recognizer) {
    var children = new List();
    var child = null;
    var context = {
        recognizer: recognizer,
        space: null,          // pending whitespace node, flushed before next child
        ignoreWS: false,      // drop whitespace before the next node
        ignoreWSAfter: false  // a node may set this to drop following whitespace
    };

    this.scanner.skipSC();

    while (!this.scanner.eof) {
        switch (this.scanner.tokenType) {
            case COMMENT:
                this.scanner.next();
                continue;

            case WHITESPACE:
                if (context.ignoreWS) {
                    this.scanner.next();
                } else {
                    context.space = this.WhiteSpace();
                }
                continue;
        }

        child = recognizer.getNode.call(this, context);

        // recognizer signals end of sequence with undefined
        if (child === undefined) {
            break;
        }

        // flush pending whitespace only when followed by a real node
        if (context.space !== null) {
            children.appendData(context.space);
            context.space = null;
        }

        children.appendData(child);

        if (context.ignoreWSAfter) {
            context.ignoreWSAfter = false;
            context.ignoreWS = true;
        } else {
            context.ignoreWS = false;
        }
    }

    return children;
};
|
||||
8
build/node_modules/css-tree/lib/syntax/atrule/font-face.js
generated
vendored
Normal file
8
build/node_modules/css-tree/lib/syntax/atrule/font-face.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
// @font-face: no prelude; block is parsed as a declaration list.
module.exports = {
    parse: {
        prelude: null,
        block: function() {
            // true: block content is a declaration list
            return this.Block(true);
        }
    }
};
|
||||
39
build/node_modules/css-tree/lib/syntax/atrule/import.js
generated
vendored
Normal file
39
build/node_modules/css-tree/lib/syntax/atrule/import.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
var List = require('../../utils/list');
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var STRING = TYPE.String;
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var URL = TYPE.Url;
|
||||
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
|
||||
|
||||
module.exports = {
    parse: {
        // @import prelude: a string or url(), optionally followed by a
        // media query list.
        prelude: function() {
            var children = new List();

            this.scanner.skipSC();

            switch (this.scanner.tokenType) {
                case STRING:
                    children.appendData(this.String());
                    break;

                case URL:
                    children.appendData(this.Url());
                    break;

                default:
                    this.scanner.error('String or url() is expected');
            }

            // an identifier or '(' after the source means media queries follow
            if (this.scanner.lookupNonWSType(0) === IDENTIFIER ||
                this.scanner.lookupNonWSType(0) === LEFTPARENTHESIS) {
                children.appendData(this.WhiteSpace());
                children.appendData(this.MediaQueryList());
            }

            return children;
        },
        // @import takes no block
        block: null
    }
};
|
||||
7
build/node_modules/css-tree/lib/syntax/atrule/index.js
generated
vendored
Normal file
7
build/node_modules/css-tree/lib/syntax/atrule/index.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
// Per-at-rule parse configuration, keyed by at-rule name.
module.exports = {
    'font-face': require('./font-face'),
    'import': require('./import'),
    'media': require('./media'),
    'page': require('./page'),
    'supports': require('./supports')
};
|
||||
14
build/node_modules/css-tree/lib/syntax/atrule/media.js
generated
vendored
Normal file
14
build/node_modules/css-tree/lib/syntax/atrule/media.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
var List = require('../../utils/list');

// @media: prelude is a media query list; block is parsed as a rule list.
module.exports = {
    parse: {
        prelude: function() {
            return new List().appendData(
                this.MediaQueryList()
            );
        },
        block: function() {
            // false: block content is a rule list, not declarations
            return this.Block(false);
        }
    }
};
|
||||
14
build/node_modules/css-tree/lib/syntax/atrule/page.js
generated
vendored
Normal file
14
build/node_modules/css-tree/lib/syntax/atrule/page.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
var List = require('../../utils/list');

// @page: prelude is a selector list; block is parsed as a declaration list.
module.exports = {
    parse: {
        prelude: function() {
            return new List().appendData(
                this.SelectorList()
            );
        },
        block: function() {
            // true: block content is a declaration list
            return this.Block(true);
        }
    }
};
|
||||
100
build/node_modules/css-tree/lib/syntax/atrule/supports.js
generated
vendored
Normal file
100
build/node_modules/css-tree/lib/syntax/atrule/supports.js
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
var List = require('../../utils/list');
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var FUNCTION = TYPE.Function;
|
||||
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
|
||||
var HYPHENMINUS = TYPE.HyphenMinus;
|
||||
var COLON = TYPE.Colon;
|
||||
|
||||
// Fallback: consume the remaining content as a single Raw node wrapped
// in a list.
function consumeRaw() {
    return new List().appendData(
        this.Raw(this.scanner.currentToken, 0, 0, false, false)
    );
}
|
||||
|
||||
// Parses the content of a parenthesized @supports condition. A leading
// identifier (optionally with a '-' vendor prefix) followed by ':' is
// treated as a declaration; anything else is read as a generic sequence.
function parentheses() {
    var index = 0;

    this.scanner.skipSC();

    // TODO: make it simplier
    if (this.scanner.tokenType === IDENTIFIER) {
        index = 1;
    } else if (this.scanner.tokenType === HYPHENMINUS &&
               this.scanner.lookupType(1) === IDENTIFIER) {
        index = 2;
    }

    // identifier followed (ignoring whitespace) by a colon -> declaration
    if (index !== 0 && this.scanner.lookupNonWSType(index) === COLON) {
        return new List().appendData(
            this.Declaration()
        );
    }

    return readSequence.call(this);
}
|
||||
|
||||
// Reads a @supports condition sequence: identifiers, functions and
// parenthesized groups. Comments are dropped; whitespace is kept only
// between produced nodes.
function readSequence() {
    var children = new List();
    var space = null;
    var child;

    this.scanner.skipSC();

    scan:
    while (!this.scanner.eof) {
        switch (this.scanner.tokenType) {
            case WHITESPACE:
                space = this.WhiteSpace();
                continue;

            case COMMENT:
                this.scanner.next();
                continue;

            case FUNCTION:
                child = this.Function(consumeRaw, this.scope.AtrulePrelude);
                break;

            case IDENTIFIER:
                child = this.Identifier();
                break;

            case LEFTPARENTHESIS:
                child = this.Parentheses(parentheses, this.scope.AtrulePrelude);
                break;

            default:
                // any unexpected token ends the sequence
                break scan;
        }

        // flush pending whitespace only when followed by a real node
        if (space !== null) {
            children.appendData(space);
            space = null;
        }

        children.appendData(child);
    }

    return children;
}
|
||||
|
||||
module.exports = {
    parse: {
        // @supports prelude: a non-empty condition sequence
        prelude: function() {
            var children = readSequence.call(this);

            if (children.isEmpty()) {
                this.scanner.error('Condition is expected');
            }

            return children;
        },
        block: function() {
            // false: block content is a rule list
            return this.Block(false);
        }
    }
};
|
||||
8
build/node_modules/css-tree/lib/syntax/config/lexer.js
generated
vendored
Normal file
8
build/node_modules/css-tree/lib/syntax/config/lexer.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
var data = require('../../../data');

// Lexer config: generic CSS value types plus the type/property grammars
// from the bundled data and the standard node set.
module.exports = {
    generic: true,
    types: data.types,
    properties: data.properties,
    node: require('../node')
};
|
||||
94
build/node_modules/css-tree/lib/syntax/config/mix.js
generated
vendored
Normal file
94
build/node_modules/css-tree/lib/syntax/config/mix.js
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
var hasOwnProperty = Object.prototype.hasOwnProperty;

// Describes how each config section is merged by mix() below:
//   true    - copy the source value as is
//   {}      - deep-extend dest and src objects into a fresh object
//   [keys]  - a per-name map whose entries are mixed using the listed
//             keys as an inner shape
var shape = {
    generic: true,
    types: {},
    properties: {},
    parseContext: {},
    scope: {},
    atrule: ['parse'],
    pseudo: ['parse'],
    node: ['name', 'structure', 'parse', 'generate', 'walkContext']
};
|
||||
|
||||
// Truthy only for plain objects (direct instances of Object).
// NOTE: mirrors `value && ...` semantics — a falsy input is returned
// as-is (null, 0, '', …), not coerced to false.
function isObject(value) {
    return value ? value.constructor === Object : value;
}
|
||||
|
||||
// Shallow copy: a plain object gets its own enumerable keys duplicated
// into a fresh object; any other value is returned unchanged.
function copy(value) {
    if (!isObject(value)) {
        return value;
    }

    var result = {};

    for (var key in value) {
        if (hasOwnProperty.call(value, key)) {
            result[key] = value[key];
        }
    }

    return result;
}
|
||||
|
||||
// Recursively merges `src` into `dest` in place: when both sides hold a
// plain object the objects are merged, otherwise the (copied) source
// value overwrites the destination.
function extend(dest, src) {
    for (var key in src) {
        if (!hasOwnProperty.call(src, key)) {
            continue;
        }

        if (isObject(dest[key])) {
            extend(dest[key], copy(src[key]));
        } else {
            dest[key] = copy(src[key]);
        }
    }
}
|
||||
|
||||
// Merges `src` into `dest` following the given shape descriptor (see
// the `shape` constant above for the meaning of each descriptor kind).
// Returns `dest`.
function mix(dest, src, shape) {
    for (var key in shape) {
        if (hasOwnProperty.call(shape, key) === false) {
            continue;
        }

        if (shape[key] === true) {
            // plain copy of the source's own value
            if (key in src) {
                if (hasOwnProperty.call(src, key)) {
                    dest[key] = copy(src[key]);
                }
            }
        } else if (shape[key]) {
            if (isObject(shape[key])) {
                // deep-extend: dest first, then src, into a fresh object
                var res = {};
                extend(res, dest[key]);
                extend(res, src[key]);
                dest[key] = res;
            } else if (Array.isArray(shape[key])) {
                // per-name map; each entry is itself mixed using the
                // listed keys as an inner shape
                var res = {};
                var innerShape = shape[key].reduce(function(s, k) {
                    s[k] = true;
                    return s;
                }, {});
                for (var name in dest[key]) {
                    if (hasOwnProperty.call(dest[key], name)) {
                        res[name] = {};
                        if (dest[key] && dest[key][name]) {
                            mix(res[name], dest[key][name], innerShape);
                        }
                    }
                }
                for (var name in src[key]) {
                    if (hasOwnProperty.call(src[key], name)) {
                        if (!res[name]) {
                            res[name] = {};
                        }
                        if (src[key] && src[key][name]) {
                            mix(res[name], src[key][name], innerShape);
                        }
                    }
                }
                dest[key] = res;
            }
        }
    }
    return dest;
}
|
||||
|
||||
// Public entry: merge a syntax config extension into `dest` using the
// top-level shape.
module.exports = function(dest, src) {
    return mix(dest, src, shape);
};
|
||||
27
build/node_modules/css-tree/lib/syntax/config/parser.js
generated
vendored
Normal file
27
build/node_modules/css-tree/lib/syntax/config/parser.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
// Parser config: maps parse-context names to parser method names (or
// inline functions), plus value scopes, at-rule/pseudo handlers and the
// standard node set.
module.exports = {
    parseContext: {
        default: 'StyleSheet',
        stylesheet: 'StyleSheet',
        atrule: 'Atrule',
        atrulePrelude: function(options) {
            return this.AtrulePrelude(options.atrule ? String(options.atrule) : null);
        },
        mediaQueryList: 'MediaQueryList',
        mediaQuery: 'MediaQuery',
        rule: 'Rule',
        selectorList: 'SelectorList',
        selector: 'Selector',
        block: function() {
            // true: standalone block parse defaults to a declaration list
            return this.Block(true);
        },
        declarationList: 'DeclarationList',
        declaration: 'Declaration',
        value: function(options) {
            return this.Value(options.property ? String(options.property) : null);
        }
    },
    scope: require('../scope'),
    atrule: require('../atrule'),
    pseudo: require('../pseudo'),
    node: require('../node')
};
|
||||
3
build/node_modules/css-tree/lib/syntax/config/walker.js
generated
vendored
Normal file
3
build/node_modules/css-tree/lib/syntax/config/walker.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
// Walker config: only needs the node definitions (walk contexts).
module.exports = {
    node: require('../node')
};
|
||||
82
build/node_modules/css-tree/lib/syntax/create.js
generated
vendored
Normal file
82
build/node_modules/css-tree/lib/syntax/create.js
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
var List = require('../utils/list');
|
||||
var Tokenizer = require('../tokenizer');
|
||||
var Lexer = require('../lexer/Lexer');
|
||||
var grammar = require('../lexer/grammar');
|
||||
var createParser = require('../parser/create');
|
||||
var createGenerator = require('../generator/create');
|
||||
var createConvertor = require('../convertor/create');
|
||||
var createWalker = require('../walker/create');
|
||||
var clone = require('../utils/clone');
|
||||
var names = require('../utils/names');
|
||||
var mix = require('./config/mix');
|
||||
|
||||
// Copies every enumerable property of `src` onto `dest` (including
// inherited ones — intentionally no hasOwnProperty filter) and returns
// `dest`.
function assign(dest, src) {
    for (var property in src) {
        dest[property] = src[property];
    }

    return dest;
}
|
||||
|
||||
// Assembles a full syntax instance (parser, walker, generator, default
// lexer and plain-object convertors) from a merged config.
function createSyntax(config) {
    var parse = createParser(config);
    var walker = createWalker(config);
    var generator = createGenerator(config);
    var convertor = createConvertor(walker);

    var syntax = {
        List: List,
        Tokenizer: Tokenizer,
        Lexer: Lexer,

        property: names.property,
        keyword: names.keyword,

        grammar: grammar,
        lexer: null, // assigned below, once `syntax` exists
        createLexer: function(config) {
            return new Lexer(config, syntax, syntax.lexer.structure);
        },

        parse: parse,

        walk: walker.walk,
        walkUp: walker.walkUp,
        walkRules: walker.walkRules,
        walkRulesRight: walker.walkRulesRight,
        walkDeclarations: walker.walkDeclarations,

        translate: generator.translate,
        translateWithSourceMap: generator.translateWithSourceMap,
        translateMarkup: generator.translateMarkup,

        clone: clone,
        fromPlainObject: convertor.fromPlainObject,
        toPlainObject: convertor.toPlainObject,

        createSyntax: function(config) {
            return createSyntax(mix({}, config));
        },
        // Derive a new syntax from this one; `extension` is either a
        // config to merge or a function receiving (baseConfig, assign).
        fork: function(extension) {
            var base = mix({}, config); // copy of config
            return createSyntax(
                typeof extension === 'function'
                    ? extension(base, assign)
                    : mix(base, extension)
            );
        }
    };

    // default lexer built from the same config; created after `syntax`
    // so the lexer can reference it
    syntax.lexer = new Lexer({
        generic: true,
        types: config.types,
        properties: config.properties,
        node: config.node
    }, syntax);

    return syntax;
};
|
||||
|
||||
// Public factory: creates a syntax from a config (the config is copied,
// not mutated).
exports.create = function(config) {
    return createSyntax(mix({}, config));
};
|
||||
15
build/node_modules/css-tree/lib/syntax/function/element.js
generated
vendored
Normal file
15
build/node_modules/css-tree/lib/syntax/function/element.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
var List = require('../../utils/list');

// https://drafts.csswg.org/css-images-4/#element-notation
// https://developer.mozilla.org/en-US/docs/Web/CSS/element
// element( <id-selector> ) — content is a single id selector
module.exports = function() {
    this.scanner.skipSC();

    var id = this.IdSelector();

    this.scanner.skipSC();

    return new List().appendData(
        id
    );
};
|
||||
9
build/node_modules/css-tree/lib/syntax/function/expression.js
generated
vendored
Normal file
9
build/node_modules/css-tree/lib/syntax/function/expression.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
var List = require('../../utils/list');

// legacy IE function
// expression '(' raw ')'
module.exports = function() {
    // the whole content is consumed as a single Raw node
    return new List().appendData(
        this.Raw(this.scanner.currentToken, 0, 0, false, false)
    );
};
|
||||
41
build/node_modules/css-tree/lib/syntax/function/var.js
generated
vendored
Normal file
41
build/node_modules/css-tree/lib/syntax/function/var.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
var List = require('../../utils/list');
var TYPE = require('../../tokenizer').TYPE;

var IDENTIFIER = TYPE.Identifier;
var COMMA = TYPE.Comma;
var SEMICOLON = TYPE.Semicolon;
var HYPHENMINUS = TYPE.HyphenMinus;
var EXCLAMATIONMARK = TYPE.ExclamationMark;

// var '(' ident (',' <value>? )? ')'
module.exports = function() {
    var children = new List();

    this.scanner.skipSC();

    var identStart = this.scanner.tokenStart;

    // custom property name must start with two hyphens: eat the first,
    // then verify the next character is also a hyphen
    this.scanner.eat(HYPHENMINUS);
    if (this.scanner.source.charCodeAt(this.scanner.tokenStart) !== HYPHENMINUS) {
        this.scanner.error('HyphenMinus is expected');
    }
    this.scanner.eat(IDENTIFIER);

    children.appendData({
        type: 'Identifier',
        loc: this.getLocation(identStart, this.scanner.tokenStart),
        name: this.scanner.substrToCursor(identStart)
    });

    this.scanner.skipSC();

    // optional fallback value after a comma; when custom property
    // parsing is off the fallback is kept as a Raw node
    if (this.scanner.tokenType === COMMA) {
        children.appendData(this.Operator());
        children.appendData(this.parseCustomProperty
            ? this.Value(null)
            : this.Raw(this.scanner.currentToken, EXCLAMATIONMARK, SEMICOLON, false, false)
        );
    }

    return children;
};
|
||||
20
build/node_modules/css-tree/lib/syntax/index.js
generated
vendored
Normal file
20
build/node_modules/css-tree/lib/syntax/index.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
// Merges all argument objects into a new object; later arguments win on
// key conflicts. Shallow: values are copied by reference.
function merge() {
    var result = {};

    for (var i = 0; i < arguments.length; i++) {
        var source = arguments[i];

        for (var key in source) {
            result[key] = source[key];
        }
    }

    return result;
}
|
||||
|
||||
// Default syntax instance: lexer + parser + walker configs merged.
module.exports = require('./create').create(
    merge(
        require('./config/lexer'),
        require('./config/parser'),
        require('./config/walker')
    )
);
|
||||
180
build/node_modules/css-tree/lib/syntax/node/AnPlusB.js
generated
vendored
Normal file
180
build/node_modules/css-tree/lib/syntax/node/AnPlusB.js
generated
vendored
Normal file
@@ -0,0 +1,180 @@
|
||||
var cmpChar = require('../../tokenizer').cmpChar;
|
||||
var isNumber = require('../../tokenizer').isNumber;
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var NUMBER = TYPE.Number;
|
||||
var PLUSSIGN = TYPE.PlusSign;
|
||||
var HYPHENMINUS = TYPE.HyphenMinus;
|
||||
var N = 110; // 'n'.charCodeAt(0)
|
||||
var DISALLOW_SIGN = true;
|
||||
var ALLOW_SIGN = false;
|
||||
|
||||
// Validates that the current token is an integer, optionally rejecting
// a leading sign; reports a scanner error otherwise.
// NOTE: PLUSSIGN/HYPHENMINUS token-type constants double as char codes
// here (as elsewhere in this file, e.g. cmpChar with HYPHENMINUS).
function checkTokenIsInteger(scanner, disallowSign) {
    var pos = scanner.tokenStart;

    if (scanner.source.charCodeAt(pos) === PLUSSIGN ||
        scanner.source.charCodeAt(pos) === HYPHENMINUS) {
        if (disallowSign) {
            scanner.error();
        }
        pos++;
    }

    // every remaining character must be a digit
    for (; pos < scanner.tokenEnd; pos++) {
        if (!isNumber(scanner.source.charCodeAt(pos))) {
            scanner.error('Unexpected input', pos);
        }
    }
}
|
||||
|
||||
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
|
||||
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
module.exports = {
    name: 'AnPlusB',
    structure: {
        a: [String, null],
        b: [String, null]
    },
    // Parses <an+b> into string coefficients `a` and `b` (null when a
    // part is absent).
    parse: function() {
        var start = this.scanner.tokenStart;
        var end = start;
        var prefix = ''; // pending sign or integer seen before 'n'
        var a = null;
        var b = null;

        // optional leading integer or '+' (becomes the `a` coefficient
        // when an identifier follows, otherwise the `b` value)
        if (this.scanner.tokenType === NUMBER ||
            this.scanner.tokenType === PLUSSIGN) {
            checkTokenIsInteger(this.scanner, ALLOW_SIGN);
            prefix = this.scanner.getTokenValue();
            this.scanner.next();
            end = this.scanner.tokenStart;
        }

        if (this.scanner.tokenType === IDENTIFIER) {
            // identifier must be 'n' or '-n', optionally continuing with
            // '-<integer>' inside the same token
            var bStart = this.scanner.tokenStart;

            if (cmpChar(this.scanner.source, bStart, HYPHENMINUS)) {
                if (prefix === '') {
                    prefix = '-';
                    bStart++;
                } else {
                    this.scanner.error('Unexpected hyphen minus');
                }
            }

            if (!cmpChar(this.scanner.source, bStart, N)) {
                this.scanner.error();
            }

            // normalize bare/signed prefixes to explicit coefficients
            a = prefix === '' ? '1' :
                prefix === '+' ? '+1' :
                prefix === '-' ? '-1' :
                prefix;

            var len = this.scanner.tokenEnd - bStart;
            if (len > 1) {
                // ..n-..
                if (this.scanner.source.charCodeAt(bStart + 1) !== HYPHENMINUS) {
                    this.scanner.error('Unexpected input', bStart + 1);
                }

                if (len > 2) {
                    // ..n-{number}..
                    this.scanner.tokenStart = bStart + 2;
                } else {
                    // ..n- {number}
                    this.scanner.next();
                    this.scanner.skipSC();
                }

                checkTokenIsInteger(this.scanner, DISALLOW_SIGN);
                b = '-' + this.scanner.getTokenValue();
                this.scanner.next();
                end = this.scanner.tokenStart;
            } else {
                // plain 'n': an optional signed b part may follow
                prefix = '';
                this.scanner.next();
                end = this.scanner.tokenStart;
                this.scanner.skipSC();

                if (this.scanner.tokenType === HYPHENMINUS ||
                    this.scanner.tokenType === PLUSSIGN) {
                    prefix = this.scanner.getTokenValue();
                    this.scanner.next();
                    this.scanner.skipSC();
                }

                if (this.scanner.tokenType === NUMBER) {
                    // a sign is only allowed when no operator was seen
                    checkTokenIsInteger(this.scanner, prefix !== '');

                    // number token may carry its own sign
                    if (!isNumber(this.scanner.source.charCodeAt(this.scanner.tokenStart))) {
                        prefix = this.scanner.source.charAt(this.scanner.tokenStart);
                        this.scanner.tokenStart++;
                    }

                    if (prefix === '') {
                        // should be an operator before number
                        this.scanner.error();
                    } else if (prefix === '+') {
                        // plus is using by default
                        prefix = '';
                    }

                    b = prefix + this.scanner.getTokenValue();

                    this.scanner.next();
                    end = this.scanner.tokenStart;
                } else {
                    // a dangling operator must be followed by a number
                    if (prefix) {
                        this.scanner.eat(NUMBER);
                    }
                }
            }
        } else {
            // no 'n' part: the leading integer (if any) is the b value
            if (prefix === '' || prefix === '+') { // no number
                this.scanner.error(
                    'Number or identifier is expected',
                    this.scanner.tokenStart + (
                        this.scanner.tokenType === PLUSSIGN ||
                        this.scanner.tokenType === HYPHENMINUS
                    )
                );
            }

            b = prefix;
        }

        return {
            type: 'AnPlusB',
            loc: this.getLocation(start, end),
            a: a,
            b: b
        };
    },
    // Serializes an AnPlusB node; when `a` is present, `b` is always
    // emitted with an explicit sign.
    generate: function(processChunk, node) {
        var a = node.a !== null && node.a !== undefined;
        var b = node.b !== null && node.b !== undefined;

        if (a) {
            processChunk(
                node.a === '+1' ? '+n' :
                node.a === '1' ? 'n' :
                node.a === '-1' ? '-n' :
                node.a + 'n'
            );

            if (b) {
                b = String(node.b);
                if (b.charAt(0) === '-' || b.charAt(0) === '+') {
                    processChunk(b.charAt(0));
                    processChunk(b.substr(1));
                } else {
                    // emit an implicit plus before an unsigned b
                    processChunk('+');
                    processChunk(b);
                }
            }
        } else {
            processChunk(String(node.b));
        }
    }
};
|
||||
134
build/node_modules/css-tree/lib/syntax/node/Atrule.js
generated
vendored
Normal file
134
build/node_modules/css-tree/lib/syntax/node/Atrule.js
generated
vendored
Normal file
@@ -0,0 +1,134 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var ATRULE = TYPE.Atrule;
|
||||
var SEMICOLON = TYPE.Semicolon;
|
||||
var LEFTCURLYBRACKET = TYPE.LeftCurlyBracket;
|
||||
var RIGHTCURLYBRACKET = TYPE.RightCurlyBracket;
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, SEMICOLON, LEFTCURLYBRACKET, false, true);
|
||||
}
|
||||
|
||||
function isDeclarationBlockAtrule() {
|
||||
for (var offset = 1, type; type = this.scanner.lookupType(offset); offset++) {
|
||||
if (type === RIGHTCURLYBRACKET) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (type === LEFTCURLYBRACKET ||
|
||||
type === ATRULE) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.tolerant) {
|
||||
return false;
|
||||
}
|
||||
|
||||
this.scanner.skip(offset);
|
||||
this.scanner.eat(RIGHTCURLYBRACKET);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
name: 'Atrule',
|
||||
structure: {
|
||||
name: String,
|
||||
prelude: ['AtrulePrelude', 'Raw', null],
|
||||
block: ['Block', null]
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var name;
|
||||
var nameLowerCase;
|
||||
var prelude = null;
|
||||
var block = null;
|
||||
|
||||
this.scanner.eat(ATRULE);
|
||||
|
||||
name = this.scanner.substrToCursor(start + 1);
|
||||
nameLowerCase = name.toLowerCase();
|
||||
this.scanner.skipSC();
|
||||
|
||||
// parse prelude
|
||||
if (this.scanner.eof === false &&
|
||||
this.scanner.tokenType !== LEFTCURLYBRACKET &&
|
||||
this.scanner.tokenType !== SEMICOLON) {
|
||||
if (this.parseAtrulePrelude) {
|
||||
var preludeStartToken = this.scanner.currentToken;
|
||||
prelude = this.tolerantParse(this.AtrulePrelude.bind(this, name), consumeRaw);
|
||||
|
||||
if (this.tolerant && !this.scanner.eof) {
|
||||
if (prelude.type !== 'Raw' &&
|
||||
this.scanner.tokenType !== LEFTCURLYBRACKET &&
|
||||
this.scanner.tokenType !== SEMICOLON) {
|
||||
prelude = consumeRaw.call(this, preludeStartToken);
|
||||
}
|
||||
}
|
||||
|
||||
// turn empty AtrulePrelude into null
|
||||
if (prelude.type === 'AtrulePrelude' && prelude.children.head === null) {
|
||||
prelude = null;
|
||||
}
|
||||
} else {
|
||||
prelude = consumeRaw.call(this, this.scanner.currentToken);
|
||||
}
|
||||
|
||||
this.scanner.skipSC();
|
||||
}
|
||||
|
||||
if (this.atrule.hasOwnProperty(nameLowerCase)) {
|
||||
if (typeof this.atrule[nameLowerCase].block === 'function') {
|
||||
if (this.scanner.tokenType !== LEFTCURLYBRACKET) {
|
||||
// FIXME: make tolerant
|
||||
this.scanner.error('Curly bracket is expected');
|
||||
}
|
||||
|
||||
block = this.atrule[nameLowerCase].block.call(this);
|
||||
} else {
|
||||
if (!this.tolerant || !this.scanner.eof) {
|
||||
this.scanner.eat(SEMICOLON);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
switch (this.scanner.tokenType) {
|
||||
case SEMICOLON:
|
||||
this.scanner.next();
|
||||
break;
|
||||
|
||||
case LEFTCURLYBRACKET:
|
||||
// TODO: should consume block content as Raw?
|
||||
block = this.Block(isDeclarationBlockAtrule.call(this));
|
||||
break;
|
||||
|
||||
default:
|
||||
if (!this.tolerant) {
|
||||
this.scanner.error('Semicolon or block is expected');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Atrule',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
name: name,
|
||||
prelude: prelude,
|
||||
block: block
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('@');
|
||||
processChunk(node.name);
|
||||
|
||||
if (node.prelude !== null) {
|
||||
processChunk(' ');
|
||||
this.generate(processChunk, node.prelude);
|
||||
}
|
||||
|
||||
if (node.block) {
|
||||
this.generate(processChunk, node.block);
|
||||
} else {
|
||||
processChunk(';');
|
||||
}
|
||||
},
|
||||
walkContext: 'atrule'
|
||||
};
|
||||
40
build/node_modules/css-tree/lib/syntax/node/AtrulePrelude.js
generated
vendored
Normal file
40
build/node_modules/css-tree/lib/syntax/node/AtrulePrelude.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
var List = require('../../utils/list');
|
||||
|
||||
module.exports = {
|
||||
name: 'AtrulePrelude',
|
||||
structure: {
|
||||
children: [[]]
|
||||
},
|
||||
parse: function(name) {
|
||||
var children = null;
|
||||
|
||||
if (name !== null) {
|
||||
name = name.toLowerCase();
|
||||
}
|
||||
|
||||
if (this.atrule.hasOwnProperty(name)) {
|
||||
// custom consumer
|
||||
if (typeof this.atrule[name].prelude === 'function') {
|
||||
children = this.atrule[name].prelude.call(this);
|
||||
}
|
||||
} else {
|
||||
// default consumer
|
||||
this.scanner.skipSC();
|
||||
children = this.readSequence(this.scope.AtrulePrelude);
|
||||
}
|
||||
|
||||
if (children === null) {
|
||||
children = new List();
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'AtrulePrelude',
|
||||
loc: this.getLocationFromList(children),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.each(processChunk, node);
|
||||
},
|
||||
walkContext: 'atrulePrelude'
|
||||
};
|
||||
162
build/node_modules/css-tree/lib/syntax/node/AttributeSelector.js
generated
vendored
Normal file
162
build/node_modules/css-tree/lib/syntax/node/AttributeSelector.js
generated
vendored
Normal file
@@ -0,0 +1,162 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var STRING = TYPE.String;
|
||||
var DOLLARSIGN = TYPE.DollarSign;
|
||||
var ASTERISK = TYPE.Asterisk;
|
||||
var COLON = TYPE.Colon;
|
||||
var EQUALSSIGN = TYPE.EqualsSign;
|
||||
var LEFTSQUAREBRACKET = TYPE.LeftSquareBracket;
|
||||
var RIGHTSQUAREBRACKET = TYPE.RightSquareBracket;
|
||||
var CIRCUMFLEXACCENT = TYPE.CircumflexAccent;
|
||||
var VERTICALLINE = TYPE.VerticalLine;
|
||||
var TILDE = TYPE.Tilde;
|
||||
|
||||
function getAttributeName() {
|
||||
if (this.scanner.eof) {
|
||||
this.scanner.error('Unexpected end of input');
|
||||
}
|
||||
|
||||
var start = this.scanner.tokenStart;
|
||||
var expectIdentifier = false;
|
||||
var checkColon = true;
|
||||
|
||||
if (this.scanner.tokenType === ASTERISK) {
|
||||
expectIdentifier = true;
|
||||
checkColon = false;
|
||||
this.scanner.next();
|
||||
} else if (this.scanner.tokenType !== VERTICALLINE) {
|
||||
this.scanner.eat(IDENTIFIER);
|
||||
}
|
||||
|
||||
if (this.scanner.tokenType === VERTICALLINE) {
|
||||
if (this.scanner.lookupType(1) !== EQUALSSIGN) {
|
||||
this.scanner.next();
|
||||
this.scanner.eat(IDENTIFIER);
|
||||
} else if (expectIdentifier) {
|
||||
this.scanner.error('Identifier is expected', this.scanner.tokenEnd);
|
||||
}
|
||||
} else if (expectIdentifier) {
|
||||
this.scanner.error('Vertical line is expected');
|
||||
}
|
||||
|
||||
if (checkColon && this.scanner.tokenType === COLON) {
|
||||
this.scanner.next();
|
||||
this.scanner.eat(IDENTIFIER);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Identifier',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
name: this.scanner.substrToCursor(start)
|
||||
};
|
||||
}
|
||||
|
||||
function getOperator() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var tokenType = this.scanner.tokenType;
|
||||
|
||||
if (tokenType !== EQUALSSIGN && // =
|
||||
tokenType !== TILDE && // ~=
|
||||
tokenType !== CIRCUMFLEXACCENT && // ^=
|
||||
tokenType !== DOLLARSIGN && // $=
|
||||
tokenType !== ASTERISK && // *=
|
||||
tokenType !== VERTICALLINE // |=
|
||||
) {
|
||||
this.scanner.error('Attribute selector (=, ~=, ^=, $=, *=, |=) is expected');
|
||||
}
|
||||
|
||||
if (tokenType === EQUALSSIGN) {
|
||||
this.scanner.next();
|
||||
} else {
|
||||
this.scanner.next();
|
||||
this.scanner.eat(EQUALSSIGN);
|
||||
}
|
||||
|
||||
return this.scanner.substrToCursor(start);
|
||||
}
|
||||
|
||||
// '[' S* attrib_name ']'
|
||||
// '[' S* attrib_name S* attrib_matcher S* [ IDENT | STRING ] S* attrib_flags? S* ']'
|
||||
module.exports = {
|
||||
name: 'AttributeSelector',
|
||||
structure: {
|
||||
name: 'Identifier',
|
||||
matcher: [String, null],
|
||||
value: ['String', 'Identifier', null],
|
||||
flags: [String, null]
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var name;
|
||||
var matcher = null;
|
||||
var value = null;
|
||||
var flags = null;
|
||||
|
||||
this.scanner.eat(LEFTSQUAREBRACKET);
|
||||
this.scanner.skipSC();
|
||||
|
||||
name = getAttributeName.call(this);
|
||||
this.scanner.skipSC();
|
||||
|
||||
if (this.scanner.tokenType !== RIGHTSQUAREBRACKET) {
|
||||
// avoid case `[name i]`
|
||||
if (this.scanner.tokenType !== IDENTIFIER) {
|
||||
matcher = getOperator.call(this);
|
||||
|
||||
this.scanner.skipSC();
|
||||
|
||||
value = this.scanner.tokenType === STRING
|
||||
? this.String()
|
||||
: this.Identifier();
|
||||
|
||||
this.scanner.skipSC();
|
||||
}
|
||||
|
||||
// attribute flags
|
||||
if (this.scanner.tokenType === IDENTIFIER) {
|
||||
flags = this.scanner.getTokenValue();
|
||||
this.scanner.next();
|
||||
|
||||
this.scanner.skipSC();
|
||||
}
|
||||
}
|
||||
|
||||
this.scanner.eat(RIGHTSQUAREBRACKET);
|
||||
|
||||
return {
|
||||
type: 'AttributeSelector',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
name: name,
|
||||
matcher: matcher,
|
||||
value: value,
|
||||
flags: flags
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
var flagsPrefix = ' ';
|
||||
|
||||
processChunk('[');
|
||||
this.generate(processChunk, node.name);
|
||||
|
||||
if (node.matcher !== null) {
|
||||
processChunk(node.matcher);
|
||||
|
||||
if (node.value !== null) {
|
||||
this.generate(processChunk, node.value);
|
||||
|
||||
// space between string and flags is not required
|
||||
if (node.value.type === 'String') {
|
||||
flagsPrefix = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (node.flags !== null) {
|
||||
processChunk(flagsPrefix);
|
||||
processChunk(node.flags);
|
||||
}
|
||||
|
||||
processChunk(']');
|
||||
}
|
||||
};
|
||||
79
build/node_modules/css-tree/lib/syntax/node/Block.js
generated
vendored
Normal file
79
build/node_modules/css-tree/lib/syntax/node/Block.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
var List = require('../../utils/list');
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
var SEMICOLON = TYPE.Semicolon;
|
||||
var ATRULE = TYPE.Atrule;
|
||||
var LEFTCURLYBRACKET = TYPE.LeftCurlyBracket;
|
||||
var RIGHTCURLYBRACKET = TYPE.RightCurlyBracket;
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, 0, 0, false, true);
|
||||
}
|
||||
function consumeRule() {
|
||||
return this.tolerantParse(this.Rule, consumeRaw);
|
||||
}
|
||||
function consumeRawDeclaration(startToken) {
|
||||
return this.Raw(startToken, 0, SEMICOLON, true, true);
|
||||
}
|
||||
function consumeDeclaration() {
|
||||
var node = this.tolerantParse(this.Declaration, consumeRawDeclaration);
|
||||
|
||||
if (this.scanner.tokenType === SEMICOLON) {
|
||||
this.scanner.next();
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
name: 'Block',
|
||||
structure: {
|
||||
children: [['Atrule', 'Rule', 'Declaration']]
|
||||
},
|
||||
parse: function(isDeclaration) {
|
||||
var consumer = isDeclaration ? consumeDeclaration : consumeRule;
|
||||
|
||||
var start = this.scanner.tokenStart;
|
||||
var children = new List();
|
||||
|
||||
this.scanner.eat(LEFTCURLYBRACKET);
|
||||
|
||||
scan:
|
||||
while (!this.scanner.eof) {
|
||||
switch (this.scanner.tokenType) {
|
||||
case RIGHTCURLYBRACKET:
|
||||
break scan;
|
||||
|
||||
case WHITESPACE:
|
||||
case COMMENT:
|
||||
this.scanner.next();
|
||||
break;
|
||||
|
||||
case ATRULE:
|
||||
children.appendData(this.tolerantParse(this.Atrule, consumeRaw));
|
||||
break;
|
||||
|
||||
default:
|
||||
children.appendData(consumer.call(this));
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.tolerant || !this.scanner.eof) {
|
||||
this.scanner.eat(RIGHTCURLYBRACKET);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Block',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('{');
|
||||
this.each(processChunk, node);
|
||||
processChunk('}');
|
||||
},
|
||||
walkContext: 'block'
|
||||
};
|
||||
32
build/node_modules/css-tree/lib/syntax/node/Brackets.js
generated
vendored
Normal file
32
build/node_modules/css-tree/lib/syntax/node/Brackets.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
var LEFTSQUAREBRACKET = TYPE.LeftSquareBracket;
|
||||
var RIGHTSQUAREBRACKET = TYPE.RightSquareBracket;
|
||||
|
||||
// currently only Grid Layout uses square brackets, but left it universal
|
||||
// https://drafts.csswg.org/css-grid/#track-sizing
|
||||
// [ ident* ]
|
||||
module.exports = {
|
||||
name: 'Brackets',
|
||||
structure: {
|
||||
children: [[]]
|
||||
},
|
||||
parse: function(readSequence, recognizer) {
|
||||
var start = this.scanner.tokenStart;
|
||||
var children = null;
|
||||
|
||||
this.scanner.eat(LEFTSQUAREBRACKET);
|
||||
children = readSequence.call(this, recognizer);
|
||||
this.scanner.eat(RIGHTSQUAREBRACKET);
|
||||
|
||||
return {
|
||||
type: 'Brackets',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('[');
|
||||
this.each(processChunk, node);
|
||||
processChunk(']');
|
||||
}
|
||||
};
|
||||
19
build/node_modules/css-tree/lib/syntax/node/CDC.js
generated
vendored
Normal file
19
build/node_modules/css-tree/lib/syntax/node/CDC.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
var CDC = require('../../tokenizer').TYPE.CDC;
|
||||
|
||||
module.exports = {
|
||||
name: 'CDC',
|
||||
structure: [],
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
|
||||
this.scanner.eat(CDC); // -->
|
||||
|
||||
return {
|
||||
type: 'CDC',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk) {
|
||||
processChunk('-->');
|
||||
}
|
||||
};
|
||||
19
build/node_modules/css-tree/lib/syntax/node/CDO.js
generated
vendored
Normal file
19
build/node_modules/css-tree/lib/syntax/node/CDO.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
var CDO = require('../../tokenizer').TYPE.CDO;
|
||||
|
||||
module.exports = {
|
||||
name: 'CDO',
|
||||
structure: [],
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
|
||||
this.scanner.eat(CDO); // <!--
|
||||
|
||||
return {
|
||||
type: 'CDO',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk) {
|
||||
processChunk('<!--');
|
||||
}
|
||||
};
|
||||
24
build/node_modules/css-tree/lib/syntax/node/ClassSelector.js
generated
vendored
Normal file
24
build/node_modules/css-tree/lib/syntax/node/ClassSelector.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var FULLSTOP = TYPE.FullStop;
|
||||
|
||||
// '.' ident
|
||||
module.exports = {
|
||||
name: 'ClassSelector',
|
||||
structure: {
|
||||
name: String
|
||||
},
|
||||
parse: function() {
|
||||
this.scanner.eat(FULLSTOP);
|
||||
|
||||
return {
|
||||
type: 'ClassSelector',
|
||||
loc: this.getLocation(this.scanner.tokenStart - 1, this.scanner.tokenEnd),
|
||||
name: this.scanner.consume(IDENTIFIER)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('.');
|
||||
processChunk(node.name);
|
||||
}
|
||||
};
|
||||
43
build/node_modules/css-tree/lib/syntax/node/Combinator.js
generated
vendored
Normal file
43
build/node_modules/css-tree/lib/syntax/node/Combinator.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var PLUSSIGN = TYPE.PlusSign;
|
||||
var SOLIDUS = TYPE.Solidus;
|
||||
var GREATERTHANSIGN = TYPE.GreaterThanSign;
|
||||
var TILDE = TYPE.Tilde;
|
||||
|
||||
// + | > | ~ | /deep/
|
||||
module.exports = {
|
||||
name: 'Combinator',
|
||||
structure: {
|
||||
name: String
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
|
||||
switch (this.scanner.tokenType) {
|
||||
case GREATERTHANSIGN:
|
||||
case PLUSSIGN:
|
||||
case TILDE:
|
||||
this.scanner.next();
|
||||
break;
|
||||
|
||||
case SOLIDUS:
|
||||
this.scanner.next();
|
||||
this.scanner.expectIdentifier('deep');
|
||||
this.scanner.eat(SOLIDUS);
|
||||
break;
|
||||
|
||||
default:
|
||||
this.scanner.error('Combinator is expected');
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Combinator',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
name: this.scanner.substrToCursor(start)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.name);
|
||||
}
|
||||
};
|
||||
35
build/node_modules/css-tree/lib/syntax/node/Comment.js
generated
vendored
Normal file
35
build/node_modules/css-tree/lib/syntax/node/Comment.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var ASTERISK = TYPE.Asterisk;
|
||||
var SOLIDUS = TYPE.Solidus;
|
||||
|
||||
// '/*' .* '*/'
|
||||
module.exports = {
|
||||
name: 'Comment',
|
||||
structure: {
|
||||
value: String
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var end = this.scanner.tokenEnd;
|
||||
|
||||
if ((end - start + 2) >= 2 &&
|
||||
this.scanner.source.charCodeAt(end - 2) === ASTERISK &&
|
||||
this.scanner.source.charCodeAt(end - 1) === SOLIDUS) {
|
||||
end -= 2;
|
||||
}
|
||||
|
||||
this.scanner.next();
|
||||
|
||||
return {
|
||||
type: 'Comment',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
value: this.scanner.source.substring(start + 2, end)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('/*');
|
||||
processChunk(node.value);
|
||||
processChunk('*/');
|
||||
}
|
||||
};
|
||||
127
build/node_modules/css-tree/lib/syntax/node/Declaration.js
generated
vendored
Normal file
127
build/node_modules/css-tree/lib/syntax/node/Declaration.js
generated
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var COLON = TYPE.Colon;
|
||||
var EXCLAMATIONMARK = TYPE.ExclamationMark;
|
||||
var SOLIDUS = TYPE.Solidus;
|
||||
var ASTERISK = TYPE.Asterisk;
|
||||
var DOLLARSIGN = TYPE.DollarSign;
|
||||
var HYPHENMINUS = TYPE.HyphenMinus;
|
||||
var SEMICOLON = TYPE.Semicolon;
|
||||
var RIGHTCURLYBRACKET = TYPE.RightCurlyBracket;
|
||||
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
|
||||
var PLUSSIGN = TYPE.PlusSign;
|
||||
var NUMBERSIGN = TYPE.NumberSign;
|
||||
|
||||
module.exports = {
|
||||
name: 'Declaration',
|
||||
structure: {
|
||||
important: [Boolean, String],
|
||||
property: String,
|
||||
value: ['Value', 'Raw']
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var property = readProperty.call(this);
|
||||
var important = false;
|
||||
var value;
|
||||
|
||||
this.scanner.skipSC();
|
||||
this.scanner.eat(COLON);
|
||||
|
||||
if (isCustomProperty(property) ? this.parseCustomProperty : this.parseValue) {
|
||||
value = this.Value(property);
|
||||
} else {
|
||||
value = this.Raw(this.scanner.currentToken, EXCLAMATIONMARK, SEMICOLON, false, false);
|
||||
}
|
||||
|
||||
if (this.scanner.tokenType === EXCLAMATIONMARK) {
|
||||
important = getImportant(this.scanner);
|
||||
this.scanner.skipSC();
|
||||
}
|
||||
|
||||
// TODO: include or not to include semicolon to range?
|
||||
// if (this.scanner.tokenType === SEMICOLON) {
|
||||
// this.scanner.next();
|
||||
// }
|
||||
|
||||
if (!this.scanner.eof &&
|
||||
this.scanner.tokenType !== SEMICOLON &&
|
||||
this.scanner.tokenType !== RIGHTPARENTHESIS &&
|
||||
this.scanner.tokenType !== RIGHTCURLYBRACKET) {
|
||||
this.scanner.error();
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Declaration',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
important: important,
|
||||
property: property,
|
||||
value: value
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node, item) {
|
||||
processChunk(node.property);
|
||||
processChunk(':');
|
||||
this.generate(processChunk, node.value);
|
||||
|
||||
if (node.important) {
|
||||
processChunk(node.important === true ? '!important' : '!' + node.important);
|
||||
}
|
||||
|
||||
if (item && item.next) {
|
||||
processChunk(';');
|
||||
}
|
||||
},
|
||||
walkContext: 'declaration'
|
||||
};
|
||||
|
||||
function isCustomProperty(name) {
|
||||
return name.length >= 2 &&
|
||||
name.charCodeAt(0) === HYPHENMINUS &&
|
||||
name.charCodeAt(1) === HYPHENMINUS;
|
||||
}
|
||||
|
||||
function readProperty() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var prefix = 0;
|
||||
|
||||
// hacks
|
||||
switch (this.scanner.tokenType) {
|
||||
case ASTERISK:
|
||||
case DOLLARSIGN:
|
||||
case PLUSSIGN:
|
||||
case NUMBERSIGN:
|
||||
prefix = 1;
|
||||
break;
|
||||
|
||||
// TODO: not sure we should support this hack
|
||||
case SOLIDUS:
|
||||
prefix = this.scanner.lookupType(1) === SOLIDUS ? 2 : 1;
|
||||
break;
|
||||
}
|
||||
|
||||
if (this.scanner.lookupType(prefix) === HYPHENMINUS) {
|
||||
prefix++;
|
||||
}
|
||||
|
||||
if (prefix) {
|
||||
this.scanner.skip(prefix);
|
||||
}
|
||||
|
||||
this.scanner.eat(IDENTIFIER);
|
||||
|
||||
return this.scanner.substrToCursor(start);
|
||||
}
|
||||
|
||||
// ! ws* important
|
||||
function getImportant(scanner) {
|
||||
scanner.eat(EXCLAMATIONMARK);
|
||||
scanner.skipSC();
|
||||
|
||||
var important = scanner.consume(IDENTIFIER);
|
||||
|
||||
// store original value in case it differ from `important`
|
||||
// for better original source restoring and hacks like `!ie` support
|
||||
return important === 'important' ? true : important;
|
||||
}
|
||||
43
build/node_modules/css-tree/lib/syntax/node/DeclarationList.js
generated
vendored
Normal file
43
build/node_modules/css-tree/lib/syntax/node/DeclarationList.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
var List = require('../../utils/list');
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
var SEMICOLON = TYPE.Semicolon;
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, 0, SEMICOLON, true, true);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
name: 'DeclarationList',
|
||||
structure: {
|
||||
children: [['Declaration']]
|
||||
},
|
||||
parse: function() {
|
||||
var children = new List();
|
||||
|
||||
scan:
|
||||
while (!this.scanner.eof) {
|
||||
switch (this.scanner.tokenType) {
|
||||
case WHITESPACE:
|
||||
case COMMENT:
|
||||
case SEMICOLON:
|
||||
this.scanner.next();
|
||||
break;
|
||||
|
||||
default:
|
||||
children.appendData(this.tolerantParse(this.Declaration, consumeRaw));
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'DeclarationList',
|
||||
loc: this.getLocationFromList(children),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.each(processChunk, node);
|
||||
}
|
||||
};
|
||||
45
build/node_modules/css-tree/lib/syntax/node/Dimension.js
generated
vendored
Normal file
45
build/node_modules/css-tree/lib/syntax/node/Dimension.js
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
var NUMBER = require('../../tokenizer').TYPE.Number;
|
||||
|
||||
// special reader for units to avoid adjoined IE hacks (i.e. '1px\9')
|
||||
function readUnit(scanner) {
|
||||
var unit = scanner.getTokenValue();
|
||||
var backSlashPos = unit.indexOf('\\');
|
||||
|
||||
if (backSlashPos > 0) {
|
||||
// patch token offset
|
||||
scanner.tokenStart += backSlashPos;
|
||||
|
||||
// return part before backslash
|
||||
return unit.substring(0, backSlashPos);
|
||||
}
|
||||
|
||||
// no backslash in unit name
|
||||
scanner.next();
|
||||
|
||||
return unit;
|
||||
}
|
||||
|
||||
// number ident
|
||||
module.exports = {
|
||||
name: 'Dimension',
|
||||
structure: {
|
||||
value: String,
|
||||
unit: String
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var value = this.scanner.consume(NUMBER);
|
||||
var unit = readUnit(this.scanner);
|
||||
|
||||
return {
|
||||
type: 'Dimension',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
value: value,
|
||||
unit: unit
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.value);
|
||||
processChunk(node.unit);
|
||||
}
|
||||
};
|
||||
37
build/node_modules/css-tree/lib/syntax/node/Function.js
generated
vendored
Normal file
37
build/node_modules/css-tree/lib/syntax/node/Function.js
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
|
||||
|
||||
// <function-token> <sequence> ')'
|
||||
module.exports = {
|
||||
name: 'Function',
|
||||
structure: {
|
||||
name: String,
|
||||
children: [[]]
|
||||
},
|
||||
parse: function(readSequence, recognizer) {
|
||||
var start = this.scanner.tokenStart;
|
||||
var name = this.scanner.consumeFunctionName();
|
||||
var nameLowerCase = name.toLowerCase();
|
||||
var children;
|
||||
|
||||
children = recognizer.hasOwnProperty(nameLowerCase)
|
||||
? recognizer[nameLowerCase].call(this, recognizer)
|
||||
: readSequence.call(this, recognizer);
|
||||
|
||||
this.scanner.eat(RIGHTPARENTHESIS);
|
||||
|
||||
return {
|
||||
type: 'Function',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
name: name,
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.name);
|
||||
processChunk('(');
|
||||
this.each(processChunk, node);
|
||||
processChunk(')');
|
||||
},
|
||||
walkContext: 'function'
|
||||
};
|
||||
74
build/node_modules/css-tree/lib/syntax/node/HexColor.js
generated
vendored
Normal file
74
build/node_modules/css-tree/lib/syntax/node/HexColor.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
var isHex = require('../../tokenizer').isHex;
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var NUMBER = TYPE.Number;
|
||||
var NUMBERSIGN = TYPE.NumberSign;
|
||||
|
||||
function consumeHexSequence(scanner, required) {
|
||||
if (!isHex(scanner.source.charCodeAt(scanner.tokenStart))) {
|
||||
if (required) {
|
||||
scanner.error('Unexpected input', scanner.tokenStart);
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
for (var pos = scanner.tokenStart + 1; pos < scanner.tokenEnd; pos++) {
|
||||
var code = scanner.source.charCodeAt(pos);
|
||||
|
||||
// break on non-hex char
|
||||
if (!isHex(code)) {
|
||||
// break token, exclude symbol
|
||||
scanner.tokenStart = pos;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// token is full hex sequence, go to next token
|
||||
scanner.next();
|
||||
}
|
||||
|
||||
// # ident
|
||||
module.exports = {
|
||||
name: 'HexColor',
|
||||
structure: {
|
||||
value: String
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
|
||||
this.scanner.eat(NUMBERSIGN);
|
||||
|
||||
scan:
|
||||
switch (this.scanner.tokenType) {
|
||||
case NUMBER:
|
||||
consumeHexSequence(this.scanner, true);
|
||||
|
||||
// if token is identifier then number consists of hex only,
|
||||
// try to add identifier to result
|
||||
if (this.scanner.tokenType === IDENTIFIER) {
|
||||
consumeHexSequence(this.scanner, false);
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case IDENTIFIER:
|
||||
consumeHexSequence(this.scanner, true);
|
||||
break;
|
||||
|
||||
default:
|
||||
this.scanner.error('Number or identifier is expected');
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'HexColor',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
value: this.scanner.substrToCursor(start + 1) // skip #
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('#');
|
||||
processChunk(node.value);
|
||||
}
|
||||
};
|
||||
24
build/node_modules/css-tree/lib/syntax/node/IdSelector.js
generated
vendored
Normal file
24
build/node_modules/css-tree/lib/syntax/node/IdSelector.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var NUMBERSIGN = TYPE.NumberSign;
|
||||
|
||||
// '#' ident
|
||||
module.exports = {
|
||||
name: 'IdSelector',
|
||||
structure: {
|
||||
name: String
|
||||
},
|
||||
parse: function() {
|
||||
this.scanner.eat(NUMBERSIGN);
|
||||
|
||||
return {
|
||||
type: 'IdSelector',
|
||||
loc: this.getLocation(this.scanner.tokenStart - 1, this.scanner.tokenEnd),
|
||||
name: this.scanner.consume(IDENTIFIER)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('#');
|
||||
processChunk(node.name);
|
||||
}
|
||||
};
|
||||
19
build/node_modules/css-tree/lib/syntax/node/Identifier.js
generated
vendored
Normal file
19
build/node_modules/css-tree/lib/syntax/node/Identifier.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
|
||||
module.exports = {
|
||||
name: 'Identifier',
|
||||
structure: {
|
||||
name: String
|
||||
},
|
||||
parse: function() {
|
||||
return {
|
||||
type: 'Identifier',
|
||||
loc: this.getLocation(this.scanner.tokenStart, this.scanner.tokenEnd),
|
||||
name: this.scanner.consume(IDENTIFIER)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.name);
|
||||
}
|
||||
};
|
||||
73
build/node_modules/css-tree/lib/syntax/node/MediaFeature.js
generated
vendored
Normal file
73
build/node_modules/css-tree/lib/syntax/node/MediaFeature.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var NUMBER = TYPE.Number;
|
||||
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
|
||||
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
|
||||
var COLON = TYPE.Colon;
|
||||
var SOLIDUS = TYPE.Solidus;
|
||||
|
||||
module.exports = {
|
||||
name: 'MediaFeature',
|
||||
structure: {
|
||||
name: String,
|
||||
value: ['Identifier', 'Number', 'Dimension', 'Ratio', null]
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var name;
|
||||
var value = null;
|
||||
|
||||
this.scanner.eat(LEFTPARENTHESIS);
|
||||
this.scanner.skipSC();
|
||||
|
||||
name = this.scanner.consume(IDENTIFIER);
|
||||
this.scanner.skipSC();
|
||||
|
||||
if (this.scanner.tokenType !== RIGHTPARENTHESIS) {
|
||||
this.scanner.eat(COLON);
|
||||
this.scanner.skipSC();
|
||||
|
||||
switch (this.scanner.tokenType) {
|
||||
case NUMBER:
|
||||
if (this.scanner.lookupType(1) === IDENTIFIER) {
|
||||
value = this.Dimension();
|
||||
} else if (this.scanner.lookupNonWSType(1) === SOLIDUS) {
|
||||
value = this.Ratio();
|
||||
} else {
|
||||
value = this.Number();
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case IDENTIFIER:
|
||||
value = this.Identifier();
|
||||
|
||||
break;
|
||||
|
||||
default:
|
||||
this.scanner.error('Number, dimension, ratio or identifier is expected');
|
||||
}
|
||||
|
||||
this.scanner.skipSC();
|
||||
}
|
||||
|
||||
this.scanner.eat(RIGHTPARENTHESIS);
|
||||
|
||||
return {
|
||||
type: 'MediaFeature',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
name: name,
|
||||
value: value
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('(');
|
||||
processChunk(node.name);
|
||||
if (node.value !== null) {
|
||||
processChunk(':');
|
||||
this.generate(processChunk, node.value);
|
||||
}
|
||||
processChunk(')');
|
||||
}
|
||||
};
|
||||
65
build/node_modules/css-tree/lib/syntax/node/MediaQuery.js
generated
vendored
Normal file
65
build/node_modules/css-tree/lib/syntax/node/MediaQuery.js
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
var List = require('../../utils/list');
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
|
||||
|
||||
module.exports = {
|
||||
name: 'MediaQuery',
|
||||
structure: {
|
||||
children: [['Identifier', 'MediaFeature', 'WhiteSpace']]
|
||||
},
|
||||
parse: function() {
|
||||
this.scanner.skipSC();
|
||||
|
||||
var children = new List();
|
||||
var child = null;
|
||||
var space = null;
|
||||
|
||||
scan:
|
||||
while (!this.scanner.eof) {
|
||||
switch (this.scanner.tokenType) {
|
||||
case COMMENT:
|
||||
this.scanner.next();
|
||||
continue;
|
||||
|
||||
case WHITESPACE:
|
||||
space = this.WhiteSpace();
|
||||
continue;
|
||||
|
||||
case IDENTIFIER:
|
||||
child = this.Identifier();
|
||||
break;
|
||||
|
||||
case LEFTPARENTHESIS:
|
||||
child = this.MediaFeature();
|
||||
break;
|
||||
|
||||
default:
|
||||
break scan;
|
||||
}
|
||||
|
||||
if (space !== null) {
|
||||
children.appendData(space);
|
||||
space = null;
|
||||
}
|
||||
|
||||
children.appendData(child);
|
||||
}
|
||||
|
||||
if (child === null) {
|
||||
this.scanner.error('Identifier or parenthesis is expected');
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'MediaQuery',
|
||||
loc: this.getLocationFromList(children),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.each(processChunk, node);
|
||||
}
|
||||
};
|
||||
33
build/node_modules/css-tree/lib/syntax/node/MediaQueryList.js
generated
vendored
Normal file
33
build/node_modules/css-tree/lib/syntax/node/MediaQueryList.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
var List = require('../../utils/list');
|
||||
var COMMA = require('../../tokenizer').TYPE.Comma;
|
||||
|
||||
module.exports = {
|
||||
name: 'MediaQueryList',
|
||||
structure: {
|
||||
children: [['MediaQuery']]
|
||||
},
|
||||
parse: function(relative) {
|
||||
var children = new List();
|
||||
|
||||
this.scanner.skipSC();
|
||||
|
||||
while (!this.scanner.eof) {
|
||||
children.appendData(this.MediaQuery(relative));
|
||||
|
||||
if (this.scanner.tokenType !== COMMA) {
|
||||
break;
|
||||
}
|
||||
|
||||
this.scanner.next();
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'MediaQueryList',
|
||||
loc: this.getLocationFromList(children),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.eachComma(processChunk, node);
|
||||
}
|
||||
};
|
||||
52
build/node_modules/css-tree/lib/syntax/node/Nth.js
generated
vendored
Normal file
52
build/node_modules/css-tree/lib/syntax/node/Nth.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
// https://drafts.csswg.org/css-syntax-3/#the-anb-type
|
||||
module.exports = {
|
||||
name: 'Nth',
|
||||
structure: {
|
||||
nth: ['AnPlusB', 'Identifier'],
|
||||
selector: ['SelectorList', null]
|
||||
},
|
||||
parse: function(allowOfClause) {
|
||||
this.scanner.skipSC();
|
||||
|
||||
var start = this.scanner.tokenStart;
|
||||
var end = start;
|
||||
var selector = null;
|
||||
var query;
|
||||
|
||||
if (this.scanner.lookupValue(0, 'odd') || this.scanner.lookupValue(0, 'even')) {
|
||||
query = this.Identifier();
|
||||
} else {
|
||||
query = this.AnPlusB();
|
||||
}
|
||||
|
||||
this.scanner.skipSC();
|
||||
|
||||
if (allowOfClause && this.scanner.lookupValue(0, 'of')) {
|
||||
this.scanner.next();
|
||||
|
||||
selector = this.SelectorList();
|
||||
|
||||
if (this.needPositions) {
|
||||
end = selector.children.last().loc.end.offset;
|
||||
}
|
||||
} else {
|
||||
if (this.needPositions) {
|
||||
end = query.loc.end.offset;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Nth',
|
||||
loc: this.getLocation(start, end),
|
||||
nth: query,
|
||||
selector: selector
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.generate(processChunk, node.nth);
|
||||
if (node.selector !== null) {
|
||||
processChunk(' of ');
|
||||
this.generate(processChunk, node.selector);
|
||||
}
|
||||
}
|
||||
};
|
||||
18
build/node_modules/css-tree/lib/syntax/node/Number.js
generated
vendored
Normal file
18
build/node_modules/css-tree/lib/syntax/node/Number.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
var NUMBER = require('../../tokenizer').TYPE.Number;
|
||||
|
||||
module.exports = {
|
||||
name: 'Number',
|
||||
structure: {
|
||||
value: String
|
||||
},
|
||||
parse: function() {
|
||||
return {
|
||||
type: 'Number',
|
||||
loc: this.getLocation(this.scanner.tokenStart, this.scanner.tokenEnd),
|
||||
value: this.scanner.consume(NUMBER)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.value);
|
||||
}
|
||||
};
|
||||
21
build/node_modules/css-tree/lib/syntax/node/Operator.js
generated
vendored
Normal file
21
build/node_modules/css-tree/lib/syntax/node/Operator.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
// '/' | '*' | ',' | ':' | '+' | '-'
|
||||
module.exports = {
|
||||
name: 'Operator',
|
||||
structure: {
|
||||
value: String
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
|
||||
this.scanner.next();
|
||||
|
||||
return {
|
||||
type: 'Operator',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
value: this.scanner.substrToCursor(start)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.value);
|
||||
}
|
||||
};
|
||||
29
build/node_modules/css-tree/lib/syntax/node/Parentheses.js
generated
vendored
Normal file
29
build/node_modules/css-tree/lib/syntax/node/Parentheses.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
|
||||
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
|
||||
|
||||
module.exports = {
|
||||
name: 'Parentheses',
|
||||
structure: {
|
||||
children: [[]]
|
||||
},
|
||||
parse: function(readSequence, recognizer) {
|
||||
var start = this.scanner.tokenStart;
|
||||
var children = null;
|
||||
|
||||
this.scanner.eat(LEFTPARENTHESIS);
|
||||
children = readSequence.call(this, recognizer);
|
||||
this.scanner.eat(RIGHTPARENTHESIS);
|
||||
|
||||
return {
|
||||
type: 'Parentheses',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('(');
|
||||
this.each(processChunk, node);
|
||||
processChunk(')');
|
||||
}
|
||||
};
|
||||
27
build/node_modules/css-tree/lib/syntax/node/Percentage.js
generated
vendored
Normal file
27
build/node_modules/css-tree/lib/syntax/node/Percentage.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var NUMBER = TYPE.Number;
|
||||
var PERCENTSIGN = TYPE.PercentSign;
|
||||
|
||||
module.exports = {
|
||||
name: 'Percentage',
|
||||
structure: {
|
||||
value: String
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var number = this.scanner.consume(NUMBER);
|
||||
|
||||
this.scanner.eat(PERCENTSIGN);
|
||||
|
||||
return {
|
||||
type: 'Percentage',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
value: number
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.value);
|
||||
processChunk('%');
|
||||
}
|
||||
};
|
||||
61
build/node_modules/css-tree/lib/syntax/node/PseudoClassSelector.js
generated
vendored
Normal file
61
build/node_modules/css-tree/lib/syntax/node/PseudoClassSelector.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
var List = require('../../utils/list');
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var FUNCTION = TYPE.Function;
|
||||
var COLON = TYPE.Colon;
|
||||
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
|
||||
|
||||
// : ident [ '(' .. ')' ]?
|
||||
module.exports = {
|
||||
name: 'PseudoClassSelector',
|
||||
structure: {
|
||||
name: String,
|
||||
children: [['Raw'], null]
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var children = null;
|
||||
var name;
|
||||
var nameLowerCase;
|
||||
|
||||
this.scanner.eat(COLON);
|
||||
|
||||
if (this.scanner.tokenType === FUNCTION) {
|
||||
name = this.scanner.consumeFunctionName();
|
||||
nameLowerCase = name.toLowerCase();
|
||||
|
||||
if (this.pseudo.hasOwnProperty(nameLowerCase)) {
|
||||
this.scanner.skipSC();
|
||||
children = this.pseudo[nameLowerCase].call(this);
|
||||
this.scanner.skipSC();
|
||||
} else {
|
||||
children = new List().appendData(
|
||||
this.Raw(this.scanner.currentToken, 0, 0, false, false)
|
||||
);
|
||||
}
|
||||
|
||||
this.scanner.eat(RIGHTPARENTHESIS);
|
||||
} else {
|
||||
name = this.scanner.consume(IDENTIFIER);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'PseudoClassSelector',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
name: name,
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(':');
|
||||
processChunk(node.name);
|
||||
|
||||
if (node.children !== null) {
|
||||
processChunk('(');
|
||||
this.each(processChunk, node);
|
||||
processChunk(')');
|
||||
}
|
||||
},
|
||||
walkContext: 'function'
|
||||
};
|
||||
62
build/node_modules/css-tree/lib/syntax/node/PseudoElementSelector.js
generated
vendored
Normal file
62
build/node_modules/css-tree/lib/syntax/node/PseudoElementSelector.js
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
var List = require('../../utils/list');
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var FUNCTION = TYPE.Function;
|
||||
var COLON = TYPE.Colon;
|
||||
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
|
||||
|
||||
// :: ident [ '(' .. ')' ]?
|
||||
module.exports = {
|
||||
name: 'PseudoElementSelector',
|
||||
structure: {
|
||||
name: String,
|
||||
children: [['Raw'], null]
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var children = null;
|
||||
var name;
|
||||
var nameLowerCase;
|
||||
|
||||
this.scanner.eat(COLON);
|
||||
this.scanner.eat(COLON);
|
||||
|
||||
if (this.scanner.tokenType === FUNCTION) {
|
||||
name = this.scanner.consumeFunctionName();
|
||||
nameLowerCase = name.toLowerCase();
|
||||
|
||||
if (this.pseudo.hasOwnProperty(nameLowerCase)) {
|
||||
this.scanner.skipSC();
|
||||
children = this.pseudo[nameLowerCase].call(this);
|
||||
this.scanner.skipSC();
|
||||
} else {
|
||||
children = new List().appendData(
|
||||
this.Raw(this.scanner.currentToken, 0, 0, false, false)
|
||||
);
|
||||
}
|
||||
|
||||
this.scanner.eat(RIGHTPARENTHESIS);
|
||||
} else {
|
||||
name = this.scanner.consume(IDENTIFIER);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'PseudoElementSelector',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
name: name,
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('::');
|
||||
processChunk(node.name);
|
||||
|
||||
if (node.children !== null) {
|
||||
processChunk('(');
|
||||
this.each(processChunk, node);
|
||||
processChunk(')');
|
||||
}
|
||||
},
|
||||
walkContext: 'function'
|
||||
};
|
||||
57
build/node_modules/css-tree/lib/syntax/node/Ratio.js
generated
vendored
Normal file
57
build/node_modules/css-tree/lib/syntax/node/Ratio.js
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
var isNumber = require('../../tokenizer').isNumber;
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
var NUMBER = TYPE.Number;
|
||||
var SOLIDUS = TYPE.Solidus;
|
||||
var FULLSTOP = TYPE.FullStop;
|
||||
|
||||
// Terms of <ratio> should to be a positive number (not zero or negative)
|
||||
// (see https://drafts.csswg.org/mediaqueries-3/#values)
|
||||
// However, -o-min-device-pixel-ratio takes fractional values as a ratio's term
|
||||
// and this is using by various sites. Therefore we relax checking on parse
|
||||
// to test a term is unsigned number without exponent part.
|
||||
// Additional checks may to be applied on lexer validation.
|
||||
function consumeNumber(scanner) {
|
||||
var value = scanner.consumeNonWS(NUMBER);
|
||||
|
||||
for (var i = 0; i < value.length; i++) {
|
||||
var code = value.charCodeAt(i);
|
||||
if (!isNumber(code) && code !== FULLSTOP) {
|
||||
scanner.error('Unsigned number is expected', scanner.tokenStart - value.length + i);
|
||||
}
|
||||
}
|
||||
|
||||
if (Number(value) === 0) {
|
||||
scanner.error('Zero number is not allowed', scanner.tokenStart - value.length);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
// <positive-integer> S* '/' S* <positive-integer>
|
||||
module.exports = {
|
||||
name: 'Ratio',
|
||||
structure: {
|
||||
left: String,
|
||||
right: String
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var left = consumeNumber(this.scanner);
|
||||
var right;
|
||||
|
||||
this.scanner.eatNonWS(SOLIDUS);
|
||||
right = consumeNumber(this.scanner);
|
||||
|
||||
return {
|
||||
type: 'Ratio',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
left: left,
|
||||
right: right
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.left);
|
||||
processChunk('/');
|
||||
processChunk(node.right);
|
||||
}
|
||||
};
|
||||
34
build/node_modules/css-tree/lib/syntax/node/Raw.js
generated
vendored
Normal file
34
build/node_modules/css-tree/lib/syntax/node/Raw.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
module.exports = {
|
||||
name: 'Raw',
|
||||
structure: {
|
||||
value: String
|
||||
},
|
||||
parse: function(startToken, endTokenType1, endTokenType2, includeTokenType2, excludeWhiteSpace) {
|
||||
var startOffset = this.scanner.getTokenStart(startToken);
|
||||
var endOffset;
|
||||
|
||||
this.scanner.skip(
|
||||
this.scanner.getRawLength(
|
||||
startToken,
|
||||
endTokenType1,
|
||||
endTokenType2,
|
||||
includeTokenType2
|
||||
)
|
||||
);
|
||||
|
||||
if (excludeWhiteSpace && this.scanner.tokenStart > startOffset) {
|
||||
endOffset = this.scanner.getOffsetExcludeWS();
|
||||
} else {
|
||||
endOffset = this.scanner.tokenStart;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Raw',
|
||||
loc: this.getLocation(startOffset, endOffset),
|
||||
value: this.scanner.source.substring(startOffset, endOffset)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.value);
|
||||
}
|
||||
};
|
||||
47
build/node_modules/css-tree/lib/syntax/node/Rule.js
generated
vendored
Normal file
47
build/node_modules/css-tree/lib/syntax/node/Rule.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var LEFTCURLYBRACKET = TYPE.LeftCurlyBracket;
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, LEFTCURLYBRACKET, 0, false, true);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
name: 'Rule',
|
||||
structure: {
|
||||
prelude: ['SelectorList', 'Raw'],
|
||||
block: ['Block']
|
||||
},
|
||||
parse: function() {
|
||||
var startToken = this.scanner.currentToken;
|
||||
var startOffset = this.scanner.tokenStart;
|
||||
var prelude;
|
||||
var block;
|
||||
|
||||
if (this.parseRulePrelude) {
|
||||
prelude = this.tolerantParse(this.SelectorList, consumeRaw);
|
||||
|
||||
if (this.tolerant && !this.scanner.eof) {
|
||||
if (prelude.type !== 'Raw' && this.scanner.tokenType !== LEFTCURLYBRACKET) {
|
||||
prelude = consumeRaw.call(this, startToken);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
prelude = consumeRaw.call(this, startToken);
|
||||
}
|
||||
|
||||
block = this.Block(true);
|
||||
|
||||
return {
|
||||
type: 'Rule',
|
||||
loc: this.getLocation(startOffset, this.scanner.tokenStart),
|
||||
prelude: prelude,
|
||||
block: block
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.generate(processChunk, node.prelude);
|
||||
this.generate(processChunk, node.block);
|
||||
},
|
||||
walkContext: 'rule'
|
||||
};
|
||||
32
build/node_modules/css-tree/lib/syntax/node/Selector.js
generated
vendored
Normal file
32
build/node_modules/css-tree/lib/syntax/node/Selector.js
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
module.exports = {
|
||||
name: 'Selector',
|
||||
structure: {
|
||||
children: [[
|
||||
'TypeSelector',
|
||||
'IdSelector',
|
||||
'ClassSelector',
|
||||
'AttributeSelector',
|
||||
'PseudoClassSelector',
|
||||
'PseudoElementSelector',
|
||||
'Combinator',
|
||||
'WhiteSpace'
|
||||
]]
|
||||
},
|
||||
parse: function() {
|
||||
var children = this.readSequence(this.scope.Selector);
|
||||
|
||||
// nothing were consumed
|
||||
if (children.isEmpty()) {
|
||||
this.scanner.error('Selector is expected');
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Selector',
|
||||
loc: this.getLocationFromList(children),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.each(processChunk, node);
|
||||
}
|
||||
};
|
||||
35
build/node_modules/css-tree/lib/syntax/node/SelectorList.js
generated
vendored
Normal file
35
build/node_modules/css-tree/lib/syntax/node/SelectorList.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
var List = require('../../utils/list');
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var COMMA = TYPE.Comma;
|
||||
|
||||
module.exports = {
|
||||
name: 'SelectorList',
|
||||
structure: {
|
||||
children: [['Selector', 'Raw']]
|
||||
},
|
||||
parse: function() {
|
||||
var children = new List();
|
||||
|
||||
while (!this.scanner.eof) {
|
||||
children.appendData(this.Selector());
|
||||
|
||||
if (this.scanner.tokenType === COMMA) {
|
||||
this.scanner.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'SelectorList',
|
||||
loc: this.getLocationFromList(children),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.eachComma(processChunk, node);
|
||||
},
|
||||
walkContext: 'selector'
|
||||
};
|
||||
18
build/node_modules/css-tree/lib/syntax/node/String.js
generated
vendored
Normal file
18
build/node_modules/css-tree/lib/syntax/node/String.js
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
var STRING = require('../../tokenizer').TYPE.String;
|
||||
|
||||
module.exports = {
|
||||
name: 'String',
|
||||
structure: {
|
||||
value: String
|
||||
},
|
||||
parse: function() {
|
||||
return {
|
||||
type: 'String',
|
||||
loc: this.getLocation(this.scanner.tokenStart, this.scanner.tokenEnd),
|
||||
value: this.scanner.consume(STRING)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.value);
|
||||
}
|
||||
};
|
||||
75
build/node_modules/css-tree/lib/syntax/node/StyleSheet.js
generated
vendored
Normal file
75
build/node_modules/css-tree/lib/syntax/node/StyleSheet.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
var List = require('../../utils/list');
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
var EXCLAMATIONMARK = TYPE.ExclamationMark;
|
||||
var ATRULE = TYPE.Atrule;
|
||||
var CDO = TYPE.CDO;
|
||||
var CDC = TYPE.CDC;
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, 0, 0, false, false);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
name: 'StyleSheet',
|
||||
structure: {
|
||||
children: [['Comment', 'Atrule', 'Rule', 'Raw']]
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var children = new List();
|
||||
var child;
|
||||
|
||||
scan:
|
||||
while (!this.scanner.eof) {
|
||||
switch (this.scanner.tokenType) {
|
||||
case WHITESPACE:
|
||||
this.scanner.next();
|
||||
continue;
|
||||
|
||||
case COMMENT:
|
||||
// ignore comments except exclamation comments (i.e. /*! .. */) on top level
|
||||
if (this.scanner.source.charCodeAt(this.scanner.tokenStart + 2) !== EXCLAMATIONMARK) {
|
||||
this.scanner.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
child = this.Comment();
|
||||
break;
|
||||
|
||||
case CDO: // <!--
|
||||
child = this.CDO();
|
||||
break;
|
||||
|
||||
case CDC: // -->
|
||||
child = this.CDC();
|
||||
break;
|
||||
|
||||
// CSS Syntax Module Level 3
|
||||
// §2.2 Error handling
|
||||
// At the "top level" of a stylesheet, an <at-keyword-token> starts an at-rule.
|
||||
case ATRULE:
|
||||
child = this.Atrule();
|
||||
break;
|
||||
|
||||
// Anything else starts a qualified rule ...
|
||||
default:
|
||||
child = this.tolerantParse(this.Rule, consumeRaw);
|
||||
}
|
||||
|
||||
children.appendData(child);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'StyleSheet',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.each(processChunk, node);
|
||||
},
|
||||
walkContext: 'stylesheet'
|
||||
};
|
||||
53
build/node_modules/css-tree/lib/syntax/node/TypeSelector.js
generated
vendored
Normal file
53
build/node_modules/css-tree/lib/syntax/node/TypeSelector.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var ASTERISK = TYPE.Asterisk;
|
||||
var VERTICALLINE = TYPE.VerticalLine;
|
||||
|
||||
function eatIdentifierOrAsterisk() {
|
||||
if (this.scanner.tokenType !== IDENTIFIER &&
|
||||
this.scanner.tokenType !== ASTERISK) {
|
||||
this.scanner.error('Identifier or asterisk is expected');
|
||||
}
|
||||
|
||||
this.scanner.next();
|
||||
}
|
||||
|
||||
// ident
|
||||
// ident|ident
|
||||
// ident|*
|
||||
// *
|
||||
// *|ident
|
||||
// *|*
|
||||
// |ident
|
||||
// |*
|
||||
module.exports = {
|
||||
name: 'TypeSelector',
|
||||
structure: {
|
||||
name: String
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
|
||||
if (this.scanner.tokenType === VERTICALLINE) {
|
||||
this.scanner.next();
|
||||
eatIdentifierOrAsterisk.call(this);
|
||||
} else {
|
||||
eatIdentifierOrAsterisk.call(this);
|
||||
|
||||
if (this.scanner.tokenType === VERTICALLINE) {
|
||||
this.scanner.next();
|
||||
eatIdentifierOrAsterisk.call(this);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'TypeSelector',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
name: this.scanner.substrToCursor(start)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.name);
|
||||
}
|
||||
};
|
||||
125
build/node_modules/css-tree/lib/syntax/node/UnicodeRange.js
generated
vendored
Normal file
125
build/node_modules/css-tree/lib/syntax/node/UnicodeRange.js
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
var isHex = require('../../tokenizer').isHex;
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var NUMBER = TYPE.Number;
|
||||
var PLUSSIGN = TYPE.PlusSign;
|
||||
var HYPHENMINUS = TYPE.HyphenMinus;
|
||||
var FULLSTOP = TYPE.FullStop;
|
||||
var QUESTIONMARK = TYPE.QuestionMark;
|
||||
|
||||
function scanUnicodeNumber(scanner) {
|
||||
for (var pos = scanner.tokenStart + 1; pos < scanner.tokenEnd; pos++) {
|
||||
var code = scanner.source.charCodeAt(pos);
|
||||
|
||||
// break on fullstop or hyperminus/plussign after exponent
|
||||
if (code === FULLSTOP || code === PLUSSIGN) {
|
||||
// break token, exclude symbol
|
||||
scanner.tokenStart = pos;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-syntax-3/#urange
|
||||
function scanUnicodeRange(scanner) {
|
||||
var hexStart = scanner.tokenStart + 1; // skip +
|
||||
var hexLength = 0;
|
||||
|
||||
scan: {
|
||||
if (scanner.tokenType === NUMBER) {
|
||||
if (scanner.source.charCodeAt(scanner.tokenStart) !== FULLSTOP && scanUnicodeNumber(scanner)) {
|
||||
scanner.next();
|
||||
} else if (scanner.source.charCodeAt(scanner.tokenStart) !== HYPHENMINUS) {
|
||||
break scan;
|
||||
}
|
||||
} else {
|
||||
scanner.next(); // PLUSSIGN
|
||||
}
|
||||
|
||||
if (scanner.tokenType === HYPHENMINUS) {
|
||||
scanner.next();
|
||||
}
|
||||
|
||||
if (scanner.tokenType === NUMBER) {
|
||||
scanner.next();
|
||||
}
|
||||
|
||||
if (scanner.tokenType === IDENTIFIER) {
|
||||
scanner.next();
|
||||
}
|
||||
|
||||
if (scanner.tokenStart === hexStart) {
|
||||
scanner.error('Unexpected input', hexStart);
|
||||
}
|
||||
}
|
||||
|
||||
// validate for U+x{1,6} or U+x{1,6}-x{1,6}
|
||||
// where x is [0-9a-fA-F]
|
||||
for (var i = hexStart, wasHyphenMinus = false; i < scanner.tokenStart; i++) {
|
||||
var code = scanner.source.charCodeAt(i);
|
||||
|
||||
if (isHex(code) === false && (code !== HYPHENMINUS || wasHyphenMinus)) {
|
||||
scanner.error('Unexpected input', i);
|
||||
}
|
||||
|
||||
if (code === HYPHENMINUS) {
|
||||
// hex sequence shouldn't be an empty
|
||||
if (hexLength === 0) {
|
||||
scanner.error('Unexpected input', i);
|
||||
}
|
||||
|
||||
wasHyphenMinus = true;
|
||||
hexLength = 0;
|
||||
} else {
|
||||
hexLength++;
|
||||
|
||||
// too long hex sequence
|
||||
if (hexLength > 6) {
|
||||
scanner.error('Too long hex sequence', i);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// check we have a non-zero sequence
|
||||
if (hexLength === 0) {
|
||||
scanner.error('Unexpected input', i - 1);
|
||||
}
|
||||
|
||||
// U+abc???
|
||||
if (!wasHyphenMinus) {
|
||||
// consume as many U+003F QUESTION MARK (?) code points as possible
|
||||
for (; hexLength < 6 && !scanner.eof; scanner.next()) {
|
||||
if (scanner.tokenType !== QUESTIONMARK) {
|
||||
break;
|
||||
}
|
||||
|
||||
hexLength++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
name: 'UnicodeRange',
|
||||
structure: {
|
||||
value: String
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
|
||||
this.scanner.next(); // U or u
|
||||
scanUnicodeRange(this.scanner);
|
||||
|
||||
return {
|
||||
type: 'UnicodeRange',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
value: this.scanner.substrToCursor(start)
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.value);
|
||||
}
|
||||
};
|
||||
49
build/node_modules/css-tree/lib/syntax/node/Url.js
generated
vendored
Normal file
49
build/node_modules/css-tree/lib/syntax/node/Url.js
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var STRING = TYPE.String;
|
||||
var URL = TYPE.Url;
|
||||
var RAW = TYPE.Raw;
|
||||
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
|
||||
|
||||
// url '(' S* (string | raw) S* ')'
|
||||
module.exports = {
|
||||
name: 'Url',
|
||||
structure: {
|
||||
value: ['String', 'Raw']
|
||||
},
|
||||
parse: function() {
|
||||
var start = this.scanner.tokenStart;
|
||||
var value;
|
||||
|
||||
this.scanner.eat(URL);
|
||||
this.scanner.skipSC();
|
||||
|
||||
switch (this.scanner.tokenType) {
|
||||
case STRING:
|
||||
value = this.String();
|
||||
break;
|
||||
|
||||
case RAW:
|
||||
value = this.Raw(this.scanner.currentToken, 0, RAW, true, false);
|
||||
break;
|
||||
|
||||
default:
|
||||
this.scanner.error('String or Raw is expected');
|
||||
}
|
||||
|
||||
this.scanner.skipSC();
|
||||
this.scanner.eat(RIGHTPARENTHESIS);
|
||||
|
||||
return {
|
||||
type: 'Url',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
value: value
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk('url');
|
||||
processChunk('(');
|
||||
this.generate(processChunk, node.value);
|
||||
processChunk(')');
|
||||
}
|
||||
};
|
||||
61
build/node_modules/css-tree/lib/syntax/node/Value.js
generated
vendored
Normal file
61
build/node_modules/css-tree/lib/syntax/node/Value.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
var endsWith = require('../../tokenizer').endsWith;
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var COMMENT = TYPE.Comment;
|
||||
var FUNCTION = TYPE.Function;
|
||||
var COLON = TYPE.Colon;
|
||||
var SEMICOLON = TYPE.Semicolon;
|
||||
var EXCLAMATIONMARK = TYPE.ExclamationMark;
|
||||
|
||||
// 'progid:' ws* 'DXImageTransform.Microsoft.' ident ws* '(' .* ')'
|
||||
function checkProgid(scanner) {
|
||||
var offset = 0;
|
||||
|
||||
for (var type; type = scanner.lookupType(offset); offset++) {
|
||||
if (type !== WHITESPACE && type !== COMMENT) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (scanner.lookupValue(offset, 'alpha(') ||
|
||||
scanner.lookupValue(offset, 'chroma(') ||
|
||||
scanner.lookupValue(offset, 'dropshadow(')) {
|
||||
if (scanner.lookupType(offset) !== FUNCTION) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
if (scanner.lookupValue(offset, 'progid') === false ||
|
||||
scanner.lookupType(offset + 1) !== COLON) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
name: 'Value',
|
||||
structure: {
|
||||
children: [[]]
|
||||
},
|
||||
parse: function(property) {
|
||||
// special parser for filter property since it can contains non-standart syntax for old IE
|
||||
if (property !== null && endsWith(property, 'filter') && checkProgid(this.scanner)) {
|
||||
this.scanner.skipSC();
|
||||
return this.Raw(this.scanner.currentToken, EXCLAMATIONMARK, SEMICOLON, false, false);
|
||||
}
|
||||
|
||||
var start = this.scanner.tokenStart;
|
||||
var children = this.readSequence(this.scope.Value);
|
||||
|
||||
return {
|
||||
type: 'Value',
|
||||
loc: this.getLocation(start, this.scanner.tokenStart),
|
||||
children: children
|
||||
};
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
this.each(processChunk, node);
|
||||
}
|
||||
};
|
||||
26
build/node_modules/css-tree/lib/syntax/node/WhiteSpace.js
generated
vendored
Normal file
26
build/node_modules/css-tree/lib/syntax/node/WhiteSpace.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
var WHITESPACE = require('../../tokenizer').TYPE.WhiteSpace;
|
||||
var SPACE = Object.freeze({
|
||||
type: 'WhiteSpace',
|
||||
loc: null,
|
||||
value: ' '
|
||||
});
|
||||
|
||||
module.exports = {
|
||||
name: 'WhiteSpace',
|
||||
structure: {
|
||||
value: String
|
||||
},
|
||||
parse: function() {
|
||||
this.scanner.eat(WHITESPACE);
|
||||
return SPACE;
|
||||
|
||||
// return {
|
||||
// type: 'WhiteSpace',
|
||||
// loc: this.getLocation(this.scanner.tokenStart, this.scanner.tokenEnd),
|
||||
// value: this.scanner.consume(WHITESPACE)
|
||||
// };
|
||||
},
|
||||
generate: function(processChunk, node) {
|
||||
processChunk(node.value);
|
||||
}
|
||||
};
|
||||
42
build/node_modules/css-tree/lib/syntax/node/index.js
generated
vendored
Normal file
42
build/node_modules/css-tree/lib/syntax/node/index.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
module.exports = {
|
||||
AnPlusB: require('./AnPlusB'),
|
||||
Atrule: require('./Atrule'),
|
||||
AtrulePrelude: require('./AtrulePrelude'),
|
||||
AttributeSelector: require('./AttributeSelector'),
|
||||
Block: require('./Block'),
|
||||
Brackets: require('./Brackets'),
|
||||
CDC: require('./CDC'),
|
||||
CDO: require('./CDO'),
|
||||
ClassSelector: require('./ClassSelector'),
|
||||
Combinator: require('./Combinator'),
|
||||
Comment: require('./Comment'),
|
||||
Declaration: require('./Declaration'),
|
||||
DeclarationList: require('./DeclarationList'),
|
||||
Dimension: require('./Dimension'),
|
||||
Function: require('./Function'),
|
||||
HexColor: require('./HexColor'),
|
||||
Identifier: require('./Identifier'),
|
||||
IdSelector: require('./IdSelector'),
|
||||
MediaFeature: require('./MediaFeature'),
|
||||
MediaQuery: require('./MediaQuery'),
|
||||
MediaQueryList: require('./MediaQueryList'),
|
||||
Nth: require('./Nth'),
|
||||
Number: require('./Number'),
|
||||
Operator: require('./Operator'),
|
||||
Parentheses: require('./Parentheses'),
|
||||
Percentage: require('./Percentage'),
|
||||
PseudoClassSelector: require('./PseudoClassSelector'),
|
||||
PseudoElementSelector: require('./PseudoElementSelector'),
|
||||
Ratio: require('./Ratio'),
|
||||
Raw: require('./Raw'),
|
||||
Rule: require('./Rule'),
|
||||
Selector: require('./Selector'),
|
||||
SelectorList: require('./SelectorList'),
|
||||
String: require('./String'),
|
||||
StyleSheet: require('./StyleSheet'),
|
||||
TypeSelector: require('./TypeSelector'),
|
||||
UnicodeRange: require('./UnicodeRange'),
|
||||
Url: require('./Url'),
|
||||
Value: require('./Value'),
|
||||
WhiteSpace: require('./WhiteSpace')
|
||||
};
|
||||
10
build/node_modules/css-tree/lib/syntax/pseudo/common/nth.js
generated
vendored
Normal file
10
build/node_modules/css-tree/lib/syntax/pseudo/common/nth.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
var List = require('../../../utils/list');
|
||||
var DISALLOW_OF_CLAUSE = false;
|
||||
|
||||
module.exports = {
|
||||
parse: function nth() {
|
||||
return new List().appendData(
|
||||
this.Nth(DISALLOW_OF_CLAUSE)
|
||||
);
|
||||
}
|
||||
};
|
||||
10
build/node_modules/css-tree/lib/syntax/pseudo/common/nthWithOfClause.js
generated
vendored
Normal file
10
build/node_modules/css-tree/lib/syntax/pseudo/common/nthWithOfClause.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
var List = require('../../../utils/list');
|
||||
var ALLOW_OF_CLAUSE = true;
|
||||
|
||||
module.exports = {
|
||||
parse: function() {
|
||||
return new List().appendData(
|
||||
this.Nth(ALLOW_OF_CLAUSE)
|
||||
);
|
||||
}
|
||||
};
|
||||
9
build/node_modules/css-tree/lib/syntax/pseudo/common/selectorList.js
generated
vendored
Normal file
9
build/node_modules/css-tree/lib/syntax/pseudo/common/selectorList.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
var List = require('../../../utils/list');
|
||||
|
||||
module.exports = {
|
||||
parse: function selectorList() {
|
||||
return new List().appendData(
|
||||
this.SelectorList()
|
||||
);
|
||||
}
|
||||
};
|
||||
9
build/node_modules/css-tree/lib/syntax/pseudo/dir.js
generated
vendored
Normal file
9
build/node_modules/css-tree/lib/syntax/pseudo/dir.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
var List = require('../../utils/list');
|
||||
|
||||
module.exports = {
|
||||
parse: function() {
|
||||
return new List().appendData(
|
||||
this.Identifier()
|
||||
);
|
||||
}
|
||||
};
|
||||
9
build/node_modules/css-tree/lib/syntax/pseudo/has.js
generated
vendored
Normal file
9
build/node_modules/css-tree/lib/syntax/pseudo/has.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
var List = require('../../utils/list');
|
||||
|
||||
module.exports = {
|
||||
parse: function() {
|
||||
return new List().appendData(
|
||||
this.SelectorList()
|
||||
);
|
||||
}
|
||||
};
|
||||
12
build/node_modules/css-tree/lib/syntax/pseudo/index.js
generated
vendored
Normal file
12
build/node_modules/css-tree/lib/syntax/pseudo/index.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
module.exports = {
|
||||
'dir': require('./dir'),
|
||||
'has': require('./has'),
|
||||
'lang': require('./lang'),
|
||||
'matches': require('./matches'),
|
||||
'not': require('./not'),
|
||||
'nth-child': require('./nth-child'),
|
||||
'nth-last-child': require('./nth-last-child'),
|
||||
'nth-last-of-type': require('./nth-last-of-type'),
|
||||
'nth-of-type': require('./nth-of-type'),
|
||||
'slotted': require('./slotted')
|
||||
};
|
||||
9
build/node_modules/css-tree/lib/syntax/pseudo/lang.js
generated
vendored
Normal file
9
build/node_modules/css-tree/lib/syntax/pseudo/lang.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
var List = require('../../utils/list');
|
||||
|
||||
module.exports = {
|
||||
parse: function() {
|
||||
return new List().appendData(
|
||||
this.Identifier()
|
||||
);
|
||||
}
|
||||
};
|
||||
1
build/node_modules/css-tree/lib/syntax/pseudo/matches.js
generated
vendored
Normal file
1
build/node_modules/css-tree/lib/syntax/pseudo/matches.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = require('./common/selectorList');
|
||||
1
build/node_modules/css-tree/lib/syntax/pseudo/not.js
generated
vendored
Normal file
1
build/node_modules/css-tree/lib/syntax/pseudo/not.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = require('./common/selectorList');
|
||||
1
build/node_modules/css-tree/lib/syntax/pseudo/nth-child.js
generated
vendored
Normal file
1
build/node_modules/css-tree/lib/syntax/pseudo/nth-child.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = require('./common/nthWithOfClause');
|
||||
1
build/node_modules/css-tree/lib/syntax/pseudo/nth-last-child.js
generated
vendored
Normal file
1
build/node_modules/css-tree/lib/syntax/pseudo/nth-last-child.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = require('./common/nthWithOfClause');
|
||||
1
build/node_modules/css-tree/lib/syntax/pseudo/nth-last-of-type.js
generated
vendored
Normal file
1
build/node_modules/css-tree/lib/syntax/pseudo/nth-last-of-type.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = require('./common/nth');
|
||||
1
build/node_modules/css-tree/lib/syntax/pseudo/nth-of-type.js
generated
vendored
Normal file
1
build/node_modules/css-tree/lib/syntax/pseudo/nth-of-type.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = require('./common/nth');
|
||||
9
build/node_modules/css-tree/lib/syntax/pseudo/slotted.js
generated
vendored
Normal file
9
build/node_modules/css-tree/lib/syntax/pseudo/slotted.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
var List = require('../../utils/list');
|
||||
|
||||
module.exports = {
|
||||
parse: function compoundSelector() {
|
||||
return new List().appendData(
|
||||
this.Selector()
|
||||
);
|
||||
}
|
||||
};
|
||||
3
build/node_modules/css-tree/lib/syntax/scope/atrulePrelude.js
generated
vendored
Normal file
3
build/node_modules/css-tree/lib/syntax/scope/atrulePrelude.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
getNode: require('./default')
|
||||
};
|
||||
78
build/node_modules/css-tree/lib/syntax/scope/default.js
generated
vendored
Normal file
78
build/node_modules/css-tree/lib/syntax/scope/default.js
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
var cmpChar = require('../../tokenizer').cmpChar;
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var STRING = TYPE.String;
|
||||
var NUMBER = TYPE.Number;
|
||||
var FUNCTION = TYPE.Function;
|
||||
var URL = TYPE.Url;
|
||||
var NUMBERSIGN = TYPE.NumberSign;
|
||||
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
|
||||
var LEFTSQUAREBRACKET = TYPE.LeftSquareBracket;
|
||||
var PLUSSIGN = TYPE.PlusSign;
|
||||
var HYPHENMINUS = TYPE.HyphenMinus;
|
||||
var COMMA = TYPE.Comma;
|
||||
var SOLIDUS = TYPE.Solidus;
|
||||
var ASTERISK = TYPE.Asterisk;
|
||||
var PERCENTSIGN = TYPE.PercentSign;
|
||||
var BACKSLASH = TYPE.Backslash;
|
||||
var U = 117; // 'u'.charCodeAt(0)
|
||||
|
||||
module.exports = function defaultRecognizer(context) {
|
||||
switch (this.scanner.tokenType) {
|
||||
case NUMBERSIGN:
|
||||
return this.HexColor();
|
||||
|
||||
case COMMA:
|
||||
context.space = null;
|
||||
context.ignoreWSAfter = true;
|
||||
return this.Operator();
|
||||
|
||||
case SOLIDUS:
|
||||
case ASTERISK:
|
||||
case PLUSSIGN:
|
||||
case HYPHENMINUS:
|
||||
return this.Operator();
|
||||
|
||||
case LEFTPARENTHESIS:
|
||||
return this.Parentheses(this.readSequence, context.recognizer);
|
||||
|
||||
case LEFTSQUAREBRACKET:
|
||||
return this.Brackets(this.readSequence, context.recognizer);
|
||||
|
||||
case STRING:
|
||||
return this.String();
|
||||
|
||||
case NUMBER:
|
||||
switch (this.scanner.lookupType(1)) {
|
||||
case PERCENTSIGN:
|
||||
return this.Percentage();
|
||||
|
||||
case IDENTIFIER:
|
||||
// edge case: number with folowing \0 and \9 hack shouldn't to be a Dimension
|
||||
if (cmpChar(this.scanner.source, this.scanner.tokenEnd, BACKSLASH)) {
|
||||
return this.Number();
|
||||
} else {
|
||||
return this.Dimension();
|
||||
}
|
||||
|
||||
default:
|
||||
return this.Number();
|
||||
}
|
||||
|
||||
case FUNCTION:
|
||||
return this.Function(this.readSequence, context.recognizer);
|
||||
|
||||
case URL:
|
||||
return this.Url();
|
||||
|
||||
case IDENTIFIER:
|
||||
// check for unicode range, it should start with u+ or U+
|
||||
if (cmpChar(this.scanner.source, this.scanner.tokenStart, U) &&
|
||||
cmpChar(this.scanner.source, this.scanner.tokenStart + 1, PLUSSIGN)) {
|
||||
return this.UnicodeRange();
|
||||
} else {
|
||||
return this.Identifier();
|
||||
}
|
||||
}
|
||||
};
|
||||
5
build/node_modules/css-tree/lib/syntax/scope/index.js
generated
vendored
Normal file
5
build/node_modules/css-tree/lib/syntax/scope/index.js
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
module.exports = {
|
||||
AtrulePrelude: require('./atrulePrelude'),
|
||||
Selector: require('./selector'),
|
||||
Value: require('./value')
|
||||
};
|
||||
56
build/node_modules/css-tree/lib/syntax/scope/selector.js
generated
vendored
Normal file
56
build/node_modules/css-tree/lib/syntax/scope/selector.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
var TYPE = require('../../tokenizer').TYPE;
|
||||
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var NUMBER = TYPE.Number;
|
||||
var NUMBERSIGN = TYPE.NumberSign;
|
||||
var LEFTSQUAREBRACKET = TYPE.LeftSquareBracket;
|
||||
var PLUSSIGN = TYPE.PlusSign;
|
||||
var SOLIDUS = TYPE.Solidus;
|
||||
var ASTERISK = TYPE.Asterisk;
|
||||
var FULLSTOP = TYPE.FullStop;
|
||||
var COLON = TYPE.Colon;
|
||||
var GREATERTHANSIGN = TYPE.GreaterThanSign;
|
||||
var VERTICALLINE = TYPE.VerticalLine;
|
||||
var TILDE = TYPE.Tilde;
|
||||
|
||||
function getNode(context) {
|
||||
switch (this.scanner.tokenType) {
|
||||
case PLUSSIGN:
|
||||
case GREATERTHANSIGN:
|
||||
case TILDE:
|
||||
context.space = null;
|
||||
context.ignoreWSAfter = true;
|
||||
return this.Combinator();
|
||||
|
||||
case SOLIDUS: // /deep/
|
||||
return this.Combinator();
|
||||
|
||||
case FULLSTOP:
|
||||
return this.ClassSelector();
|
||||
|
||||
case LEFTSQUAREBRACKET:
|
||||
return this.AttributeSelector();
|
||||
|
||||
case NUMBERSIGN:
|
||||
return this.IdSelector();
|
||||
|
||||
case COLON:
|
||||
if (this.scanner.lookupType(1) === COLON) {
|
||||
return this.PseudoElementSelector();
|
||||
} else {
|
||||
return this.PseudoClassSelector();
|
||||
}
|
||||
|
||||
case IDENTIFIER:
|
||||
case ASTERISK:
|
||||
case VERTICALLINE:
|
||||
return this.TypeSelector();
|
||||
|
||||
case NUMBER:
|
||||
return this.Percentage();
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
getNode: getNode
|
||||
};
|
||||
7
build/node_modules/css-tree/lib/syntax/scope/value.js
generated
vendored
Normal file
7
build/node_modules/css-tree/lib/syntax/scope/value.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
module.exports = {
|
||||
getNode: require('./default'),
|
||||
'-moz-element': require('../function/element'),
|
||||
'element': require('../function/element'),
|
||||
'expression': require('../function/expression'),
|
||||
'var': require('../function/var')
|
||||
};
|
||||
627
build/node_modules/css-tree/lib/tokenizer/Tokenizer.js
generated
vendored
Normal file
627
build/node_modules/css-tree/lib/tokenizer/Tokenizer.js
generated
vendored
Normal file
@@ -0,0 +1,627 @@
|
||||
'use strict';
|
||||
|
||||
var CssSyntaxError = require('./error');
|
||||
|
||||
var constants = require('./const');
|
||||
var TYPE = constants.TYPE;
|
||||
var NAME = constants.NAME;
|
||||
var SYMBOL_TYPE = constants.SYMBOL_TYPE;
|
||||
|
||||
var utils = require('./utils');
|
||||
var firstCharOffset = utils.firstCharOffset;
|
||||
var cmpStr = utils.cmpStr;
|
||||
var isNumber = utils.isNumber;
|
||||
var findLastNonSpaceLocation = utils.findLastNonSpaceLocation;
|
||||
var findWhiteSpaceEnd = utils.findWhiteSpaceEnd;
|
||||
var findCommentEnd = utils.findCommentEnd;
|
||||
var findStringEnd = utils.findStringEnd;
|
||||
var findNumberEnd = utils.findNumberEnd;
|
||||
var findIdentifierEnd = utils.findIdentifierEnd;
|
||||
var findUrlRawEnd = utils.findUrlRawEnd;
|
||||
|
||||
var NULL = 0;
|
||||
var WHITESPACE = TYPE.WhiteSpace;
|
||||
var IDENTIFIER = TYPE.Identifier;
|
||||
var NUMBER = TYPE.Number;
|
||||
var STRING = TYPE.String;
|
||||
var COMMENT = TYPE.Comment;
|
||||
var PUNCTUATOR = TYPE.Punctuator;
|
||||
var CDO = TYPE.CDO;
|
||||
var CDC = TYPE.CDC;
|
||||
var ATRULE = TYPE.Atrule;
|
||||
var FUNCTION = TYPE.Function;
|
||||
var URL = TYPE.Url;
|
||||
var RAW = TYPE.Raw;
|
||||
|
||||
var N = 10;
|
||||
var F = 12;
|
||||
var R = 13;
|
||||
var STAR = TYPE.Asterisk;
|
||||
var SLASH = TYPE.Solidus;
|
||||
var FULLSTOP = TYPE.FullStop;
|
||||
var PLUSSIGN = TYPE.PlusSign;
|
||||
var HYPHENMINUS = TYPE.HyphenMinus;
|
||||
var GREATERTHANSIGN = TYPE.GreaterThanSign;
|
||||
var LESSTHANSIGN = TYPE.LessThanSign;
|
||||
var EXCLAMATIONMARK = TYPE.ExclamationMark;
|
||||
var COMMERCIALAT = TYPE.CommercialAt;
|
||||
var QUOTATIONMARK = TYPE.QuotationMark;
|
||||
var APOSTROPHE = TYPE.Apostrophe;
|
||||
var LEFTPARENTHESIS = TYPE.LeftParenthesis;
|
||||
var RIGHTPARENTHESIS = TYPE.RightParenthesis;
|
||||
var LEFTCURLYBRACKET = TYPE.LeftCurlyBracket;
|
||||
var RIGHTCURLYBRACKET = TYPE.RightCurlyBracket;
|
||||
var LEFTSQUAREBRACKET = TYPE.LeftSquareBracket;
|
||||
var RIGHTSQUAREBRACKET = TYPE.RightSquareBracket;
|
||||
|
||||
var MIN_BUFFER_SIZE = 16 * 1024;
|
||||
var OFFSET_MASK = 0x00FFFFFF;
|
||||
var TYPE_SHIFT = 24;
|
||||
var SafeUint32Array = typeof Uint32Array !== 'undefined' ? Uint32Array : Array; // fallback on Array when TypedArray is not supported
|
||||
|
||||
function computeLinesAndColumns(tokenizer, source) {
|
||||
var sourceLength = source.length;
|
||||
var start = firstCharOffset(source);
|
||||
var lines = tokenizer.lines;
|
||||
var line = tokenizer.startLine;
|
||||
var columns = tokenizer.columns;
|
||||
var column = tokenizer.startColumn;
|
||||
|
||||
if (lines === null || lines.length < sourceLength + 1) {
|
||||
lines = new SafeUint32Array(Math.max(sourceLength + 1024, MIN_BUFFER_SIZE));
|
||||
columns = new SafeUint32Array(lines.length);
|
||||
}
|
||||
|
||||
for (var i = start; i < sourceLength; i++) {
|
||||
var code = source.charCodeAt(i);
|
||||
|
||||
lines[i] = line;
|
||||
columns[i] = column++;
|
||||
|
||||
if (code === N || code === R || code === F) {
|
||||
if (code === R && i + 1 < sourceLength && source.charCodeAt(i + 1) === N) {
|
||||
i++;
|
||||
lines[i] = line;
|
||||
columns[i] = column;
|
||||
}
|
||||
|
||||
line++;
|
||||
column = 1;
|
||||
}
|
||||
}
|
||||
|
||||
lines[i] = line;
|
||||
columns[i] = column;
|
||||
|
||||
tokenizer.linesAnsColumnsComputed = true;
|
||||
tokenizer.lines = lines;
|
||||
tokenizer.columns = columns;
|
||||
}
|
||||
|
||||
function tokenLayout(tokenizer, source, startPos) {
|
||||
var sourceLength = source.length;
|
||||
var offsetAndType = tokenizer.offsetAndType;
|
||||
var balance = tokenizer.balance;
|
||||
var tokenCount = 0;
|
||||
var prevType = 0;
|
||||
var offset = startPos;
|
||||
var anchor = 0;
|
||||
var balanceCloseCode = 0;
|
||||
var balanceStart = 0;
|
||||
var balancePrev = 0;
|
||||
|
||||
if (offsetAndType === null || offsetAndType.length < sourceLength + 1) {
|
||||
offsetAndType = new SafeUint32Array(sourceLength + 1024);
|
||||
balance = new SafeUint32Array(sourceLength + 1024);
|
||||
}
|
||||
|
||||
while (offset < sourceLength) {
|
||||
var code = source.charCodeAt(offset);
|
||||
var type = code < 0x80 ? SYMBOL_TYPE[code] : IDENTIFIER;
|
||||
|
||||
balance[tokenCount] = sourceLength;
|
||||
|
||||
switch (type) {
|
||||
case WHITESPACE:
|
||||
offset = findWhiteSpaceEnd(source, offset + 1);
|
||||
break;
|
||||
|
||||
case PUNCTUATOR:
|
||||
switch (code) {
|
||||
case balanceCloseCode:
|
||||
balancePrev = balanceStart & OFFSET_MASK;
|
||||
balanceStart = balance[balancePrev];
|
||||
balanceCloseCode = balanceStart >> TYPE_SHIFT;
|
||||
balance[tokenCount] = balancePrev;
|
||||
balance[balancePrev++] = tokenCount;
|
||||
for (; balancePrev < tokenCount; balancePrev++) {
|
||||
if (balance[balancePrev] === sourceLength) {
|
||||
balance[balancePrev] = tokenCount;
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case LEFTSQUAREBRACKET:
|
||||
balance[tokenCount] = balanceStart;
|
||||
balanceCloseCode = RIGHTSQUAREBRACKET;
|
||||
balanceStart = (balanceCloseCode << TYPE_SHIFT) | tokenCount;
|
||||
break;
|
||||
|
||||
case LEFTCURLYBRACKET:
|
||||
balance[tokenCount] = balanceStart;
|
||||
balanceCloseCode = RIGHTCURLYBRACKET;
|
||||
balanceStart = (balanceCloseCode << TYPE_SHIFT) | tokenCount;
|
||||
break;
|
||||
|
||||
case LEFTPARENTHESIS:
|
||||
balance[tokenCount] = balanceStart;
|
||||
balanceCloseCode = RIGHTPARENTHESIS;
|
||||
balanceStart = (balanceCloseCode << TYPE_SHIFT) | tokenCount;
|
||||
break;
|
||||
}
|
||||
|
||||
// /*
|
||||
if (code === STAR && prevType === SLASH) {
|
||||
type = COMMENT;
|
||||
offset = findCommentEnd(source, offset + 1);
|
||||
tokenCount--; // rewrite prev token
|
||||
break;
|
||||
}
|
||||
|
||||
// edge case for -.123 and +.123
|
||||
if (code === FULLSTOP && (prevType === PLUSSIGN || prevType === HYPHENMINUS)) {
|
||||
if (offset + 1 < sourceLength && isNumber(source.charCodeAt(offset + 1))) {
|
||||
type = NUMBER;
|
||||
offset = findNumberEnd(source, offset + 2, false);
|
||||
tokenCount--; // rewrite prev token
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// <!--
|
||||
if (code === EXCLAMATIONMARK && prevType === LESSTHANSIGN) {
|
||||
if (offset + 2 < sourceLength &&
|
||||
source.charCodeAt(offset + 1) === HYPHENMINUS &&
|
||||
source.charCodeAt(offset + 2) === HYPHENMINUS) {
|
||||
type = CDO;
|
||||
offset = offset + 3;
|
||||
tokenCount--; // rewrite prev token
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// -->
|
||||
if (code === HYPHENMINUS && prevType === HYPHENMINUS) {
|
||||
if (offset + 1 < sourceLength && source.charCodeAt(offset + 1) === GREATERTHANSIGN) {
|
||||
type = CDC;
|
||||
offset = offset + 2;
|
||||
tokenCount--; // rewrite prev token
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// ident(
|
||||
if (code === LEFTPARENTHESIS && prevType === IDENTIFIER) {
|
||||
offset = offset + 1;
|
||||
tokenCount--; // rewrite prev token
|
||||
balance[tokenCount] = balance[tokenCount + 1];
|
||||
balanceStart--;
|
||||
|
||||
// 4 char length identifier and equal to `url(` (case insensitive)
|
||||
if (offset - anchor === 4 && cmpStr(source, anchor, offset, 'url(')) {
|
||||
// special case for url() because it can contain any symbols sequence with few exceptions
|
||||
anchor = findWhiteSpaceEnd(source, offset);
|
||||
code = source.charCodeAt(anchor);
|
||||
if (code !== LEFTPARENTHESIS &&
|
||||
code !== RIGHTPARENTHESIS &&
|
||||
code !== QUOTATIONMARK &&
|
||||
code !== APOSTROPHE) {
|
||||
// url(
|
||||
offsetAndType[tokenCount++] = (URL << TYPE_SHIFT) | offset;
|
||||
balance[tokenCount] = sourceLength;
|
||||
|
||||
// ws*
|
||||
if (anchor !== offset) {
|
||||
offsetAndType[tokenCount++] = (WHITESPACE << TYPE_SHIFT) | anchor;
|
||||
balance[tokenCount] = sourceLength;
|
||||
}
|
||||
|
||||
// raw
|
||||
type = RAW;
|
||||
offset = findUrlRawEnd(source, anchor);
|
||||
} else {
|
||||
type = URL;
|
||||
}
|
||||
} else {
|
||||
type = FUNCTION;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
type = code;
|
||||
offset = offset + 1;
|
||||
break;
|
||||
|
||||
case NUMBER:
|
||||
offset = findNumberEnd(source, offset + 1, prevType !== FULLSTOP);
|
||||
|
||||
// merge number with a preceding dot, dash or plus
|
||||
if (prevType === FULLSTOP ||
|
||||
prevType === HYPHENMINUS ||
|
||||
prevType === PLUSSIGN) {
|
||||
tokenCount--; // rewrite prev token
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case STRING:
|
||||
offset = findStringEnd(source, offset + 1, code);
|
||||
break;
|
||||
|
||||
default:
|
||||
anchor = offset;
|
||||
offset = findIdentifierEnd(source, offset);
|
||||
|
||||
// merge identifier with a preceding dash
|
||||
if (prevType === HYPHENMINUS) {
|
||||
// rewrite prev token
|
||||
tokenCount--;
|
||||
// restore prev prev token type
|
||||
// for case @-prefix-ident
|
||||
prevType = tokenCount === 0 ? 0 : offsetAndType[tokenCount - 1] >> TYPE_SHIFT;
|
||||
}
|
||||
|
||||
if (prevType === COMMERCIALAT) {
|
||||
// rewrite prev token and change type to <at-keyword-token>
|
||||
tokenCount--;
|
||||
type = ATRULE;
|
||||
}
|
||||
}
|
||||
|
||||
offsetAndType[tokenCount++] = (type << TYPE_SHIFT) | offset;
|
||||
prevType = type;
|
||||
}
|
||||
|
||||
// finalize arrays
|
||||
offsetAndType[tokenCount] = offset;
|
||||
balance[tokenCount] = sourceLength;
|
||||
balance[sourceLength] = sourceLength; // prevents false positive balance match with any token
|
||||
while (balanceStart !== 0) {
|
||||
balancePrev = balanceStart & OFFSET_MASK;
|
||||
balanceStart = balance[balancePrev];
|
||||
balance[balancePrev] = sourceLength;
|
||||
}
|
||||
|
||||
tokenizer.offsetAndType = offsetAndType;
|
||||
tokenizer.tokenCount = tokenCount;
|
||||
tokenizer.balance = balance;
|
||||
}
|
||||
|
||||
//
|
||||
// tokenizer
|
||||
//
|
||||
|
||||
var Tokenizer = function(source, startOffset, startLine, startColumn) {
|
||||
this.offsetAndType = null;
|
||||
this.balance = null;
|
||||
this.lines = null;
|
||||
this.columns = null;
|
||||
|
||||
this.setSource(source, startOffset, startLine, startColumn);
|
||||
};
|
||||
|
||||
Tokenizer.prototype = {
|
||||
setSource: function(source, startOffset, startLine, startColumn) {
|
||||
var safeSource = String(source || '');
|
||||
var start = firstCharOffset(safeSource);
|
||||
|
||||
this.source = safeSource;
|
||||
this.firstCharOffset = start;
|
||||
this.startOffset = typeof startOffset === 'undefined' ? 0 : startOffset;
|
||||
this.startLine = typeof startLine === 'undefined' ? 1 : startLine;
|
||||
this.startColumn = typeof startColumn === 'undefined' ? 1 : startColumn;
|
||||
this.linesAnsColumnsComputed = false;
|
||||
|
||||
this.eof = false;
|
||||
this.currentToken = -1;
|
||||
this.tokenType = 0;
|
||||
this.tokenStart = start;
|
||||
this.tokenEnd = start;
|
||||
|
||||
tokenLayout(this, safeSource, start);
|
||||
this.next();
|
||||
},
|
||||
|
||||
lookupType: function(offset) {
|
||||
offset += this.currentToken;
|
||||
|
||||
if (offset < this.tokenCount) {
|
||||
return this.offsetAndType[offset] >> TYPE_SHIFT;
|
||||
}
|
||||
|
||||
return NULL;
|
||||
},
|
||||
lookupNonWSType: function(offset) {
|
||||
offset += this.currentToken;
|
||||
|
||||
for (var type; offset < this.tokenCount; offset++) {
|
||||
type = this.offsetAndType[offset] >> TYPE_SHIFT;
|
||||
|
||||
if (type !== WHITESPACE) {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
return NULL;
|
||||
},
|
||||
lookupValue: function(offset, referenceStr) {
|
||||
offset += this.currentToken;
|
||||
|
||||
if (offset < this.tokenCount) {
|
||||
return cmpStr(
|
||||
this.source,
|
||||
this.offsetAndType[offset - 1] & OFFSET_MASK,
|
||||
this.offsetAndType[offset] & OFFSET_MASK,
|
||||
referenceStr
|
||||
);
|
||||
}
|
||||
|
||||
return false;
|
||||
},
|
||||
getTokenStart: function(tokenNum) {
|
||||
if (tokenNum === this.currentToken) {
|
||||
return this.tokenStart;
|
||||
}
|
||||
|
||||
if (tokenNum > 0) {
|
||||
return tokenNum < this.tokenCount
|
||||
? this.offsetAndType[tokenNum - 1] & OFFSET_MASK
|
||||
: this.offsetAndType[this.tokenCount] & OFFSET_MASK;
|
||||
}
|
||||
|
||||
return this.firstCharOffset;
|
||||
},
|
||||
getOffsetExcludeWS: function() {
|
||||
if (this.currentToken > 0) {
|
||||
if ((this.offsetAndType[this.currentToken - 1] >> TYPE_SHIFT) === WHITESPACE) {
|
||||
return this.currentToken > 1
|
||||
? this.offsetAndType[this.currentToken - 2] & OFFSET_MASK
|
||||
: this.firstCharOffset;
|
||||
}
|
||||
}
|
||||
return this.tokenStart;
|
||||
},
|
||||
getRawLength: function(startToken, endTokenType1, endTokenType2, includeTokenType2) {
|
||||
var cursor = startToken;
|
||||
var balanceEnd;
|
||||
|
||||
loop:
|
||||
for (; cursor < this.tokenCount; cursor++) {
|
||||
balanceEnd = this.balance[cursor];
|
||||
|
||||
// belance end points to offset before start
|
||||
if (balanceEnd < startToken) {
|
||||
break loop;
|
||||
}
|
||||
|
||||
// check token is stop type
|
||||
switch (this.offsetAndType[cursor] >> TYPE_SHIFT) {
|
||||
case endTokenType1:
|
||||
break loop;
|
||||
|
||||
case endTokenType2:
|
||||
if (includeTokenType2) {
|
||||
cursor++;
|
||||
}
|
||||
break loop;
|
||||
|
||||
default:
|
||||
// fast forward to the end of balanced block
|
||||
if (this.balance[balanceEnd] === cursor) {
|
||||
cursor = balanceEnd;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return cursor - this.currentToken;
|
||||
},
|
||||
|
||||
getTokenValue: function() {
|
||||
return this.source.substring(this.tokenStart, this.tokenEnd);
|
||||
},
|
||||
substrToCursor: function(start) {
|
||||
return this.source.substring(start, this.tokenStart);
|
||||
},
|
||||
|
||||
skipWS: function() {
|
||||
for (var i = this.currentToken, skipTokenCount = 0; i < this.tokenCount; i++, skipTokenCount++) {
|
||||
if ((this.offsetAndType[i] >> TYPE_SHIFT) !== WHITESPACE) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (skipTokenCount > 0) {
|
||||
this.skip(skipTokenCount);
|
||||
}
|
||||
},
|
||||
skipSC: function() {
|
||||
while (this.tokenType === WHITESPACE || this.tokenType === COMMENT) {
|
||||
this.next();
|
||||
}
|
||||
},
|
||||
skip: function(tokenCount) {
|
||||
var next = this.currentToken + tokenCount;
|
||||
|
||||
if (next < this.tokenCount) {
|
||||
this.currentToken = next;
|
||||
this.tokenStart = this.offsetAndType[next - 1] & OFFSET_MASK;
|
||||
next = this.offsetAndType[next];
|
||||
this.tokenType = next >> TYPE_SHIFT;
|
||||
this.tokenEnd = next & OFFSET_MASK;
|
||||
} else {
|
||||
this.currentToken = this.tokenCount;
|
||||
this.next();
|
||||
}
|
||||
},
|
||||
next: function() {
|
||||
var next = this.currentToken + 1;
|
||||
|
||||
if (next < this.tokenCount) {
|
||||
this.currentToken = next;
|
||||
this.tokenStart = this.tokenEnd;
|
||||
next = this.offsetAndType[next];
|
||||
this.tokenType = next >> TYPE_SHIFT;
|
||||
this.tokenEnd = next & OFFSET_MASK;
|
||||
} else {
|
||||
this.currentToken = this.tokenCount;
|
||||
this.eof = true;
|
||||
this.tokenType = NULL;
|
||||
this.tokenStart = this.tokenEnd = this.source.length;
|
||||
}
|
||||
},
|
||||
|
||||
eat: function(tokenType) {
|
||||
if (this.tokenType !== tokenType) {
|
||||
var offset = this.tokenStart;
|
||||
var message = NAME[tokenType] + ' is expected';
|
||||
|
||||
// tweak message and offset
|
||||
if (tokenType === IDENTIFIER) {
|
||||
// when identifier is expected but there is a function or url
|
||||
if (this.tokenType === FUNCTION || this.tokenType === URL) {
|
||||
offset = this.tokenEnd - 1;
|
||||
message += ' but function found';
|
||||
}
|
||||
} else {
|
||||
// when test type is part of another token show error for current position + 1
|
||||
// e.g. eat(HYPHENMINUS) will fail on "-foo", but pointing on "-" is odd
|
||||
if (this.source.charCodeAt(this.tokenStart) === tokenType) {
|
||||
offset = offset + 1;
|
||||
}
|
||||
}
|
||||
|
||||
this.error(message, offset);
|
||||
}
|
||||
|
||||
this.next();
|
||||
},
|
||||
eatNonWS: function(tokenType) {
|
||||
this.skipWS();
|
||||
this.eat(tokenType);
|
||||
},
|
||||
|
||||
consume: function(tokenType) {
|
||||
var value = this.getTokenValue();
|
||||
|
||||
this.eat(tokenType);
|
||||
|
||||
return value;
|
||||
},
|
||||
consumeFunctionName: function() {
|
||||
var name = this.source.substring(this.tokenStart, this.tokenEnd - 1);
|
||||
|
||||
this.eat(FUNCTION);
|
||||
|
||||
return name;
|
||||
},
|
||||
consumeNonWS: function(tokenType) {
|
||||
this.skipWS();
|
||||
|
||||
return this.consume(tokenType);
|
||||
},
|
||||
|
||||
expectIdentifier: function(name) {
|
||||
if (this.tokenType !== IDENTIFIER || cmpStr(this.source, this.tokenStart, this.tokenEnd, name) === false) {
|
||||
this.error('Identifier `' + name + '` is expected');
|
||||
}
|
||||
|
||||
this.next();
|
||||
},
|
||||
|
||||
getLocation: function(offset, filename) {
|
||||
if (!this.linesAnsColumnsComputed) {
|
||||
computeLinesAndColumns(this, this.source);
|
||||
}
|
||||
|
||||
return {
|
||||
source: filename,
|
||||
offset: this.startOffset + offset,
|
||||
line: this.lines[offset],
|
||||
column: this.columns[offset]
|
||||
};
|
||||
},
|
||||
|
||||
getLocationRange: function(start, end, filename) {
|
||||
if (!this.linesAnsColumnsComputed) {
|
||||
computeLinesAndColumns(this, this.source);
|
||||
}
|
||||
|
||||
return {
|
||||
source: filename,
|
||||
start: {
|
||||
offset: this.startOffset + start,
|
||||
line: this.lines[start],
|
||||
column: this.columns[start]
|
||||
},
|
||||
end: {
|
||||
offset: this.startOffset + end,
|
||||
line: this.lines[end],
|
||||
column: this.columns[end]
|
||||
}
|
||||
};
|
||||
},
|
||||
|
||||
error: function(message, offset) {
|
||||
var location = typeof offset !== 'undefined' && offset < this.source.length
|
||||
? this.getLocation(offset)
|
||||
: this.eof
|
||||
? findLastNonSpaceLocation(this)
|
||||
: this.getLocation(this.tokenStart);
|
||||
|
||||
throw new CssSyntaxError(
|
||||
message || 'Unexpected input',
|
||||
this.source,
|
||||
location.offset,
|
||||
location.line,
|
||||
location.column
|
||||
);
|
||||
},
|
||||
|
||||
dump: function() {
|
||||
var offset = 0;
|
||||
|
||||
return Array.prototype.slice.call(this.offsetAndType, 0, this.tokenCount).map(function(item, idx) {
|
||||
var start = offset;
|
||||
var end = item & OFFSET_MASK;
|
||||
|
||||
offset = end;
|
||||
|
||||
return {
|
||||
idx: idx,
|
||||
type: NAME[item >> TYPE_SHIFT],
|
||||
chunk: this.source.substring(start, end),
|
||||
balance: this.balance[idx]
|
||||
};
|
||||
}, this);
|
||||
}
|
||||
};
|
||||
|
||||
// extend with error class
Tokenizer.CssSyntaxError = CssSyntaxError;

// expose tokenizer constants and static utils on the constructor
function extendTokenizerStatics(members) {
    Object.keys(members).forEach(function(key) {
        Tokenizer[key] = members[key];
    });
}

extendTokenizerStatics(constants);
extendTokenizerStatics(utils);

// warm up tokenizer to eliminate code branches that never execute
// and fix soft deoptimizations (insufficient type feedback)
new Tokenizer('\n\r\r\n\f<!---->//""\'\'/*\r\n\f*/1a;.\\31\t\+2{url(a);func();+1.2e3 -.4e-5 .6e+7}').getLocation();

module.exports = Tokenizer;
|
||||
171
build/node_modules/css-tree/lib/tokenizer/const.js
generated
vendored
Normal file
171
build/node_modules/css-tree/lib/tokenizer/const.js
generated
vendored
Normal file
@@ -0,0 +1,171 @@
|
||||
'use strict';

// Token type codes. Values are chosen so they never intersect with the
// ASCII char codes that are also used as token types (see TYPE below).
var WHITESPACE = 1;
var IDENTIFIER = 2;
var NUMBER = 3;
var STRING = 4;
var COMMENT = 5;
var PUNCTUATOR = 6;
var CDO = 7;
var CDC = 8;
var ATRULE = 14;
var FUNCTION = 15;
var URL = 16;
var RAW = 17;

// Char codes of whitespace characters
var TAB = 9;
var N = 10;   // \n
var F = 12;   // \f
var R = 13;   // \r
var SPACE = 32;

// Public token type map: named token kinds plus ASCII punctuation
// char codes (a punctuator token is typed by its own char code).
var TYPE = {
    WhiteSpace: WHITESPACE,
    Identifier: IDENTIFIER,
    Number: NUMBER,
    String: STRING,
    Comment: COMMENT,
    Punctuator: PUNCTUATOR,
    CDO: CDO,
    CDC: CDC,
    Atrule: ATRULE,
    Function: FUNCTION,
    Url: URL,
    Raw: RAW,

    ExclamationMark: 33,    // !
    QuotationMark: 34,      // "
    NumberSign: 35,         // #
    DollarSign: 36,         // $
    PercentSign: 37,        // %
    Ampersand: 38,          // &
    Apostrophe: 39,         // '
    LeftParenthesis: 40,    // (
    RightParenthesis: 41,   // )
    Asterisk: 42,           // *
    PlusSign: 43,           // +
    Comma: 44,              // ,
    HyphenMinus: 45,        // -
    FullStop: 46,           // .
    Solidus: 47,            // /
    Colon: 58,              // :
    Semicolon: 59,          // ;
    LessThanSign: 60,       // <
    EqualsSign: 61,         // =
    GreaterThanSign: 62,    // >
    QuestionMark: 63,       // ?
    CommercialAt: 64,       // @
    LeftSquareBracket: 91,  // [
    Backslash: 92,          // \
    RightSquareBracket: 93, // ]
    CircumflexAccent: 94,   // ^
    LowLine: 95,            // _
    GraveAccent: 96,        // `
    LeftCurlyBracket: 123,  // {
    VerticalLine: 124,      // |
    RightCurlyBracket: 125, // }
    Tilde: 126              // ~
};

// Reverse map: token type code -> human readable name
var NAME = {};
Object.keys(TYPE).forEach(function(key) {
    NAME[TYPE[key]] = key;
});

// https://drafts.csswg.org/css-syntax/#tokenizer-definitions
// > non-ASCII code point
// > A code point with a value equal to or greater than U+0080 <control>
// > name-start code point
// > A letter, a non-ASCII code point, or U+005F LOW LINE (_).
// > name code point
// > A name-start code point, a digit, or U+002D HYPHEN-MINUS (-)
// That means only ASCII code points have a special meaning, so lookup
// tables for the 0..127 range are sufficient.
var SafeUint32Array = typeof Uint32Array !== 'undefined' ? Uint32Array : Array; // fallback on Array when TypedArray is not supported
var SYMBOL_TYPE = new SafeUint32Array(0x80);
var PUNCTUATION = new SafeUint32Array(0x80);
var STOP_URL_RAW = new SafeUint32Array(0x80);

// everything is an identifier by default; specific categories override below
for (var i = 0; i < SYMBOL_TYPE.length; i++) {
    SYMBOL_TYPE[i] = IDENTIFIER;
}

// ASCII punctuation char codes. Backslash (\) and low line (_) are
// deliberately excluded: they may start or continue an identifier.
var PUNCTUATION_CODES = [
    TYPE.ExclamationMark,    // !
    TYPE.QuotationMark,      // "
    TYPE.NumberSign,         // #
    TYPE.DollarSign,         // $
    TYPE.PercentSign,        // %
    TYPE.Ampersand,          // &
    TYPE.Apostrophe,         // '
    TYPE.LeftParenthesis,    // (
    TYPE.RightParenthesis,   // )
    TYPE.Asterisk,           // *
    TYPE.PlusSign,           // +
    TYPE.Comma,              // ,
    TYPE.HyphenMinus,        // -
    TYPE.FullStop,           // .
    TYPE.Solidus,            // /
    TYPE.Colon,              // :
    TYPE.Semicolon,          // ;
    TYPE.LessThanSign,       // <
    TYPE.EqualsSign,         // =
    TYPE.GreaterThanSign,    // >
    TYPE.QuestionMark,       // ?
    TYPE.CommercialAt,       // @
    TYPE.LeftSquareBracket,  // [
    TYPE.RightSquareBracket, // ]
    TYPE.CircumflexAccent,   // ^
    TYPE.GraveAccent,        // `
    TYPE.LeftCurlyBracket,   // {
    TYPE.VerticalLine,       // |
    TYPE.RightCurlyBracket,  // }
    TYPE.Tilde               // ~
];

for (var i = 0; i < PUNCTUATION_CODES.length; i++) {
    var code = PUNCTUATION_CODES[i];

    SYMBOL_TYPE[code] = PUNCTUATOR;
    PUNCTUATION[code] = PUNCTUATOR;
}

// digits 0..9
for (var i = 48; i <= 57; i++) {
    SYMBOL_TYPE[i] = NUMBER;
}

SYMBOL_TYPE[SPACE] = WHITESPACE;
SYMBOL_TYPE[TAB] = WHITESPACE;
SYMBOL_TYPE[N] = WHITESPACE;
SYMBOL_TYPE[R] = WHITESPACE;
SYMBOL_TYPE[F] = WHITESPACE;

SYMBOL_TYPE[TYPE.Apostrophe] = STRING;
SYMBOL_TYPE[TYPE.QuotationMark] = STRING;

// characters that terminate an unquoted url() or raw value
STOP_URL_RAW[SPACE] = 1;
STOP_URL_RAW[TAB] = 1;
STOP_URL_RAW[N] = 1;
STOP_URL_RAW[R] = 1;
STOP_URL_RAW[F] = 1;
STOP_URL_RAW[TYPE.Apostrophe] = 1;
STOP_URL_RAW[TYPE.QuotationMark] = 1;
STOP_URL_RAW[TYPE.LeftParenthesis] = 1;
STOP_URL_RAW[TYPE.RightParenthesis] = 1;

// whitespace is punctuation ...
PUNCTUATION[SPACE] = PUNCTUATOR;
PUNCTUATION[TAB] = PUNCTUATOR;
PUNCTUATION[N] = PUNCTUATOR;
PUNCTUATION[R] = PUNCTUATOR;
PUNCTUATION[F] = PUNCTUATOR;
// ... but hyphen-minus is not (it may appear inside identifiers/numbers)
PUNCTUATION[TYPE.HyphenMinus] = 0;
|
||||
|
||||
module.exports = {
|
||||
TYPE: TYPE,
|
||||
NAME: NAME,
|
||||
|
||||
SYMBOL_TYPE: SYMBOL_TYPE,
|
||||
PUNCTUATION: PUNCTUATION,
|
||||
STOP_URL_RAW: STOP_URL_RAW
|
||||
};
|
||||
84
build/node_modules/css-tree/lib/tokenizer/error.js
generated
vendored
Normal file
84
build/node_modules/css-tree/lib/tokenizer/error.js
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
'use strict';
|
||||
|
||||
var createCustomError = require('../utils/createCustomError');
|
||||
var MAX_LINE_LENGTH = 100;
var OFFSET_CORRECTION = 60;
var TAB_REPLACEMENT = '    ';

// Render an excerpt of error.source around the error position:
// `extraLines` lines of context above and below the error line, a gutter
// with right-aligned 1-based line numbers, and a dashed caret line pointing
// at the error column. Over-long lines are cut (with ellipses) so the caret
// stays within MAX_LINE_LENGTH.
function sourceFragment(error, extraLines) {
    // format lines[start..end) with the line-number gutter
    function processLines(start, end) {
        return lines.slice(start, end).map(function(line, idx) {
            var num = String(start + idx + 1);

            while (num.length < maxNumLength) {
                num = ' ' + num;
            }

            return num + ' |' + line;
        }).join('\n');
    }

    var lines = error.source.split(/\r\n?|\n|\f/);
    var line = error.line;
    var column = error.column;
    var startLine = Math.max(1, line - extraLines) - 1;
    var endLine = Math.min(line + extraLines, lines.length + 1);
    var maxNumLength = Math.max(4, String(endLine).length) + 1;
    var cutLeft = 0;

    // column correction: each tab before the error column is expanded to
    // TAB_REPLACEMENT, shifting the caret right
    column += (TAB_REPLACEMENT.length - 1) * (lines[line - 1].substr(0, column - 1).match(/\t/g) || []).length;

    // when the error is far to the right, cut the left part of every line
    // so the caret remains visible
    if (column > MAX_LINE_LENGTH) {
        cutLeft = column - OFFSET_CORRECTION + 3;
        column = OFFSET_CORRECTION - 2;
    }

    for (var lineIdx = startLine; lineIdx <= endLine; lineIdx++) {
        if (lineIdx >= 0 && lineIdx < lines.length) {
            lines[lineIdx] = lines[lineIdx].replace(/\t/g, TAB_REPLACEMENT);
            lines[lineIdx] =
                (cutLeft > 0 && lines[lineIdx].length > cutLeft ? '\u2026' : '') +
                lines[lineIdx].substr(cutLeft, MAX_LINE_LENGTH - 2) +
                (lines[lineIdx].length > cutLeft + MAX_LINE_LENGTH - 1 ? '\u2026' : '');
        }
    }

    return [
        processLines(startLine, line),
        new Array(column + maxNumLength + 2).join('-') + '^',
        processLines(line, endLine)
    ].filter(Boolean).join('\n');
}
|
||||
|
||||
// Factory for CSS syntax errors. Returns the Error-like object produced by
// createCustomError, enriched with the error position and helpers for
// rendering a source excerpt.
var CssSyntaxError = function(message, source, offset, line, column) {
    var err = createCustomError('CssSyntaxError', message);

    err.source = source;
    err.offset = offset;
    err.line = line;
    err.column = column;

    // render `extraLines` lines of context around the error position
    err.sourceFragment = function(extraLines) {
        return sourceFragment(err, isNaN(extraLines) ? 0 : extraLines);
    };
    // lazily built human-readable message with two lines of source context
    Object.defineProperty(err, 'formattedMessage', {
        get: function() {
            return (
                'Parse error: ' + err.message + '\n' +
                sourceFragment(err, 2)
            );
        }
    });

    // for backward compatibility
    err.parseError = {
        offset: offset,
        line: line,
        column: column
    };

    return err;
};

module.exports = CssSyntaxError;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user