lalBi94
2023-03-05 13:23:23 +01:00
commit 7bc56c09b5
14034 changed files with 1834369 additions and 0 deletions

22
node_modules/scss-tokenizer/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015 sasstools
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

50
node_modules/scss-tokenizer/README.md generated vendored Normal file
View File

@@ -0,0 +1,50 @@
# scss-tokenizer
A tokenizer for Sass' SCSS syntax
[![Build Status](https://img.shields.io/travis/sasstools/scss-tokenizer.svg)](https://travis-ci.org/sasstools/scss-tokenizer)
[![npm version](https://img.shields.io/npm/v/scss-tokenizer.svg)](https://www.npmjs.com/package/scss-tokenizer)
[![GitHub issues](https://img.shields.io/github/issues/sasstools/scss-tokenizer.svg)](https://github.com/sasstools/scss-tokenizer/issues)
![License](https://img.shields.io/github/license/sasstools/scss-tokenizer.svg)
# Install
```
npm install scss-tokenizer
```
# Usage
```js
var scss = require('scss-tokenizer');
scss.tokenize(css);
```
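The return value is an array of token arrays. Judging from the bundled `lib/tokenize.js`, each token carries a type string, the matched text and, for most token types, 1-based start/end line and column positions (`space` tokens carry only the type and the whitespace). The sketch below is illustrative rather than guaranteed output:

```js
var scss = require('scss-tokenizer');

var tokens = scss.tokenize('a { color: red; }');
// roughly:
// [ ['word', 'a', 1, 1, 1, 1],
//   ['space', ' '],
//   ['{', '{', 1, 3],
//   ['space', ' '],
//   ['ident', 'color', 1, 5, 1, 9],
//   [':', ':', 1, 10],
//   ['space', ' '],
//   ['ident', 'red', 1, 12, 1, 14],
//   [';', ';', 1, 15],
//   ['space', ' '],
//   ['}', '}', 1, 17] ]
```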
# API
### `tokenize`
Tokenizes source `css` and returns an ordered array of tokens with positional
data.
```js
var tokenizer = require('scss-tokenizer');
var tokens = tokenizer.tokenize(css);
```
Arguments:
* `css (string|#toString)`: String with input CSS or any object
with a `toString()` method, such as a file stream.
* `opts (object) optional`: options:
* `from`: the path to the source CSS file. You should always set `from`,
because it is used in map generation and in syntax error messages.
# Test
```
npm test
```
## Attribution
This project started as a fork of the [PostCSS](https://github.com/postcss/postcss) tokenizer.

1
node_modules/scss-tokenizer/index.js generated vendored Normal file
View File

@@ -0,0 +1 @@
module.exports = require('./lib/entry').default;

23
node_modules/scss-tokenizer/lib/entry.js generated vendored Normal file
View File

@@ -0,0 +1,23 @@
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _input = require('./input');
var _input2 = _interopRequireDefault(_input);
var _tokenize = require('./tokenize');
var _tokenize2 = _interopRequireDefault(_tokenize);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var scss = {};
scss.tokenize = function (css) {
var input = new _input2.default(css);
return (0, _tokenize2.default)(input);
};
exports.default = scss;

64
node_modules/scss-tokenizer/lib/input.js generated vendored Normal file
View File

@@ -0,0 +1,64 @@
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _previousMap = require('./previous-map');
var _previousMap2 = _interopRequireDefault(_previousMap);
var _path = require('path');
var _path2 = _interopRequireDefault(_path);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var sequence = 0;
var Input = function () {
function Input(css) {
var opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
_classCallCheck(this, Input);
this.css = css.toString();
if (this.css[0] === '\uFEFF' || this.css[0] === '\uFFFE') {
this.css = this.css.slice(1);
}
if (opts.from) this.file = _path2.default.resolve(opts.from);
var map = new _previousMap2.default(this.css, opts, this.id);
if (map.text) {
this.map = map;
var file = map.consumer().file;
if (!this.file && file) this.file = this.mapResolve(file);
}
if (this.file) {
this.from = this.file;
} else {
sequence += 1;
this.id = '<input css ' + sequence + '>';
this.from = this.id;
}
if (this.map) this.map.file = this.from;
}
_createClass(Input, [{
key: 'mapResolve',
value: function mapResolve(file) {
return _path2.default.resolve(this.map.consumer().sourceRoot || '.', file);
}
}]);
return Input;
}();
exports.default = Input;

123
node_modules/scss-tokenizer/lib/previous-map.js generated vendored Normal file
View File

@@ -0,0 +1,123 @@
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _jsBase = require('js-base64');
var _sourceMap = require('source-map');
var _sourceMap2 = _interopRequireDefault(_sourceMap);
var _path = require('path');
var _path2 = _interopRequireDefault(_path);
var _fs = require('fs');
var _fs2 = _interopRequireDefault(_fs);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var PreviousMap = function () {
function PreviousMap(css, opts) {
_classCallCheck(this, PreviousMap);
this.loadAnnotation(css);
this.inline = this.startWith(this.annotation, 'data:');
var prev = opts.map ? opts.map.prev : undefined;
var text = this.loadMap(opts.from, prev);
if (text) this.text = text;
}
_createClass(PreviousMap, [{
key: 'consumer',
value: function consumer() {
if (!this.consumerCache) {
this.consumerCache = new _sourceMap2.default.SourceMapConsumer(this.text);
}
return this.consumerCache;
}
}, {
key: 'withContent',
value: function withContent() {
return !!(this.consumer().sourcesContent && this.consumer().sourcesContent.length > 0);
}
}, {
key: 'startWith',
value: function startWith(string, start) {
if (!string) return false;
return string.substr(0, start.length) === start;
}
}, {
key: 'loadAnnotation',
value: function loadAnnotation(css) {
var match = css.match(/\/\*\s*# sourceMappingURL=((?:(?!sourceMappingURL=).)*)\s*\*\//);
if (match) this.annotation = match[1].trim();
}
}, {
key: 'decodeInline',
value: function decodeInline(text) {
var utfd64 = 'data:application/json;charset=utf-8;base64,';
var utf64 = 'data:application/json;charset=utf8;base64,';
var b64 = 'data:application/json;base64,';
var uri = 'data:application/json,';
if (this.startWith(text, uri)) {
return decodeURIComponent(text.substr(uri.length));
} else if (this.startWith(text, b64)) {
return _jsBase.Base64.decode(text.substr(b64.length));
} else if (this.startWith(text, utf64)) {
return _jsBase.Base64.decode(text.substr(utf64.length));
} else if (this.startWith(text, utfd64)) {
return _jsBase.Base64.decode(text.substr(utfd64.length));
} else {
var encoding = text.match(/data:application\/json;([^,]+),/)[1];
throw new Error('Unsupported source map encoding ' + encoding);
}
}
}, {
key: 'loadMap',
value: function loadMap(file, prev) {
if (prev === false) return false;
if (prev) {
if (typeof prev === 'string') {
return prev;
} else if (prev instanceof _sourceMap2.default.SourceMapConsumer) {
return _sourceMap2.default.SourceMapGenerator.fromSourceMap(prev).toString();
} else if (prev instanceof _sourceMap2.default.SourceMapGenerator) {
return prev.toString();
} else if ((typeof prev === 'undefined' ? 'undefined' : _typeof(prev)) === 'object' && prev.mappings) {
return JSON.stringify(prev);
} else {
throw new Error('Unsupported previous source map format: ' + prev.toString());
}
} else if (this.inline) {
return this.decodeInline(this.annotation);
} else if (this.annotation) {
var map = this.annotation;
if (file) map = _path2.default.join(_path2.default.dirname(file), map);
this.root = _path2.default.dirname(map);
if (_fs2.default.existsSync && _fs2.default.existsSync(map)) {
return _fs2.default.readFileSync(map, 'utf-8').toString().trim();
} else {
return false;
}
}
}
}]);
return PreviousMap;
}();
exports.default = PreviousMap;
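// Usage sketch (illustrative): decoding an inline base64 sourceMappingURL annotation,
// using the js-base64 dependency that this module already requires.
//   var Base64 = require('js-base64').Base64;
//   var PreviousMap = require('./previous-map').default;
//   var json = JSON.stringify({ version: 3, sources: [], names: [], mappings: '' });
//   var css = 'a { }\n/*# sourceMappingURL=data:application/json;base64,' +
//             Base64.encode(json) + ' */';
//   var prev = new PreviousMap(css, {});
//   prev.inline; // true  -- the annotation is a data: URI
//   prev.text;   // the decoded JSON string, equal to `json`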

154
node_modules/scss-tokenizer/lib/tokenize-comment.js generated vendored Normal file
View File

@@ -0,0 +1,154 @@
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = tokenize;
var _input = require('./input');
var _input2 = _interopRequireDefault(_input);
var _tokenizeString = require('./tokenize-string');
var _tokenizeString2 = _interopRequireDefault(_tokenizeString);
var _tokenizeInterpolant2 = require('./tokenize-interpolant');
var _tokenizeInterpolant3 = _interopRequireDefault(_tokenizeInterpolant2);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var newline = '\n'.charCodeAt(0),
space = ' '.charCodeAt(0),
feed = '\f'.charCodeAt(0),
tab = '\t'.charCodeAt(0),
cr = '\r'.charCodeAt(0),
hash = '#'.charCodeAt(0),
backslash = '\\'.charCodeAt(0),
slash = '/'.charCodeAt(0),
openCurly = '{'.charCodeAt(0),
closeCurly = '}'.charCodeAt(0),
asterisk = '*'.charCodeAt(0),
wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\*(?=\/)|#(?={)/g;
function tokenize(input, l, p, o) {
var tokens = [];
var css = input.css.valueOf();
var code = void 0,
next = void 0,
lines = void 0,
last = void 0,
content = void 0,
escape = void 0,
nextLine = void 0,
nextOffset = void 0,
escaped = void 0,
escapePos = void 0,
inInterpolant = void 0,
inComment = void 0,
inString = void 0;
var length = css.length;
var offset = o || -1;
var line = l || 1;
var pos = p || 0;
loop: while (pos < length) {
code = css.charCodeAt(pos);
if (code === newline) {
offset = pos;
line += 1;
}
switch (code) {
case space:
case tab:
case cr:
case feed:
next = pos;
do {
next += 1;
code = css.charCodeAt(next);
if (code === newline) {
offset = next;
line += 1;
}
} while (code === space || code === tab || code === cr || code === feed);
tokens.push(['space', css.slice(pos, next)]);
pos = next - 1;
break;
case newline:
tokens.push(['newline', '\n', line, pos - offset]);
break;
case closeCurly:
tokens.push(['endInterpolant', '}', line, pos - offset]);
break;
case backslash:
next = pos;
escape = true;
while (css.charCodeAt(next + 1) === backslash) {
next += 1;
escape = !escape;
}
code = css.charCodeAt(next + 1);
if (escape && code !== slash && code !== space && code !== newline && code !== tab && code !== cr && code !== feed) {
next += 1;
}
tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
default:
if (code === asterisk && css.charCodeAt(pos + 1) === slash) {
next = pos;
pos = next - 1;
break loop;
}
if (code === hash && css.charCodeAt(pos + 1) === openCurly) {
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
next = pos + 1;
var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1, offset),
t = _tokenizeInterpolant.tokens,
_l = _tokenizeInterpolant.line,
_p = _tokenizeInterpolant.pos,
_o = _tokenizeInterpolant.offset;
tokens = tokens.concat(t);
next = _p;
line = _l;
offset = _o;
pos = next;
break;
}
wordEnd.lastIndex = pos + 1;
wordEnd.test(css);
if (wordEnd.lastIndex === 0) {
next = css.length - 1;
} else {
next = wordEnd.lastIndex - 2;
}
tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
}
pos++;
}
return { tokens: tokens, line: line, pos: pos, offset: offset };
}

304
node_modules/scss-tokenizer/lib/tokenize-interpolant.js generated vendored Normal file
View File

@@ -0,0 +1,304 @@
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = tokenize;
var _input = require('./input');
var _input2 = _interopRequireDefault(_input);
var _tokenizeString2 = require('./tokenize-string');
var _tokenizeString3 = _interopRequireDefault(_tokenizeString2);
var _tokenizeComment2 = require('./tokenize-comment');
var _tokenizeComment3 = _interopRequireDefault(_tokenizeComment2);
var _tokenizeInterpolant2 = require('./tokenize-interpolant');
var _tokenizeInterpolant3 = _interopRequireDefault(_tokenizeInterpolant2);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var singleQuote = "'".charCodeAt(0),
doubleQuote = '"'.charCodeAt(0),
dollar = '$'.charCodeAt(0),
hash = '#'.charCodeAt(0),
backslash = '\\'.charCodeAt(0),
slash = '/'.charCodeAt(0),
newline = '\n'.charCodeAt(0),
space = ' '.charCodeAt(0),
feed = '\f'.charCodeAt(0),
tab = '\t'.charCodeAt(0),
cr = '\r'.charCodeAt(0),
openBracket = '('.charCodeAt(0),
closeBracket = ')'.charCodeAt(0),
openCurly = '{'.charCodeAt(0),
closeCurly = '}'.charCodeAt(0),
semicolon = ';'.charCodeAt(0),
asterisk = '*'.charCodeAt(0),
colon = ':'.charCodeAt(0),
at = '@'.charCodeAt(0),
comma = ','.charCodeAt(0),
plus = '+'.charCodeAt(0),
minus = '-'.charCodeAt(0),
decComb = '>'.charCodeAt(0),
adjComb = '~'.charCodeAt(0),
number = /[+-]?(\d+(\.\d+)?|\.\d+)|(e[+-]\d+)/gi,
sQuoteEnd = /(.*?)[^\\](?=((#{)|'))/gm,
dQuoteEnd = /(.*?)[^\\](?=((#{)|"))/gm,
wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\/(?=\*)|#(?={)/g,
ident = /-?([a-z_]|\\[^\\])([a-z-_0-9]|\\[^\\])*/gi;
function tokenize(input, l, p, o) {
var tokens = [];
var css = input.css.valueOf();
var code = void 0,
next = void 0,
quote = void 0,
lines = void 0,
last = void 0,
content = void 0,
escape = void 0,
nextLine = void 0,
nextOffset = void 0,
escaped = void 0,
escapePos = void 0,
inInterpolant = void 0,
inComment = void 0,
inString = void 0;
var length = css.length;
var offset = o || -1;
var line = l || 1;
var pos = p || 0;
loop: while (pos < length) {
code = css.charCodeAt(pos);
if (code === newline) {
offset = pos;
line += 1;
}
switch (code) {
case space:
case tab:
case cr:
case feed:
next = pos;
do {
next += 1;
code = css.charCodeAt(next);
if (code === newline) {
offset = next;
line += 1;
}
} while (code === space || code === tab || code === cr || code === feed);
tokens.push(['space', css.slice(pos, next)]);
pos = next - 1;
break;
case newline:
tokens.push(['newline', '\n', line, pos - offset]);
break;
case plus:
tokens.push(['+', '+', line, pos - offset]);
break;
case minus:
tokens.push(['-', '-', line, pos - offset]);
break;
case decComb:
tokens.push(['>', '>', line, pos - offset]);
break;
case adjComb:
tokens.push(['~', '~', line, pos - offset]);
break;
case openCurly:
tokens.push(['{', '{', line, pos - offset]);
break;
case closeCurly:
tokens.push(['endInterpolant', '}', line, pos - offset]);
break loop;
case comma:
tokens.push([',', ',', line, pos - offset]);
break;
case dollar:
tokens.push(['$', '$', line, pos - offset]);
break;
case colon:
tokens.push([':', ':', line, pos - offset]);
break;
case semicolon:
tokens.push([';', ';', line, pos - offset]);
break;
case openBracket:
tokens.push(['(', '(', line, pos - offset]);
break;
case closeBracket:
tokens.push([')', ')', line, pos - offset]);
break;
case singleQuote:
case doubleQuote:
quote = code === singleQuote ? "'" : '"';
tokens.push([quote, quote, line, pos - offset]);
next = pos + 1;
var _tokenizeString = (0, _tokenizeString3.default)(input, line, next, offset, quote),
t = _tokenizeString.tokens,
_l = _tokenizeString.line,
_p = _tokenizeString.pos,
_o = _tokenizeString.offset;
tokens = tokens.concat(t);
next = _p;
line = _l;
offset = _o;
pos = next;
break;
case at:
tokens.push(['@', '@', line, pos - offset]);
break;
case backslash:
next = pos;
escape = true;
while (css.charCodeAt(next + 1) === backslash) {
next += 1;
escape = !escape;
}
code = css.charCodeAt(next + 1);
if (escape && code !== space && code !== newline && code !== tab && code !== cr && code !== feed) {
next += 1;
}
tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
default:
ident.lastIndex = pos;
number.lastIndex = pos;
wordEnd.lastIndex = pos;
if (code === slash && css.charCodeAt(pos + 1) === asterisk) {
inComment = true;
tokens.push(['startComment', '/*', line, pos + 1 - offset]);
next = pos + 1;
var _tokenizeComment = (0, _tokenizeComment3.default)(input, line, next + 1, offset),
_t = _tokenizeComment.tokens,
_l2 = _tokenizeComment.line,
_p2 = _tokenizeComment.pos,
_o2 = _tokenizeComment.offset;
tokens = tokens.concat(_t);
next = _p2;
line = _l2;
offset = _o2;
pos = next;
break;
}
if (code === asterisk && css.charCodeAt(pos + 1) !== slash) {
tokens.push(['*', '*', line, pos - offset]);
break;
}
if (inComment && code === asterisk && css.charCodeAt(pos + 1) === slash) {
inComment = false;
tokens.push(['endComment', '*/', line, pos + 1 - offset]);
pos += 2;
break;
}
if (code === slash && css.charCodeAt(pos + 1) !== slash) {
tokens.push(['/', '/', line, pos - offset]);
pos += 2;
break;
}
if (code === hash && css.charCodeAt(pos + 1) === openCurly) {
inInterpolant = true;
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
next = pos + 1;
var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1),
_t2 = _tokenizeInterpolant.tokens,
_p3 = _tokenizeInterpolant.pos;
tokens = tokens.concat(_t2);
next = _p3;
pos = next;
break;
}
if (code === slash && css.charCodeAt(pos + 1) === slash) {
next = css.indexOf('\n\n', pos + 2);
next = next > 0 ? next : css.length;
tokens.push(['scssComment', css.slice(pos, next), line, pos - offset, line, next - offset]);
pos = next;
break;
}
if (ident.test(css) && (ident.lastIndex = pos || 1) && ident.exec(css).index === pos) {
next = ident.lastIndex - 1;
tokens.push(['ident', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
}
if (number.test(css) && (number.lastIndex = pos || 1) && number.exec(css).index === pos) {
next = number.lastIndex - 1;
tokens.push(['number', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
}
wordEnd.lastIndex = pos + 1;
wordEnd.test(css);
if (wordEnd.lastIndex === 0) {
next = css.length - 1;
} else {
next = wordEnd.lastIndex - 2;
}
tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
}
pos++;
}
return { tokens: tokens, line: line, pos: pos, offset: offset };
}

136
node_modules/scss-tokenizer/lib/tokenize-string.js generated vendored Normal file
View File

@@ -0,0 +1,136 @@
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = tokenize;
var _input = require('./input');
var _input2 = _interopRequireDefault(_input);
var _tokenizeString = require('./tokenize-string');
var _tokenizeString2 = _interopRequireDefault(_tokenizeString);
var _tokenizeInterpolant2 = require('./tokenize-interpolant');
var _tokenizeInterpolant3 = _interopRequireDefault(_tokenizeInterpolant2);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var singleQuote = "'".charCodeAt(0),
doubleQuote = '"'.charCodeAt(0),
newline = '\n'.charCodeAt(0),
space = ' '.charCodeAt(0),
feed = '\f'.charCodeAt(0),
tab = '\t'.charCodeAt(0),
cr = '\r'.charCodeAt(0),
hash = '#'.charCodeAt(0),
backslash = '\\'.charCodeAt(0),
slash = '/'.charCodeAt(0),
openCurly = '{'.charCodeAt(0),
closeCurly = '}'.charCodeAt(0),
interpolantEnd = /([.\s]*?)[^\\](?=(}))/gm,
sQuoteEnd = /([.\s]*?)[^\\](?=((#{)|'))/gm,
dQuoteEnd = /([.\s]*?)[^\\](?=((#{)|"))/gm;
function tokenize(input, l, p, o, quote) {
var tokens = [];
var css = input.css.valueOf();
var code = void 0,
next = void 0,
lines = void 0,
last = void 0,
content = void 0,
escape = void 0,
nextLine = void 0,
nextOffset = void 0,
escaped = void 0,
escapePos = void 0,
inInterpolant = void 0,
inComment = void 0,
inString = void 0;
var length = css.length;
var offset = o || -1;
var line = l || 1;
var pos = p || 0;
var quoteEnd = quote === "'" ? sQuoteEnd : dQuoteEnd;
var quoteChar = quote.charCodeAt(0);
loop: while (pos < length) {
code = css.charCodeAt(pos);
if (code === newline) {
offset = pos;
line += 1;
}
switch (code) {
case closeCurly:
tokens.push(['endInterpolant', '}', line, pos - offset]);
break;
case quoteChar:
tokens.push([quote, quote, line, pos - offset]);
break loop;
case backslash:
next = pos;
escape = true;
while (css.charCodeAt(next + 1) === backslash) {
next += 1;
escape = !escape;
}
code = css.charCodeAt(next + 1);
if (escape && code !== slash && code !== space && code !== newline && code !== tab && code !== cr && code !== feed) {
next += 1;
}
tokens.push(['string', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
default:
if (code === hash && css.charCodeAt(pos + 1) === openCurly) {
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
next = pos + 1;
var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1, offset),
t = _tokenizeInterpolant.tokens,
_l = _tokenizeInterpolant.line,
_p = _tokenizeInterpolant.pos,
_o = _tokenizeInterpolant.offset;
tokens = tokens.concat(t);
next = _p;
line = _l;
offset = _o;
pos = next;
} else {
quoteEnd.lastIndex = pos;
quoteEnd.test(css);
if (quoteEnd.lastIndex === 0) {
next = css.length - 1;
} else {
next = quoteEnd.lastIndex - 1;
}
tokens.push(['string', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
}
break;
}
pos++;
}
return { tokens: tokens, line: line, pos: pos, offset: offset };
}

312
node_modules/scss-tokenizer/lib/tokenize.js generated vendored Normal file
View File

@@ -0,0 +1,312 @@
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = tokenize;
var _input = require('./input');
var _input2 = _interopRequireDefault(_input);
var _tokenizeString2 = require('./tokenize-string');
var _tokenizeString3 = _interopRequireDefault(_tokenizeString2);
var _tokenizeComment2 = require('./tokenize-comment');
var _tokenizeComment3 = _interopRequireDefault(_tokenizeComment2);
var _tokenizeInterpolant2 = require('./tokenize-interpolant');
var _tokenizeInterpolant3 = _interopRequireDefault(_tokenizeInterpolant2);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var singleQuote = "'".charCodeAt(0),
doubleQuote = '"'.charCodeAt(0),
dollar = '$'.charCodeAt(0),
hash = '#'.charCodeAt(0),
backslash = '\\'.charCodeAt(0),
slash = '/'.charCodeAt(0),
newline = '\n'.charCodeAt(0),
space = ' '.charCodeAt(0),
feed = '\f'.charCodeAt(0),
tab = '\t'.charCodeAt(0),
cr = '\r'.charCodeAt(0),
openBracket = '('.charCodeAt(0),
closeBracket = ')'.charCodeAt(0),
openCurly = '{'.charCodeAt(0),
closeCurly = '}'.charCodeAt(0),
semicolon = ';'.charCodeAt(0),
asterisk = '*'.charCodeAt(0),
colon = ':'.charCodeAt(0),
at = '@'.charCodeAt(0),
comma = ','.charCodeAt(0),
plus = '+'.charCodeAt(0),
minus = '-'.charCodeAt(0),
decComb = '>'.charCodeAt(0),
adjComb = '~'.charCodeAt(0),
number = /[+-]?(\d+(\.\d+)?|\.\d+)|(e[+-]\d+)/gi,
sQuoteEnd = /(.*?)[^\\](?=((#{)|'))/gm,
dQuoteEnd = /(.*?)[^\\](?=((#{)|"))/gm,
wordEnd = /[ \n\t\r\(\)\{\},:;@!'"\\]|\/(?=\*)|#(?={)/g,
ident = /-?([a-z_]|\\[^\\])([a-z-_0-9]|\\[^\\])*/gi;
function tokenize(input, l, p) {
var tokens = [];
var css = input.css.valueOf();
var code = void 0,
next = void 0,
quote = void 0,
lines = void 0,
last = void 0,
content = void 0,
escape = void 0,
nextLine = void 0,
nextOffset = void 0,
escaped = void 0,
escapePos = void 0,
inInterpolant = void 0,
inComment = void 0,
inString = void 0;
var length = css.length;
var offset = -1;
var line = l || 1;
var pos = p || 0;
while (pos < length) {
code = css.charCodeAt(pos);
if (code === newline) {
offset = pos;
line += 1;
}
switch (code) {
case space:
case tab:
case cr:
case feed:
next = pos;
do {
next += 1;
code = css.charCodeAt(next);
if (code === newline) {
offset = next;
line += 1;
}
} while (code === space || code === tab || code === cr || code === feed);
tokens.push(['space', css.slice(pos, next)]);
pos = next - 1;
break;
case newline:
tokens.push(['newline', '\n', line, pos - offset]);
break;
case plus:
tokens.push(['+', '+', line, pos - offset]);
break;
case minus:
tokens.push(['-', '-', line, pos - offset]);
break;
case decComb:
tokens.push(['>', '>', line, pos - offset]);
break;
case adjComb:
tokens.push(['~', '~', line, pos - offset]);
break;
case openCurly:
tokens.push(['{', '{', line, pos - offset]);
break;
case closeCurly:
if (inInterpolant) {
inInterpolant = false;
tokens.push(['endInterpolant', '}', line, pos - offset]);
} else {
tokens.push(['}', '}', line, pos - offset]);
}
break;
case comma:
tokens.push([',', ',', line, pos - offset]);
break;
case dollar:
tokens.push(['$', '$', line, pos - offset]);
break;
case colon:
tokens.push([':', ':', line, pos - offset]);
break;
case semicolon:
tokens.push([';', ';', line, pos - offset]);
break;
case openBracket:
tokens.push(['(', '(', line, pos - offset]);
break;
case closeBracket:
tokens.push([')', ')', line, pos - offset]);
break;
case singleQuote:
case doubleQuote:
quote = code === singleQuote ? "'" : '"';
tokens.push([quote, quote, line, pos - offset]);
next = pos + 1;
var _tokenizeString = (0, _tokenizeString3.default)(input, line, next, offset, quote),
t = _tokenizeString.tokens,
_l = _tokenizeString.line,
_p = _tokenizeString.pos,
o = _tokenizeString.offset;
tokens = tokens.concat(t);
next = _p;
line = _l;
offset = o;
pos = next;
break;
case at:
tokens.push(['@', '@', line, pos - offset]);
break;
case backslash:
next = pos;
escape = true;
while (css.charCodeAt(next + 1) === backslash) {
next += 1;
escape = !escape;
}
code = css.charCodeAt(next + 1);
if (escape && code !== space && code !== newline && code !== tab && code !== cr && code !== feed) {
next += 1;
}
tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
default:
ident.lastIndex = pos;
number.lastIndex = pos;
wordEnd.lastIndex = pos;
if (code === slash && css.charCodeAt(pos + 1) === asterisk) {
inComment = true;
tokens.push(['startComment', '/*', line, pos + 1 - offset]);
next = pos + 1;
var _tokenizeComment = (0, _tokenizeComment3.default)(input, line, next + 1, offset),
_t = _tokenizeComment.tokens,
_l2 = _tokenizeComment.line,
_p2 = _tokenizeComment.pos,
_o = _tokenizeComment.offset;
tokens = tokens.concat(_t);
next = _p2;
line = _l2;
offset = _o;
pos = next;
break;
}
if (code === asterisk && css.charCodeAt(pos + 1) !== slash) {
tokens.push(['*', '*', line, pos - offset]);
break;
}
if (inComment && code === asterisk && css.charCodeAt(pos + 1) === slash) {
inComment = false;
tokens.push(['endComment', '*/', line, pos + 1 - offset]);
pos += 2;
break;
}
if (code === slash && css.charCodeAt(pos + 1) !== slash) {
tokens.push(['/', '/', line, pos - offset]);
break;
}
if (code === hash && css.charCodeAt(pos + 1) === openCurly) {
inInterpolant = true;
tokens.push(['startInterpolant', '#{', line, pos + 1 - offset]);
next = pos + 1;
var _tokenizeInterpolant = (0, _tokenizeInterpolant3.default)(input, line, next + 1, offset),
_t2 = _tokenizeInterpolant.tokens,
_l3 = _tokenizeInterpolant.line,
_p3 = _tokenizeInterpolant.pos,
_o2 = _tokenizeInterpolant.offset;
tokens = tokens.concat(_t2);
next = _p3;
line = _l3;
offset = _o2;
pos = next;
break;
}
if (code === slash && css.charCodeAt(pos + 1) === slash) {
next = css.indexOf('\n', pos + 2);
next = (next > 0 ? next : css.length) - 1;
tokens.push(['scssComment', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
}
if (ident.test(css) && (ident.lastIndex = pos || 1) && ident.exec(css).index === pos) {
next = ident.lastIndex - 1;
tokens.push(['ident', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
}
if (number.test(css) && (number.lastIndex = pos || 1) && number.exec(css).index === pos) {
next = number.lastIndex - 1;
tokens.push(['number', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
}
wordEnd.lastIndex = pos + 1;
wordEnd.test(css);
if (wordEnd.lastIndex === 0) {
next = css.length - 1;
} else {
next = wordEnd.lastIndex - 2;
}
tokens.push(['word', css.slice(pos, next + 1), line, pos - offset, line, next - offset]);
pos = next;
break;
}
pos++;
}
return tokens;
}

View File

@@ -0,0 +1,28 @@
Copyright (c) 2009-2011, Mozilla Foundation and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names of the Mozilla Foundation nor the names of project
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,100 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
/**
* A data structure which is a combination of an array and a set. Adding a new
* member is O(1), testing for membership is O(1), and finding the index of an
* element is O(1). Removing elements from the set is not supported. Only
* strings are supported for membership.
*/
class ArraySet {
constructor() {
this._array = [];
this._set = new Map();
}
/**
* Static method for creating ArraySet instances from an existing array.
*/
static fromArray(aArray, aAllowDuplicates) {
const set = new ArraySet();
for (let i = 0, len = aArray.length; i < len; i++) {
set.add(aArray[i], aAllowDuplicates);
}
return set;
}
/**
* Return how many unique items are in this ArraySet. If duplicates have been
* added, then those do not count towards the size.
*
* @returns Number
*/
size() {
return this._set.size;
}
/**
* Add the given string to this set.
*
* @param String aStr
*/
add(aStr, aAllowDuplicates) {
const isDuplicate = this.has(aStr);
const idx = this._array.length;
if (!isDuplicate || aAllowDuplicates) {
this._array.push(aStr);
}
if (!isDuplicate) {
this._set.set(aStr, idx);
}
}
/**
* Is the given string a member of this set?
*
* @param String aStr
*/
has(aStr) {
return this._set.has(aStr);
}
/**
* What is the index of the given string in the array?
*
* @param String aStr
*/
indexOf(aStr) {
const idx = this._set.get(aStr);
if (idx >= 0) {
return idx;
}
throw new Error('"' + aStr + '" is not in the set.');
}
/**
* What is the element at the given index?
*
* @param Number aIdx
*/
at(aIdx) {
if (aIdx >= 0 && aIdx < this._array.length) {
return this._array[aIdx];
}
throw new Error("No element indexed by " + aIdx);
}
/**
* Returns the array representation of this set (which has the proper indices
* indicated by indexOf). Note that this is a copy of the internal array used
* for storing the members so that no one can mess with internal state.
*/
toArray() {
return this._array.slice();
}
}
exports.ArraySet = ArraySet;
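// Usage sketch (illustrative):
//   const set = ArraySet.fromArray(["a", "b", "a"], /* aAllowDuplicates */ false);
//   set.size();       // 2 -- the duplicate "a" does not count towards the size
//   set.indexOf("b"); // 1
//   set.at(0);        // "a"
//   set.has("c");     // false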

View File

@@ -0,0 +1,111 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*
* Based on the Base 64 VLQ implementation in Closure Compiler:
* https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java
*
* Copyright 2011 The Closure Compiler Authors. All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
const base64 = require("./base64");
// A single base 64 digit can contain 6 bits of data. For the base 64 variable
// length quantities we use in the source map spec, the first bit is the sign,
// the next four bits are the actual value, and the 6th bit is the
// continuation bit. The continuation bit tells us whether there are more
// digits in this value following this digit.
//
// Continuation
// | Sign
// | |
// V V
// 101011
const VLQ_BASE_SHIFT = 5;
// binary: 100000
const VLQ_BASE = 1 << VLQ_BASE_SHIFT;
// binary: 011111
const VLQ_BASE_MASK = VLQ_BASE - 1;
// binary: 100000
const VLQ_CONTINUATION_BIT = VLQ_BASE;
/**
* Converts from a two's complement value to a value where the sign bit is
* placed in the least significant bit. For example, as decimals:
* 1 becomes 2 (10 binary), -1 becomes 3 (11 binary)
* 2 becomes 4 (100 binary), -2 becomes 5 (101 binary)
*/
function toVLQSigned(aValue) {
return aValue < 0
? ((-aValue) << 1) + 1
: (aValue << 1) + 0;
}
/**
* Converts to a two's complement value from a value where the sign bit is
* placed in the least significant bit. For example, as decimals:
* 2 (10 binary) becomes 1, 3 (11 binary) becomes -1
* 4 (100 binary) becomes 2, 5 (101 binary) becomes -2
*/
// eslint-disable-next-line no-unused-vars
function fromVLQSigned(aValue) {
const isNegative = (aValue & 1) === 1;
const shifted = aValue >> 1;
return isNegative
? -shifted
: shifted;
}
/**
* Returns the base 64 VLQ encoded value.
*/
exports.encode = function base64VLQ_encode(aValue) {
let encoded = "";
let digit;
let vlq = toVLQSigned(aValue);
do {
digit = vlq & VLQ_BASE_MASK;
vlq >>>= VLQ_BASE_SHIFT;
if (vlq > 0) {
// There are still more digits in this value, so we must make sure the
// continuation bit is marked.
digit |= VLQ_CONTINUATION_BIT;
}
encoded += base64.encode(digit);
} while (vlq > 0);
return encoded;
};
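// Worked examples (illustrative), following the encoding described above
// (assuming this module is imported as `base64VLQ`, as other files in this package do):
//   base64VLQ.encode(0)  === "A"   // toVLQSigned(0)  = 0
//   base64VLQ.encode(1)  === "C"   // toVLQSigned(1)  = 2
//   base64VLQ.encode(-1) === "D"   // toVLQSigned(-1) = 3
//   base64VLQ.encode(16) === "gB"  // toVLQSigned(16) = 32: continuation digit "g", then "B"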

View File

@@ -0,0 +1,18 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
const intToCharMap = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");
/**
* Encode an integer in the range of 0 to 63 to a single base 64 digit.
*/
exports.encode = function(number) {
if (0 <= number && number < intToCharMap.length) {
return intToCharMap[number];
}
throw new TypeError("Must be between 0 and 63: " + number);
};

View File

@@ -0,0 +1,107 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
exports.GREATEST_LOWER_BOUND = 1;
exports.LEAST_UPPER_BOUND = 2;
/**
* Recursive implementation of binary search.
*
* @param aLow Indices here and lower do not contain the needle.
* @param aHigh Indices here and higher do not contain the needle.
* @param aNeedle The element being searched for.
* @param aHaystack The non-empty array being searched.
* @param aCompare Function which takes two elements and returns -1, 0, or 1.
* @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
* 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
* closest element that is smaller than or greater than the one we are
* searching for, respectively, if the exact element cannot be found.
*/
function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {
// This function terminates when one of the following is true:
//
// 1. We find the exact element we are looking for.
//
// 2. We did not find the exact element, but we can return the index of
// the next-closest element.
//
// 3. We did not find the exact element, and there is no next-closest
// element than the one we are searching for, so we return -1.
const mid = Math.floor((aHigh - aLow) / 2) + aLow;
const cmp = aCompare(aNeedle, aHaystack[mid], true);
if (cmp === 0) {
// Found the element we are looking for.
return mid;
} else if (cmp > 0) {
// Our needle is greater than aHaystack[mid].
if (aHigh - mid > 1) {
// The element is in the upper half.
return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);
}
// The exact needle element was not found in this haystack. Determine if
// we are in termination case (3) or (2) and return the appropriate thing.
if (aBias == exports.LEAST_UPPER_BOUND) {
return aHigh < aHaystack.length ? aHigh : -1;
}
return mid;
}
// Our needle is less than aHaystack[mid].
if (mid - aLow > 1) {
// The element is in the lower half.
return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);
}
// we are in termination case (3) or (2) and return the appropriate thing.
if (aBias == exports.LEAST_UPPER_BOUND) {
return mid;
}
return aLow < 0 ? -1 : aLow;
}
/**
* This is an implementation of binary search which will always try and return
* the index of the closest element if there is no exact hit. This is because
* mappings between original and generated line/col pairs are single points,
* and there is an implicit region between each of them, so a miss just means
* that you aren't on the very start of a region.
*
* @param aNeedle The element you are looking for.
* @param aHaystack The array that is being searched.
* @param aCompare A function which takes the needle and an element in the
* array and returns -1, 0, or 1 depending on whether the needle is less
* than, equal to, or greater than the element, respectively.
* @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
* 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
* closest element that is smaller than or greater than the one we are
* searching for, respectively, if the exact element cannot be found.
* Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.
*/
exports.search = function search(aNeedle, aHaystack, aCompare, aBias) {
if (aHaystack.length === 0) {
return -1;
}
let index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,
aCompare, aBias || exports.GREATEST_LOWER_BOUND);
if (index < 0) {
return -1;
}
// We have found either the exact element, or the next-closest element than
// the one we are searching for. However, there may be more than one such
// element. Make sure we always return the smallest of these.
while (index - 1 >= 0) {
if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {
break;
}
--index;
}
return index;
};
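// Usage sketch (illustrative), assuming `const binarySearch = require("./binary-search")`
// and a plain numeric comparator that ignores the third argument:
//   const cmp = (a, b) => a - b;
//   binarySearch.search(6, [2, 4, 6, 8], cmp);                                  // 2 (exact hit)
//   binarySearch.search(5, [2, 4, 6, 8], cmp);                                  // 1 (greatest lower bound: 4)
//   binarySearch.search(5, [2, 4, 6, 8], cmp, binarySearch.LEAST_UPPER_BOUND);  // 2 (least upper bound: 6)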

View File

@@ -0,0 +1,80 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2014 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
const util = require("./util");
/**
* Determine whether mappingB is after mappingA with respect to generated
* position.
*/
function generatedPositionAfter(mappingA, mappingB) {
// Optimized for most common case
const lineA = mappingA.generatedLine;
const lineB = mappingB.generatedLine;
const columnA = mappingA.generatedColumn;
const columnB = mappingB.generatedColumn;
return lineB > lineA || lineB == lineA && columnB >= columnA ||
util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0;
}
/**
* A data structure to provide a sorted view of accumulated mappings in a
* performance conscious manner. It trades a negligible overhead in general
* case for a large speedup in case of mappings being added in order.
*/
class MappingList {
constructor() {
this._array = [];
this._sorted = true;
// Serves as infimum
this._last = {generatedLine: -1, generatedColumn: 0};
}
/**
* Iterate through internal items. This method takes the same arguments that
* `Array.prototype.forEach` takes.
*
* NOTE: The order of the mappings is NOT guaranteed.
*/
unsortedForEach(aCallback, aThisArg) {
this._array.forEach(aCallback, aThisArg);
}
/**
* Add the given source mapping.
*
* @param Object aMapping
*/
add(aMapping) {
if (generatedPositionAfter(this._last, aMapping)) {
this._last = aMapping;
this._array.push(aMapping);
} else {
this._sorted = false;
this._array.push(aMapping);
}
}
/**
* Returns the flat, sorted array of mappings. The mappings are sorted by
* generated position.
*
* WARNING: This method returns internal data without copying, for
* performance. The return value must NOT be mutated, and should be treated as
* an immutable borrow. If you want to take ownership, you must make your own
* copy.
*/
toArray() {
if (!this._sorted) {
this._array.sort(util.compareByGeneratedPositionsInflated);
this._sorted = true;
}
return this._array;
}
}
exports.MappingList = MappingList;
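// Usage sketch (illustrative): mappings added in generated order stay sorted cheaply,
// while an out-of-order add only triggers a sort when toArray() is called.
//   const list = new MappingList();
//   list.add({ generatedLine: 1, generatedColumn: 0 });
//   list.add({ generatedLine: 2, generatedColumn: 4 });
//   list.add({ generatedLine: 1, generatedColumn: 5 });   // out of order
//   list.toArray().map(m => m.generatedLine);             // [1, 1, 2]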

Binary file not shown.

View File

@@ -0,0 +1,49 @@
/* Determine browser vs node environment by testing the default top level context. Solution courtesy of: https://stackoverflow.com/questions/17575790/environment-detection-node-js-or-browser */
const isBrowserEnvironment = (function() {
// eslint-disable-next-line no-undef
return (typeof window !== "undefined") && (this === window);
}).call();
if (isBrowserEnvironment) {
// Web version of reading a wasm file into an array buffer.
let mappingsWasm = null;
module.exports = function readWasm() {
if (typeof mappingsWasm === "string") {
return fetch(mappingsWasm)
.then(response => response.arrayBuffer());
}
if (mappingsWasm instanceof ArrayBuffer) {
return Promise.resolve(mappingsWasm);
}
throw new Error("You must provide the string URL or ArrayBuffer contents " +
"of lib/mappings.wasm by calling " +
"SourceMapConsumer.initialize({ 'lib/mappings.wasm': ... }) " +
"before using SourceMapConsumer");
};
module.exports.initialize = input => mappingsWasm = input;
} else {
// Node version of reading a wasm file into an array buffer.
const fs = require("fs");
const path = require("path");
module.exports = function readWasm() {
return new Promise((resolve, reject) => {
const wasmPath = path.join(__dirname, "mappings.wasm");
fs.readFile(wasmPath, null, (error, data) => {
if (error) {
reject(error);
return;
}
resolve(data.buffer);
});
});
};
module.exports.initialize = _ => {
console.debug("SourceMapConsumer.initialize is a no-op when running in node.js");
};
}
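// Usage sketch (illustrative; the URL is a placeholder): in a browser build the caller
// must point the library at mappings.wasm before constructing a SourceMapConsumer,
// while in Node the initialize call above is a no-op.
//   const { SourceMapConsumer } = require("source-map");
//   SourceMapConsumer.initialize({ "lib/mappings.wasm": "https://example.com/mappings.wasm" });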

File diff suppressed because it is too large

View File

@@ -0,0 +1,413 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
const base64VLQ = require("./base64-vlq");
const util = require("./util");
const ArraySet = require("./array-set").ArraySet;
const MappingList = require("./mapping-list").MappingList;
/**
* An instance of the SourceMapGenerator represents a source map which is
* being built incrementally. You may pass an object with the following
* properties:
*
* - file: The filename of the generated source.
* - sourceRoot: A root for all relative URLs in this source map.
*/
class SourceMapGenerator {
constructor(aArgs) {
if (!aArgs) {
aArgs = {};
}
this._file = util.getArg(aArgs, "file", null);
this._sourceRoot = util.getArg(aArgs, "sourceRoot", null);
this._skipValidation = util.getArg(aArgs, "skipValidation", false);
this._sources = new ArraySet();
this._names = new ArraySet();
this._mappings = new MappingList();
this._sourcesContents = null;
}
/**
* Creates a new SourceMapGenerator based on a SourceMapConsumer
*
* @param aSourceMapConsumer The SourceMap.
*/
static fromSourceMap(aSourceMapConsumer) {
const sourceRoot = aSourceMapConsumer.sourceRoot;
const generator = new SourceMapGenerator({
file: aSourceMapConsumer.file,
sourceRoot
});
aSourceMapConsumer.eachMapping(function(mapping) {
const newMapping = {
generated: {
line: mapping.generatedLine,
column: mapping.generatedColumn
}
};
if (mapping.source != null) {
newMapping.source = mapping.source;
if (sourceRoot != null) {
newMapping.source = util.relative(sourceRoot, newMapping.source);
}
newMapping.original = {
line: mapping.originalLine,
column: mapping.originalColumn
};
if (mapping.name != null) {
newMapping.name = mapping.name;
}
}
generator.addMapping(newMapping);
});
aSourceMapConsumer.sources.forEach(function(sourceFile) {
let sourceRelative = sourceFile;
if (sourceRoot !== null) {
sourceRelative = util.relative(sourceRoot, sourceFile);
}
if (!generator._sources.has(sourceRelative)) {
generator._sources.add(sourceRelative);
}
const content = aSourceMapConsumer.sourceContentFor(sourceFile);
if (content != null) {
generator.setSourceContent(sourceFile, content);
}
});
return generator;
}
/**
* Add a single mapping from original source line and column to the generated
* source's line and column for this source map being created. The mapping
* object should have the following properties:
*
* - generated: An object with the generated line and column positions.
* - original: An object with the original line and column positions.
* - source: The original source file (relative to the sourceRoot).
* - name: An optional original token name for this mapping.
*/
addMapping(aArgs) {
const generated = util.getArg(aArgs, "generated");
const original = util.getArg(aArgs, "original", null);
let source = util.getArg(aArgs, "source", null);
let name = util.getArg(aArgs, "name", null);
if (!this._skipValidation) {
this._validateMapping(generated, original, source, name);
}
if (source != null) {
source = String(source);
if (!this._sources.has(source)) {
this._sources.add(source);
}
}
if (name != null) {
name = String(name);
if (!this._names.has(name)) {
this._names.add(name);
}
}
this._mappings.add({
generatedLine: generated.line,
generatedColumn: generated.column,
originalLine: original != null && original.line,
originalColumn: original != null && original.column,
source,
name
});
}
/**
* Set the source content for a source file.
*/
setSourceContent(aSourceFile, aSourceContent) {
let source = aSourceFile;
if (this._sourceRoot != null) {
source = util.relative(this._sourceRoot, source);
}
if (aSourceContent != null) {
// Add the source content to the _sourcesContents map.
// Create a new _sourcesContents map if the property is null.
if (!this._sourcesContents) {
this._sourcesContents = Object.create(null);
}
this._sourcesContents[util.toSetString(source)] = aSourceContent;
} else if (this._sourcesContents) {
// Remove the source file from the _sourcesContents map.
// If the _sourcesContents map is empty, set the property to null.
delete this._sourcesContents[util.toSetString(source)];
if (Object.keys(this._sourcesContents).length === 0) {
this._sourcesContents = null;
}
}
}
/**
* Applies the mappings of a sub-source-map for a specific source file to the
* source map being generated. Each mapping to the supplied source file is
* rewritten using the supplied source map. Note: The resolution for the
* resulting mappings is the minimum of this map and the supplied map.
*
* @param aSourceMapConsumer The source map to be applied.
* @param aSourceFile Optional. The filename of the source file.
* If omitted, SourceMapConsumer's file property will be used.
* @param aSourceMapPath Optional. The dirname of the path to the source map
* to be applied. If relative, it is relative to the SourceMapConsumer.
* This parameter is needed when the two source maps aren't in the same
* directory, and the source map to be applied contains relative source
* paths. If so, those relative source paths need to be rewritten
* relative to the SourceMapGenerator.
*/
applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) {
let sourceFile = aSourceFile;
// If aSourceFile is omitted, we will use the file property of the SourceMap
if (aSourceFile == null) {
if (aSourceMapConsumer.file == null) {
throw new Error(
"SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, " +
'or the source map\'s "file" property. Both were omitted.'
);
}
sourceFile = aSourceMapConsumer.file;
}
const sourceRoot = this._sourceRoot;
// Make "sourceFile" relative if an absolute Url is passed.
if (sourceRoot != null) {
sourceFile = util.relative(sourceRoot, sourceFile);
}
// Applying the SourceMap can add and remove items from the sources and
// the names array.
const newSources = this._mappings.toArray().length > 0
? new ArraySet()
: this._sources;
const newNames = new ArraySet();
// Find mappings for the "sourceFile"
this._mappings.unsortedForEach(function(mapping) {
if (mapping.source === sourceFile && mapping.originalLine != null) {
// Check if it can be mapped by the source map, then update the mapping.
const original = aSourceMapConsumer.originalPositionFor({
line: mapping.originalLine,
column: mapping.originalColumn
});
if (original.source != null) {
// Copy mapping
mapping.source = original.source;
if (aSourceMapPath != null) {
mapping.source = util.join(aSourceMapPath, mapping.source);
}
if (sourceRoot != null) {
mapping.source = util.relative(sourceRoot, mapping.source);
}
mapping.originalLine = original.line;
mapping.originalColumn = original.column;
if (original.name != null) {
mapping.name = original.name;
}
}
}
const source = mapping.source;
if (source != null && !newSources.has(source)) {
newSources.add(source);
}
const name = mapping.name;
if (name != null && !newNames.has(name)) {
newNames.add(name);
}
}, this);
this._sources = newSources;
this._names = newNames;
// Copy sourcesContents of applied map.
aSourceMapConsumer.sources.forEach(function(srcFile) {
const content = aSourceMapConsumer.sourceContentFor(srcFile);
if (content != null) {
if (aSourceMapPath != null) {
srcFile = util.join(aSourceMapPath, srcFile);
}
if (sourceRoot != null) {
srcFile = util.relative(sourceRoot, srcFile);
}
this.setSourceContent(srcFile, content);
}
}, this);
}
/**
* A mapping can have one of the three levels of data:
*
* 1. Just the generated position.
* 2. The Generated position, original position, and original source.
* 3. Generated and original position, original source, as well as a name
* token.
*
* To maintain consistency, we validate that any new mapping being added falls
* in to one of these categories.
*/
_validateMapping(aGenerated, aOriginal, aSource, aName) {
// When aOriginal is truthy but has empty values for .line and .column,
// it is most likely a programmer error. In this case we throw a very
// specific error message to try to guide them the right way.
// For example: https://github.com/Polymer/polymer-bundler/pull/519
if (aOriginal && typeof aOriginal.line !== "number" && typeof aOriginal.column !== "number") {
throw new Error(
"original.line and original.column are not numbers -- you probably meant to omit " +
"the original mapping entirely and only map the generated position. If so, pass " +
"null for the original mapping instead of an object with empty or null values."
);
}
if (aGenerated && "line" in aGenerated && "column" in aGenerated
&& aGenerated.line > 0 && aGenerated.column >= 0
&& !aOriginal && !aSource && !aName) {
// Case 1.
} else if (aGenerated && "line" in aGenerated && "column" in aGenerated
&& aOriginal && "line" in aOriginal && "column" in aOriginal
&& aGenerated.line > 0 && aGenerated.column >= 0
&& aOriginal.line > 0 && aOriginal.column >= 0
&& aSource) {
// Cases 2 and 3.
} else {
throw new Error("Invalid mapping: " + JSON.stringify({
generated: aGenerated,
source: aSource,
original: aOriginal,
name: aName
}));
}
}
/**
* Serialize the accumulated mappings in to the stream of base 64 VLQs
* specified by the source map format.
*/
_serializeMappings() {
let previousGeneratedColumn = 0;
let previousGeneratedLine = 1;
let previousOriginalColumn = 0;
let previousOriginalLine = 0;
let previousName = 0;
let previousSource = 0;
let result = "";
let next;
let mapping;
let nameIdx;
let sourceIdx;
const mappings = this._mappings.toArray();
for (let i = 0, len = mappings.length; i < len; i++) {
mapping = mappings[i];
next = "";
if (mapping.generatedLine !== previousGeneratedLine) {
previousGeneratedColumn = 0;
while (mapping.generatedLine !== previousGeneratedLine) {
next += ";";
previousGeneratedLine++;
}
} else if (i > 0) {
if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) {
continue;
}
next += ",";
}
next += base64VLQ.encode(mapping.generatedColumn
- previousGeneratedColumn);
previousGeneratedColumn = mapping.generatedColumn;
if (mapping.source != null) {
sourceIdx = this._sources.indexOf(mapping.source);
next += base64VLQ.encode(sourceIdx - previousSource);
previousSource = sourceIdx;
// lines are stored 0-based in SourceMap spec version 3
next += base64VLQ.encode(mapping.originalLine - 1
- previousOriginalLine);
previousOriginalLine = mapping.originalLine - 1;
next += base64VLQ.encode(mapping.originalColumn
- previousOriginalColumn);
previousOriginalColumn = mapping.originalColumn;
if (mapping.name != null) {
nameIdx = this._names.indexOf(mapping.name);
next += base64VLQ.encode(nameIdx - previousName);
previousName = nameIdx;
}
}
result += next;
}
return result;
}
_generateSourcesContent(aSources, aSourceRoot) {
return aSources.map(function(source) {
if (!this._sourcesContents) {
return null;
}
if (aSourceRoot != null) {
source = util.relative(aSourceRoot, source);
}
const key = util.toSetString(source);
return Object.prototype.hasOwnProperty.call(this._sourcesContents, key)
? this._sourcesContents[key]
: null;
}, this);
}
/**
* Externalize the source map.
*/
toJSON() {
const map = {
version: this._version,
sources: this._sources.toArray(),
names: this._names.toArray(),
mappings: this._serializeMappings()
};
if (this._file != null) {
map.file = this._file;
}
if (this._sourceRoot != null) {
map.sourceRoot = this._sourceRoot;
}
if (this._sourcesContents) {
map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot);
}
return map;
}
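// Illustrative shape of the object returned by toJSON() (assumed example):
//   { version: 3, sources: ["input.js"], names: [], mappings: "AAAA",
//     file: "out.js", sourcesContent: ["..."] }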
/**
* Render the source map being generated to a string.
*/
toString() {
return JSON.stringify(this.toJSON());
}
}
SourceMapGenerator.prototype._version = 3;
exports.SourceMapGenerator = SourceMapGenerator;

View File

@@ -0,0 +1,404 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
* Copyright 2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE or:
* http://opensource.org/licenses/BSD-3-Clause
*/
const SourceMapGenerator = require("./source-map-generator").SourceMapGenerator;
const util = require("./util");
// Matches a Windows-style `\r\n` newline or a `\n` newline used by all other
// operating systems these days (capturing the result).
const REGEX_NEWLINE = /(\r?\n)/;
// Newline character code for charCodeAt() comparisons
const NEWLINE_CODE = 10;
// Private symbol for identifying `SourceNode`s when multiple versions of
// the source-map library are loaded. This MUST NOT CHANGE across
// versions!
const isSourceNode = "$$$isSourceNode$$$";
/**
* SourceNodes provide a way to abstract over interpolating/concatenating
* snippets of generated JavaScript source code while maintaining the line and
* column information associated with the original source code.
*
* @param aLine The original line number.
* @param aColumn The original column number.
* @param aSource The original source's filename.
* @param aChunks Optional. An array of strings which are snippets of
* generated JS, or other SourceNodes.
* @param aName The original identifier.
*/
class SourceNode {
constructor(aLine, aColumn, aSource, aChunks, aName) {
this.children = [];
this.sourceContents = {};
this.line = aLine == null ? null : aLine;
this.column = aColumn == null ? null : aColumn;
this.source = aSource == null ? null : aSource;
this.name = aName == null ? null : aName;
this[isSourceNode] = true;
if (aChunks != null) this.add(aChunks);
}
/**
* Creates a SourceNode from generated code and a SourceMapConsumer.
*
* @param aGeneratedCode The generated code
* @param aSourceMapConsumer The SourceMap for the generated code
* @param aRelativePath Optional. The path that relative sources in the
* SourceMapConsumer should be relative to.
*/
static fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) {
// The SourceNode we want to fill with the generated code
// and the SourceMap
const node = new SourceNode();
// All even indices of this array are one line of the generated code,
// while all odd indices are the newlines between two adjacent lines
// (since `REGEX_NEWLINE` captures its match).
// Processed fragments are accessed by calling `shiftNextLine`.
const remainingLines = aGeneratedCode.split(REGEX_NEWLINE);
let remainingLinesIndex = 0;
const shiftNextLine = function() {
const lineContents = getNextLine();
// The last line of a file might not have a newline.
const newLine = getNextLine() || "";
return lineContents + newLine;
function getNextLine() {
return remainingLinesIndex < remainingLines.length ?
remainingLines[remainingLinesIndex++] : undefined;
}
};
// We need to remember the position of "remainingLines"
let lastGeneratedLine = 1, lastGeneratedColumn = 0;
// To generate SourceNodes we need a code range.
// To extract it, the current and the last mapping are used.
// Here we store the last mapping.
let lastMapping = null;
let nextLine;
aSourceMapConsumer.eachMapping(function(mapping) {
if (lastMapping !== null) {
// We add the code from "lastMapping" to "mapping":
// First check if there is a new line in between.
if (lastGeneratedLine < mapping.generatedLine) {
// Associate first line with "lastMapping"
addMappingWithCode(lastMapping, shiftNextLine());
lastGeneratedLine++;
lastGeneratedColumn = 0;
// The remaining code is added without mapping
} else {
// There is no new line in between.
// Associate the code between "lastGeneratedColumn" and
// "mapping.generatedColumn" with "lastMapping"
nextLine = remainingLines[remainingLinesIndex] || "";
const code = nextLine.substr(0, mapping.generatedColumn -
lastGeneratedColumn);
remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn -
lastGeneratedColumn);
lastGeneratedColumn = mapping.generatedColumn;
addMappingWithCode(lastMapping, code);
// No more remaining code, continue
lastMapping = mapping;
return;
}
}
// We add the generated code until the first mapping
// to the SourceNode without any mapping.
// Each line is added as a separate string.
while (lastGeneratedLine < mapping.generatedLine) {
node.add(shiftNextLine());
lastGeneratedLine++;
}
if (lastGeneratedColumn < mapping.generatedColumn) {
nextLine = remainingLines[remainingLinesIndex] || "";
node.add(nextLine.substr(0, mapping.generatedColumn));
remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn);
lastGeneratedColumn = mapping.generatedColumn;
}
lastMapping = mapping;
}, this);
// We have processed all mappings.
if (remainingLinesIndex < remainingLines.length) {
if (lastMapping) {
// Associate the remaining code in the current line with "lastMapping"
addMappingWithCode(lastMapping, shiftNextLine());
}
// and add the remaining lines without any mapping
node.add(remainingLines.splice(remainingLinesIndex).join(""));
}
// Copy sourcesContent into SourceNode
aSourceMapConsumer.sources.forEach(function(sourceFile) {
const content = aSourceMapConsumer.sourceContentFor(sourceFile);
if (content != null) {
if (aRelativePath != null) {
sourceFile = util.join(aRelativePath, sourceFile);
}
node.setSourceContent(sourceFile, content);
}
});
return node;
function addMappingWithCode(mapping, code) {
if (mapping === null || mapping.source === undefined) {
node.add(code);
} else {
const source = aRelativePath
? util.join(aRelativePath, mapping.source)
: mapping.source;
node.add(new SourceNode(mapping.originalLine,
mapping.originalColumn,
source,
code,
mapping.name));
}
}
}
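// Illustrative usage (assumed example; `rawMap` and `generatedJs` are
// placeholders, and `SourceMapConsumer` comes from this package's entry point):
//   const consumer = await new SourceMapConsumer(rawMap);
//   const node = SourceNode.fromStringWithSourceMap(generatedJs, consumer);
//   consumer.destroy();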
/**
* Add a chunk of generated JS to this source node.
*
* @param aChunk A string snippet of generated JS code, another instance of
* SourceNode, or an array where each member is one of those things.
*/
add(aChunk) {
if (Array.isArray(aChunk)) {
aChunk.forEach(function(chunk) {
this.add(chunk);
}, this);
} else if (aChunk[isSourceNode] || typeof aChunk === "string") {
if (aChunk) {
this.children.push(aChunk);
}
} else {
throw new TypeError(
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
);
}
return this;
}
/**
* Add a chunk of generated JS to the beginning of this source node.
*
* @param aChunk A string snippet of generated JS code, another instance of
* SourceNode, or an array where each member is one of those things.
*/
prepend(aChunk) {
if (Array.isArray(aChunk)) {
for (let i = aChunk.length - 1; i >= 0; i--) {
this.prepend(aChunk[i]);
}
} else if (aChunk[isSourceNode] || typeof aChunk === "string") {
this.children.unshift(aChunk);
} else {
throw new TypeError(
"Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk
);
}
return this;
}
/**
* Walk over the tree of JS snippets in this node and its children. The
* walking function is called once for each snippet of JS and is passed that
* snippet and its original associated source's line/column location.
*
* @param aFn The traversal function.
*/
walk(aFn) {
let chunk;
for (let i = 0, len = this.children.length; i < len; i++) {
chunk = this.children[i];
if (chunk[isSourceNode]) {
chunk.walk(aFn);
} else if (chunk !== "") {
aFn(chunk, { source: this.source,
line: this.line,
column: this.column,
name: this.name });
}
}
}
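// Illustrative usage (assumed example): log each chunk with its original position:
//   node.walk((chunk, loc) => {
//     console.log(loc.source, loc.line, loc.column, JSON.stringify(chunk));
//   });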
/**
* Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between
* each of `this.children`.
*
* @param aSep The separator.
*/
join(aSep) {
let newChildren;
let i;
const len = this.children.length;
if (len > 0) {
newChildren = [];
for (i = 0; i < len - 1; i++) {
newChildren.push(this.children[i]);
newChildren.push(aSep);
}
newChildren.push(this.children[i]);
this.children = newChildren;
}
return this;
}
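// Illustrative usage (assumed example): join children with newlines, like
// Array.prototype.join but keeping SourceNode children intact:
//   node.join("\n");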
/**
* Call String.prototype.replace on the very right-most source snippet. Useful
* for trimming whitespace from the end of a source node, etc.
*
* @param aPattern The pattern to replace.
* @param aReplacement The thing to replace the pattern with.
*/
replaceRight(aPattern, aReplacement) {
const lastChild = this.children[this.children.length - 1];
if (lastChild[isSourceNode]) {
lastChild.replaceRight(aPattern, aReplacement);
} else if (typeof lastChild === "string") {
this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement);
} else {
this.children.push("".replace(aPattern, aReplacement));
}
return this;
}
/**
* Set the source content for a source file. This will be added to the SourceMapGenerator
* in the sourcesContent field.
*
* @param aSourceFile The filename of the source file
* @param aSourceContent The content of the source file
*/
setSourceContent(aSourceFile, aSourceContent) {
this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent;
}
/**
* Walk over the tree of SourceNodes. The walking function is called for each
* source file content and is passed the filename and source content.
*
* @param aFn The traversal function.
*/
walkSourceContents(aFn) {
for (let i = 0, len = this.children.length; i < len; i++) {
if (this.children[i][isSourceNode]) {
this.children[i].walkSourceContents(aFn);
}
}
const sources = Object.keys(this.sourceContents);
for (let i = 0, len = sources.length; i < len; i++) {
aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]);
}
}
/**
* Return the string representation of this source node. Walks over the tree
* and concatenates all the various snippets together into one string.
*/
toString() {
let str = "";
this.walk(function(chunk) {
str += chunk;
});
return str;
}
/**
* Returns the string representation of this source node along with a source
* map.
*/
toStringWithSourceMap(aArgs) {
const generated = {
code: "",
line: 1,
column: 0
};
const map = new SourceMapGenerator(aArgs);
let sourceMappingActive = false;
let lastOriginalSource = null;
let lastOriginalLine = null;
let lastOriginalColumn = null;
let lastOriginalName = null;
this.walk(function(chunk, original) {
generated.code += chunk;
if (original.source !== null
&& original.line !== null
&& original.column !== null) {
if (lastOriginalSource !== original.source
|| lastOriginalLine !== original.line
|| lastOriginalColumn !== original.column
|| lastOriginalName !== original.name) {
map.addMapping({
source: original.source,
original: {
line: original.line,
column: original.column
},
generated: {
line: generated.line,
column: generated.column
},
name: original.name
});
}
lastOriginalSource = original.source;
lastOriginalLine = original.line;
lastOriginalColumn = original.column;
lastOriginalName = original.name;
sourceMappingActive = true;
} else if (sourceMappingActive) {
map.addMapping({
generated: {
line: generated.line,
column: generated.column
}
});
lastOriginalSource = null;
sourceMappingActive = false;
}
for (let idx = 0, length = chunk.length; idx < length; idx++) {
if (chunk.charCodeAt(idx) === NEWLINE_CODE) {
generated.line++;
generated.column = 0;
// Mappings end at eol
if (idx + 1 === length) {
lastOriginalSource = null;
sourceMappingActive = false;
} else if (sourceMappingActive) {
map.addMapping({
source: original.source,
original: {
line: original.line,
column: original.column
},
generated: {
line: generated.line,
column: generated.column
},
name: original.name
});
}
} else {
generated.column++;
}
}
});
this.walkSourceContents(function(sourceFile, sourceContent) {
map.setSourceContent(sourceFile, sourceContent);
});
return { code: generated.code, map };
}
}
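// Illustrative end-to-end usage (assumed example, not part of this file):
//   const node = new SourceNode(1, 0, "a.js", "var x = 1;\n");
//   node.setSourceContent("a.js", "let x = 1\n");
//   const { code, map } = node.toStringWithSourceMap({ file: "out.js" });
//   // `code` is the concatenated string; `map` is a SourceMapGenerator.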
exports.SourceNode = SourceNode;

File diff suppressed because it is too large

View File

@@ -0,0 +1,107 @@
const readWasm = require("../lib/read-wasm");
/**
* Provide the JIT with a nice shape / hidden class.
*/
function Mapping() {
this.generatedLine = 0;
this.generatedColumn = 0;
this.lastGeneratedColumn = null;
this.source = null;
this.originalLine = null;
this.originalColumn = null;
this.name = null;
}
let cachedWasm = null;
module.exports = function wasm() {
if (cachedWasm) {
return cachedWasm;
}
const callbackStack = [];
cachedWasm = readWasm().then(buffer => {
return WebAssembly.instantiate(buffer, {
env: {
mapping_callback(
generatedLine,
generatedColumn,
hasLastGeneratedColumn,
lastGeneratedColumn,
hasOriginal,
source,
originalLine,
originalColumn,
hasName,
name
) {
const mapping = new Mapping();
// JS uses 1-based line numbers, wasm uses 0-based.
mapping.generatedLine = generatedLine + 1;
mapping.generatedColumn = generatedColumn;
if (hasLastGeneratedColumn) {
// JS uses inclusive last generated column, wasm uses exclusive.
mapping.lastGeneratedColumn = lastGeneratedColumn - 1;
}
if (hasOriginal) {
mapping.source = source;
// JS uses 1-based line numbers, wasm uses 0-based.
mapping.originalLine = originalLine + 1;
mapping.originalColumn = originalColumn;
if (hasName) {
mapping.name = name;
}
}
callbackStack[callbackStack.length - 1](mapping);
},
start_all_generated_locations_for() { console.time("all_generated_locations_for"); },
end_all_generated_locations_for() { console.timeEnd("all_generated_locations_for"); },
start_compute_column_spans() { console.time("compute_column_spans"); },
end_compute_column_spans() { console.timeEnd("compute_column_spans"); },
start_generated_location_for() { console.time("generated_location_for"); },
end_generated_location_for() { console.timeEnd("generated_location_for"); },
start_original_location_for() { console.time("original_location_for"); },
end_original_location_for() { console.timeEnd("original_location_for"); },
start_parse_mappings() { console.time("parse_mappings"); },
end_parse_mappings() { console.timeEnd("parse_mappings"); },
start_sort_by_generated_location() { console.time("sort_by_generated_location"); },
end_sort_by_generated_location() { console.timeEnd("sort_by_generated_location"); },
start_sort_by_original_location() { console.time("sort_by_original_location"); },
end_sort_by_original_location() { console.timeEnd("sort_by_original_location"); },
}
});
}).then(Wasm => {
return {
exports: Wasm.instance.exports,
withMappingCallback: (mappingCallback, f) => {
callbackStack.push(mappingCallback);
try {
f();
} finally {
callbackStack.pop();
}
}
};
}).then(null, e => {
cachedWasm = null;
throw e;
});
return cachedWasm;
};
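// Illustrative consumption of this internal helper (assumed example; the wasm
// export name shown is hypothetical and depends on the bundled mappings.wasm):
//   const { exports: wasmExports, withMappingCallback } = await wasm();
//   withMappingCallback(mapping => mappings.push(mapping), () => {
//     wasmExports.parse_mappings(/* pointer into wasm memory */);
//   });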

View File

@@ -0,0 +1,91 @@
{
"name": "source-map",
"description": "Generates and consumes source maps",
"version": "0.7.4",
"homepage": "https://github.com/mozilla/source-map",
"author": "Nick Fitzgerald <nfitzgerald@mozilla.com>",
"contributors": [
"Tobias Koppers <tobias.koppers@googlemail.com>",
"Duncan Beevers <duncan@dweebd.com>",
"Stephen Crane <scrane@mozilla.com>",
"Ryan Seddon <seddon.ryan@gmail.com>",
"Miles Elam <miles.elam@deem.com>",
"Mihai Bazon <mihai.bazon@gmail.com>",
"Michael Ficarra <github.public.email@michael.ficarra.me>",
"Todd Wolfson <todd@twolfson.com>",
"Alexander Solovyov <alexander@solovyov.net>",
"Felix Gnass <fgnass@gmail.com>",
"Conrad Irwin <conrad.irwin@gmail.com>",
"usrbincc <usrbincc@yahoo.com>",
"David Glasser <glasser@davidglasser.net>",
"Chase Douglas <chase@newrelic.com>",
"Evan Wallace <evan.exe@gmail.com>",
"Heather Arthur <fayearthur@gmail.com>",
"Hugh Kennedy <hughskennedy@gmail.com>",
"David Glasser <glasser@davidglasser.net>",
"Simon Lydell <simon.lydell@gmail.com>",
"Jmeas Smith <jellyes2@gmail.com>",
"Michael Z Goddard <mzgoddard@gmail.com>",
"azu <azu@users.noreply.github.com>",
"John Gozde <john@gozde.ca>",
"Adam Kirkton <akirkton@truefitinnovation.com>",
"Chris Montgomery <christopher.montgomery@dowjones.com>",
"J. Ryan Stinnett <jryans@gmail.com>",
"Jack Herrington <jherrington@walmartlabs.com>",
"Chris Truter <jeffpalentine@gmail.com>",
"Daniel Espeset <daniel@danielespeset.com>",
"Jamie Wong <jamie.lf.wong@gmail.com>",
"Eddy Bruël <ejpbruel@mozilla.com>",
"Hawken Rives <hawkrives@gmail.com>",
"Gilad Peleg <giladp007@gmail.com>",
"djchie <djchie.dev@gmail.com>",
"Gary Ye <garysye@gmail.com>",
"Nicolas Lalevée <nicolas.lalevee@hibnet.org>"
],
"repository": {
"type": "git",
"url": "http://github.com/mozilla/source-map.git"
},
"main": "./source-map.js",
"types": "./source-map.d.ts",
"files": [
"source-map.js",
"source-map.d.ts",
"lib/",
"dist/source-map.js"
],
"engines": {
"node": ">= 8"
},
"license": "BSD-3-Clause",
"scripts": {
"lint": "eslint *.js lib/ test/",
"prebuild": "npm run lint",
"build": "webpack --color",
"pretest": "npm run build",
"test": "node test/run-tests.js",
"precoverage": "npm run build",
"coverage": "nyc node test/run-tests.js",
"setup": "mkdir -p coverage && cp -n .waiting.html coverage/index.html || true",
"dev:live": "live-server --port=4103 --ignorePattern='(js|css|png)$' coverage",
"dev:watch": "watch 'npm run coverage' lib/ test/",
"predev": "npm run setup",
"dev": "npm-run-all -p --silent dev:*",
"clean": "rm -rf coverage .nyc_output",
"toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md"
},
"devDependencies": {
"doctoc": "^1.3.1",
"eslint": "^4.19.1",
"live-server": "^1.2.0",
"npm-run-all": "^4.1.2",
"nyc": "^11.7.1",
"watch": "^1.0.2",
"webpack": "^4.9.1",
"webpack-cli": "^3.1"
},
"nyc": {
"reporter": "html"
},
"typings": "source-map"
}

View File

@@ -0,0 +1,369 @@
// Type definitions for source-map 0.7
// Project: https://github.com/mozilla/source-map
// Definitions by: Morten Houston Ludvigsen <https://github.com/MortenHoustonLudvigsen>,
// Ron Buckton <https://github.com/rbuckton>,
// John Vilk <https://github.com/jvilk>
// Definitions: https://github.com/mozilla/source-map
export type SourceMapUrl = string;
export interface StartOfSourceMap {
file?: string;
sourceRoot?: string;
skipValidation?: boolean;
}
export interface RawSourceMap {
version: number;
sources: string[];
names: string[];
sourceRoot?: string;
sourcesContent?: string[];
mappings: string;
file: string;
}
export interface RawIndexMap extends StartOfSourceMap {
version: number;
sections: RawSection[];
}
export interface RawSection {
offset: Position;
map: RawSourceMap;
}
export interface Position {
line: number;
column: number;
}
export interface NullablePosition {
line: number | null;
column: number | null;
lastColumn: number | null;
}
export interface MappedPosition {
source: string;
line: number;
column: number;
name?: string;
}
export interface NullableMappedPosition {
source: string | null;
line: number | null;
column: number | null;
name: string | null;
}
export interface MappingItem {
source: string;
generatedLine: number;
generatedColumn: number;
originalLine: number;
originalColumn: number;
name: string;
}
export interface Mapping {
generated: Position;
original: Position;
source: string;
name?: string;
}
export interface CodeWithSourceMap {
code: string;
map: SourceMapGenerator;
}
export interface SourceMapConsumer {
/**
* Compute the last column for each generated mapping. The last column is
* inclusive.
*/
computeColumnSpans(): void;
/**
* Returns the original source, line, and column information for the generated
* source's line and column positions provided. The only argument is an object
* with the following properties:
*
* - line: The line number in the generated source.
* - column: The column number in the generated source.
* - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
* 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
* closest element that is smaller than or greater than the one we are
* searching for, respectively, if the exact element cannot be found.
* Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
*
* and an object is returned with the following properties:
*
* - source: The original source file, or null.
* - line: The line number in the original source, or null.
* - column: The column number in the original source, or null.
* - name: The original identifier, or null.
*/
originalPositionFor(generatedPosition: Position & { bias?: number }): NullableMappedPosition;
/**
* Returns the generated line and column information for the original source,
* line, and column positions provided. The only argument is an object with
* the following properties:
*
* - source: The filename of the original source.
* - line: The line number in the original source.
* - column: The column number in the original source.
* - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
* 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
* closest element that is smaller than or greater than the one we are
* searching for, respectively, if the exact element cannot be found.
* Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
*
* and an object is returned with the following properties:
*
* - line: The line number in the generated source, or null.
* - column: The column number in the generated source, or null.
*/
generatedPositionFor(originalPosition: MappedPosition & { bias?: number }): NullablePosition;
/**
* Returns all generated line and column information for the original source,
* line, and column provided. If no column is provided, returns all mappings
* corresponding to either the line we are searching for or the next
* closest line that has any mappings. Otherwise, returns all mappings
* corresponding to the given line and either the column we are searching for
* or the next closest column that has any offsets.
*
* The only argument is an object with the following properties:
*
* - source: The filename of the original source.
* - line: The line number in the original source.
* - column: Optional. The column number in the original source.
*
* and an array of objects is returned, each with the following properties:
*
* - line: The line number in the generated source, or null.
* - column: The column number in the generated source, or null.
*/
allGeneratedPositionsFor(originalPosition: MappedPosition): NullablePosition[];
/**
* Return true if we have the source content for every source in the source
* map, false otherwise.
*/
hasContentsOfAllSources(): boolean;
/**
* Returns the original source content. The only argument is the url of the
* original source file. Returns null if no original source content is
* available.
*/
sourceContentFor(source: string, returnNullOnMissing?: boolean): string | null;
/**
* Iterate over each mapping between an original source/line/column and a
* generated line/column in this source map.
*
* @param callback
* The function that is called with each mapping.
* @param context
* Optional. If specified, this object will be the value of `this` every
* time that `aCallback` is called.
* @param order
* Either `SourceMapConsumer.GENERATED_ORDER` or
* `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
* iterate over the mappings sorted by the generated file's line/column
* order or the original's source/line/column order, respectively. Defaults to
* `SourceMapConsumer.GENERATED_ORDER`.
*/
eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void;
/**
* Free this source map consumer's associated wasm data that is manually-managed.
* Alternatively, you can use SourceMapConsumer.with to avoid needing to remember to call destroy.
*/
destroy(): void;
}
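// Illustrative usage of the consumer API above (assumed example):
//   const consumer = await new SourceMapConsumer(rawSourceMapJson);
//   const original = consumer.originalPositionFor({ line: 2, column: 10 });
//   consumer.destroy();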
export interface SourceMapConsumerConstructor {
prototype: SourceMapConsumer;
GENERATED_ORDER: number;
ORIGINAL_ORDER: number;
GREATEST_LOWER_BOUND: number;
LEAST_UPPER_BOUND: number;
new (rawSourceMap: RawSourceMap, sourceMapUrl?: SourceMapUrl): Promise<BasicSourceMapConsumer>;
new (rawSourceMap: RawIndexMap, sourceMapUrl?: SourceMapUrl): Promise<IndexedSourceMapConsumer>;
new (rawSourceMap: RawSourceMap | RawIndexMap | string, sourceMapUrl?: SourceMapUrl): Promise<BasicSourceMapConsumer | IndexedSourceMapConsumer>;
/**
* Create a BasicSourceMapConsumer from a SourceMapGenerator.
*
* @param sourceMap
* The source map that will be consumed.
*/
fromSourceMap(sourceMap: SourceMapGenerator, sourceMapUrl?: SourceMapUrl): Promise<BasicSourceMapConsumer>;
/**
* Construct a new `SourceMapConsumer` from `rawSourceMap` and `sourceMapUrl`
* (see the `SourceMapConsumer` constructor for details). Then, invoke the `async
* function f(SourceMapConsumer) -> T` with the newly constructed consumer, wait
* for `f` to complete, call `destroy` on the consumer, and return `f`'s return
* value.
*
* You must not use the consumer after `f` completes!
*
* By using `with`, you do not have to remember to manually call `destroy` on
* the consumer, since it will be called automatically once `f` completes.
*
* ```js
* const xSquared = await SourceMapConsumer.with(
* myRawSourceMap,
* null,
* async function (consumer) {
* // Use `consumer` inside here and don't worry about remembering
* // to call `destroy`.
*
* const x = await whatever(consumer);
* return x * x;
* }
* );
*
* // You may not use that `consumer` anymore out here; it has
* // been destroyed. But you can use `xSquared`.
* console.log(xSquared);
* ```
*/
with<T>(rawSourceMap: RawSourceMap | RawIndexMap | string, sourceMapUrl: SourceMapUrl | null | undefined, callback: (consumer: BasicSourceMapConsumer | IndexedSourceMapConsumer) => Promise<T> | T): Promise<T>;
}
export const SourceMapConsumer: SourceMapConsumerConstructor;
export interface BasicSourceMapConsumer extends SourceMapConsumer {
file: string;
sourceRoot: string;
sources: string[];
sourcesContent: string[];
}
export interface BasicSourceMapConsumerConstructor {
prototype: BasicSourceMapConsumer;
new (rawSourceMap: RawSourceMap | string): Promise<BasicSourceMapConsumer>;
/**
* Create a BasicSourceMapConsumer from a SourceMapGenerator.
*
* @param sourceMap
* The source map that will be consumed.
*/
fromSourceMap(sourceMap: SourceMapGenerator): Promise<BasicSourceMapConsumer>;
}
export const BasicSourceMapConsumer: BasicSourceMapConsumerConstructor;
export interface IndexedSourceMapConsumer extends SourceMapConsumer {
sources: string[];
}
export interface IndexedSourceMapConsumerConstructor {
prototype: IndexedSourceMapConsumer;
new (rawSourceMap: RawIndexMap | string): Promise<IndexedSourceMapConsumer>;
}
export const IndexedSourceMapConsumer: IndexedSourceMapConsumerConstructor;
export class SourceMapGenerator {
constructor(startOfSourceMap?: StartOfSourceMap);
/**
* Creates a new SourceMapGenerator based on a SourceMapConsumer
*
* @param sourceMapConsumer The SourceMap.
*/
static fromSourceMap(sourceMapConsumer: SourceMapConsumer): SourceMapGenerator;
/**
* Add a single mapping from original source line and column to the generated
* source's line and column for this source map being created. The mapping
* object should have the following properties:
*
* - generated: An object with the generated line and column positions.
* - original: An object with the original line and column positions.
* - source: The original source file (relative to the sourceRoot).
* - name: An optional original token name for this mapping.
*/
addMapping(mapping: Mapping): void;
/**
* Set the source content for a source file.
*/
setSourceContent(sourceFile: string, sourceContent: string): void;
/**
* Applies the mappings of a sub-source-map for a specific source file to the
* source map being generated. Each mapping to the supplied source file is
* rewritten using the supplied source map. Note: The resolution for the
* resulting mappings is the minimum of this map and the supplied map.
*
* @param sourceMapConsumer The source map to be applied.
* @param sourceFile Optional. The filename of the source file.
* If omitted, SourceMapConsumer's file property will be used.
* @param sourceMapPath Optional. The dirname of the path to the source map
* to be applied. If relative, it is relative to the SourceMapConsumer.
* This parameter is needed when the two source maps aren't in the same
* directory, and the source map to be applied contains relative source
* paths. If so, those relative source paths need to be rewritten
* relative to the SourceMapGenerator.
*/
applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void;
toString(): string;
toJSON(): RawSourceMap;
}
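// Illustrative usage of SourceMapGenerator (assumed example):
//   const gen = new SourceMapGenerator({ file: "out.js" });
//   gen.addMapping({
//     generated: { line: 1, column: 0 },
//     original: { line: 10, column: 2 },
//     source: "input.scss"
//   });
//   gen.setSourceContent("input.scss", "body { color: red }");
//   const rawMap = gen.toJSON();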
export class SourceNode {
children: SourceNode[];
sourceContents: any;
line: number;
column: number;
source: string;
name: string;
constructor();
constructor(
line: number | null,
column: number | null,
source: string | null,
chunks?: Array<(string | SourceNode)> | SourceNode | string,
name?: string
);
static fromStringWithSourceMap(
code: string,
sourceMapConsumer: SourceMapConsumer,
relativePath?: string
): SourceNode;
add(chunk: Array<(string | SourceNode)> | SourceNode | string): SourceNode;
prepend(chunk: Array<(string | SourceNode)> | SourceNode | string): SourceNode;
setSourceContent(sourceFile: string, sourceContent: string): void;
walk(fn: (chunk: string, mapping: MappedPosition) => void): void;
walkSourceContents(fn: (file: string, content: string) => void): void;
join(sep: string): SourceNode;
replaceRight(pattern: string, replacement: string): SourceNode;
toString(): string;
toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap;
}

View File

@@ -0,0 +1,8 @@
/*
* Copyright 2009-2011 Mozilla Foundation and contributors
* Licensed under the New BSD license. See LICENSE.txt or:
* http://opensource.org/licenses/BSD-3-Clause
*/
exports.SourceMapGenerator = require("./lib/source-map-generator").SourceMapGenerator;
exports.SourceMapConsumer = require("./lib/source-map-consumer").SourceMapConsumer;
exports.SourceNode = require("./lib/source-node").SourceNode;

46
node_modules/scss-tokenizer/package.json generated vendored Normal file
View File

@@ -0,0 +1,46 @@
{
"name": "scss-tokenizer",
"version": "0.4.3",
"description": "A tokenzier for Sass' SCSS syntax",
"main": "index.js",
"scripts": {
"test": "jest",
"clean": "rm lib/*",
"build": "npm run clean; babel src/ --out-dir lib",
"prepublishOnly": "npm run build"
},
"files": [
"index.js",
"lib"
],
"repository": {
"type": "git",
"url": "https://github.com/sasstools/scss-tokenizer.git"
},
"keywords": [
"parser",
"tokenizer",
"sass",
"scss",
"libsass"
],
"author": "xzyfer",
"license": "MIT",
"bugs": {
"url": "https://github.com/sasstools/scss-tokenizer/issues"
},
"homepage": "https://github.com/sasstools/scss-tokenizer",
"dependencies": {
"js-base64": "^2.4.9",
"source-map": "^0.7.3"
},
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-core": "^6.26.3",
"babel-jest": "^23.6.0",
"babel-preset-env": "^1.7.0",
"glob": "^7.1.3",
"jest": "^23.6.0",
"sass-spec": "3.5.1"
}
}