Initial commit

parent 208ba42c0e
commit c34269400a

7275 changed files with 2 additions and 1858662 deletions
server/.gitignore (vendored)
@@ -1 +1,2 @@
 .env
+node_modules/
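
Adding node_modules/ to .gitignore only keeps Git from picking up new files; paths that are already tracked, like the vendored dependencies deleted throughout the rest of this commit, stay in the index until they are removed explicitly. A minimal sketch of the equivalent manual cleanup, assuming the layout shown here (server/node_modules under the repository root) and run from that root:

    # Untrack the vendored dependencies without deleting them from the working tree.
    git rm -r --cached server/node_modules

    # Ignore them from now on, then record both changes.
    echo "node_modules/" >> server/.gitignore
    git commit -m "Stop tracking server/node_modules"
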
Deleted under server/node_modules/.bin/ (all generated, vendored; each file was a single line, hunk @@ -1 +0,0 @@, containing its relative symlink target):

server/node_modules/.bin/acorn -> ../acorn/bin/acorn
server/node_modules/.bin/color-support -> ../color-support/bin.js
server/node_modules/.bin/detect-libc -> ../detect-libc/bin/detect-libc.js
server/node_modules/.bin/ejs -> ../ejs/bin/cli.js
server/node_modules/.bin/esparse -> ../esprima/bin/esparse.js
server/node_modules/.bin/esvalidate -> ../esprima/bin/esvalidate.js
server/node_modules/.bin/is-ci -> ../is-ci/bin.js
server/node_modules/.bin/jake -> ../jake/bin/cli.js
server/node_modules/.bin/js-yaml -> ../js-yaml/bin/js-yaml.js
server/node_modules/.bin/json2yaml -> ../yamljs/bin/json2yaml
server/node_modules/.bin/mime -> ../mime/cli.js
server/node_modules/.bin/mkdirp -> ../mkdirp/bin/cmd.js
server/node_modules/.bin/node-gyp -> ../node-gyp/bin/node-gyp.js
server/node_modules/.bin/node-pre-gyp -> ../@mapbox/node-pre-gyp/bin/node-pre-gyp
server/node_modules/.bin/node-which -> ../which/bin/node-which
server/node_modules/.bin/nodemon -> ../nodemon/bin/nodemon.js
server/node_modules/.bin/nodetouch -> ../touch/bin/nodetouch.js
server/node_modules/.bin/nopt -> ../nopt/bin/nopt.js
server/node_modules/.bin/rc -> ../rc/cli.js
server/node_modules/.bin/resolve -> ../resolve/bin/resolve
server/node_modules/.bin/rimraf -> ../rimraf/bin.js
server/node_modules/.bin/semver -> ../jsonwebtoken/node_modules/semver/bin/semver
server/node_modules/.bin/sshpk-conv -> ../sshpk/bin/sshpk-conv
server/node_modules/.bin/sshpk-sign -> ../sshpk/bin/sshpk-sign
server/node_modules/.bin/sshpk-verify -> ../sshpk/bin/sshpk-verify
server/node_modules/.bin/ts-node -> ../ts-node/dist/bin.js
server/node_modules/.bin/ts-node-cwd -> ../ts-node/dist/bin-cwd.js
server/node_modules/.bin/ts-node-script -> ../ts-node/dist/bin-script.js
server/node_modules/.bin/ts-node-transpile-only -> ../ts-node/dist/bin-transpile.js
server/node_modules/.bin/ts-script -> ../ts-node/dist/bin-script-deprecated.js
server/node_modules/.bin/tsc -> ../typescript/bin/tsc
server/node_modules/.bin/tslint -> ../tslint/bin/tslint
server/node_modules/.bin/tsserver -> ../typescript/bin/tsserver
server/node_modules/.bin/uuid -> ../request/node_modules/uuid/bin/uuid
server/node_modules/.bin/yaml2json -> ../yamljs/bin/yaml2json
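
The .bin entries above are shims that npm and Yarn generate inside node_modules/.bin so that locally installed CLIs (tsc, ts-node, nodemon, and so on) are on the PATH of package scripts; they are recreated on every install, so deleting them from version control loses nothing. A short sketch of how the same tools are reached after a fresh install, assuming the Yarn 1.x setup this tree was built with:

    cd server
    yarn install                 # recreates node_modules/ including the .bin shims
    yarn tsc --version           # resolves to node_modules/.bin/tsc
    npx nodemon --version        # npx also looks in node_modules/.bin first
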
server/node_modules/.yarn-integrity (generated, vendored)
@@ -1,523 +0,0 @@
-{
-  "systemParams": "darwin-x64-93",
-  "modulesFolders": [
-    "node_modules"
-  ],
-  "flags": [],
-  "linkedModules": [],
-  "topLevelPatterns": [
-    "@types/bcrypt@^5.0.0",
-    "@types/cors@^2.8.12",
-    "@types/express-jwt@^6.0.4",
-    "@types/express@^4.0.39",
-    "@types/jsonwebtoken@^8.5.8",
-    "@types/node@^17.0.21",
-    "bcrypt@^5.0.1",
-    "body-parser@^1.18.2",
-    "cors@^2.8.5",
-    "dotenv@^16.0.0",
-    "express-jwt@^6.1.1",
-    "express@^4.16.2",
-    "jsonwebtoken@^8.5.1",
-    "nodemon@^2.0.15",
-    "reflect-metadata@^0.1.10",
-    "sequelize-typescript@^2.1.3",
-    "sequelize@^6.17.0",
-    "sqlite3@https://github.com/mapbox/node-sqlite3#918052b538b0effe6c4a44c74a16b2749c08a0d2",
-    "strong-error-handler@^4.0.0",
-    "ts-node@^10.6.0",
-    "tslint@^6.1.3",
-    "typescript@^4.6.2"
-  ],
-  "lockfileEntries": {
-    "@babel/code-frame@^7.0.0": "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789",
-    ...
[the remainder of the hunk deletes the "lockfileEntries" map, one line per resolved dependency mapping "package@range" to its registry.yarnpkg.com tarball URL and checksum; the excerpt breaks off partway through the map, at "semver@^7.3.5"]
|
|
||||||
"send@0.17.2": "https://registry.yarnpkg.com/send/-/send-0.17.2.tgz#926622f76601c41808012c8bf1688fe3906f7820",
|
|
||||||
"sequelize-pool@^7.1.0": "https://registry.yarnpkg.com/sequelize-pool/-/sequelize-pool-7.1.0.tgz#210b391af4002762f823188fd6ecfc7413020768",
|
|
||||||
"sequelize-typescript@^2.1.3": "https://registry.yarnpkg.com/sequelize-typescript/-/sequelize-typescript-2.1.3.tgz#94a8d0a4b5739fc917c8d8fa66e1acb5aadc1274",
|
|
||||||
"sequelize@^6.17.0": "https://registry.yarnpkg.com/sequelize/-/sequelize-6.17.0.tgz#78a21f39b8a7548c65c0cc2055e8231137c679a3",
|
|
||||||
"serve-static@1.14.2": "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.2.tgz#722d6294b1d62626d41b43a013ece4598d292bfa",
|
|
||||||
"set-blocking@^2.0.0": "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7",
|
|
||||||
"set-blocking@~2.0.0": "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7",
|
|
||||||
"setprototypeof@1.2.0": "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424",
|
|
||||||
"shebang-command@^2.0.0": "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea",
|
|
||||||
"shebang-regex@^3.0.0": "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172",
|
|
||||||
"signal-exit@^3.0.0": "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9",
|
|
||||||
"signal-exit@^3.0.2": "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9",
|
|
||||||
"sprintf-js@~1.0.2": "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c",
|
|
||||||
"sqlite3@https://github.com/mapbox/node-sqlite3#918052b538b0effe6c4a44c74a16b2749c08a0d2": "https://github.com/mapbox/node-sqlite3#918052b538b0effe6c4a44c74a16b2749c08a0d2",
|
|
||||||
"sshpk@^1.7.0": "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5",
|
|
||||||
"stable@^0.1.6": "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf",
|
|
||||||
"statuses@>= 1.5.0 < 2": "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c",
|
|
||||||
"statuses@~1.5.0": "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c",
|
|
||||||
"string-width@^1.0.1": "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3",
|
|
||||||
"string-width@^1.0.2 || 2 || 3 || 4": "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010",
|
|
||||||
"string-width@^4.0.0": "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010",
|
|
||||||
"string-width@^4.1.0": "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010",
|
|
||||||
"string-width@^4.2.2": "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010",
|
|
||||||
"string-width@^4.2.3": "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010",
|
|
||||||
"string_decoder@^1.1.1": "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e",
|
|
||||||
"string_decoder@~1.1.1": "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8",
|
|
||||||
"strip-ansi@^3.0.0": "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf",
|
|
||||||
"strip-ansi@^3.0.1": "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf",
|
|
||||||
"strip-ansi@^6.0.0": "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9",
|
|
||||||
"strip-ansi@^6.0.1": "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9",
|
|
||||||
"strip-final-newline@^2.0.0": "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad",
|
|
||||||
"strip-json-comments@~2.0.1": "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a",
|
|
||||||
"strong-error-handler@^4.0.0": "https://registry.yarnpkg.com/strong-error-handler/-/strong-error-handler-4.0.0.tgz#162e13020c7b3fc5d4bc3f69a5cb45ea44232b31",
|
|
||||||
"strong-globalize@^6.0.1": "https://registry.yarnpkg.com/strong-globalize/-/strong-globalize-6.0.5.tgz#4b9e12a57ae0c95b4bced3b89c347f7430a939c2",
|
|
||||||
"supports-color@^5.3.0": "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f",
|
|
||||||
"supports-color@^5.5.0": "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f",
|
|
||||||
"supports-color@^7.1.0": "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da",
|
|
||||||
"supports-preserve-symlinks-flag@^1.0.0": "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09",
|
|
||||||
"tar@^6.0.2": "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621",
|
|
||||||
"tar@^6.1.11": "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621",
|
|
||||||
"to-readable-stream@^1.0.0": "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771",
|
|
||||||
"to-regex-range@^5.0.1": "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4",
|
|
||||||
"toidentifier@1.0.1": "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35",
|
|
||||||
"toposort-class@^1.0.1": "https://registry.yarnpkg.com/toposort-class/-/toposort-class-1.0.1.tgz#7ffd1f78c8be28c3ba45cd4e1a3f5ee193bd9988",
|
|
||||||
"touch@^3.1.0": "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b",
|
|
||||||
"tough-cookie@~2.5.0": "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2",
|
|
||||||
"tr46@~0.0.3": "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a",
|
|
||||||
"ts-node@^10.6.0": "https://registry.yarnpkg.com/ts-node/-/ts-node-10.6.0.tgz#c3f4195d5173ce3affdc8f2fd2e9a7ac8de5376a",
|
|
||||||
"tslib@^1.13.0": "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00",
|
|
||||||
"tslib@^1.8.1": "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00",
|
|
||||||
"tslint@^6.1.3": "https://registry.yarnpkg.com/tslint/-/tslint-6.1.3.tgz#5c23b2eccc32487d5523bd3a470e9aa31789d904",
|
|
||||||
"tsutils@^2.29.0": "https://registry.yarnpkg.com/tsutils/-/tsutils-2.29.0.tgz#32b488501467acbedd4b85498673a0812aca0b99",
|
|
||||||
"tunnel-agent@^0.6.0": "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd",
|
|
||||||
"tweetnacl@^0.14.3": "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64",
|
|
||||||
"tweetnacl@~0.14.0": "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64",
|
|
||||||
"type-fest@^0.20.2": "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4",
|
|
||||||
"type-is@~1.6.18": "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131",
|
|
||||||
"typedarray-to-buffer@^3.1.5": "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080",
|
|
||||||
"typescript@^4.6.2": "https://registry.yarnpkg.com/typescript/-/typescript-4.6.2.tgz#fe12d2727b708f4eef40f51598b3398baa9611d4",
|
|
||||||
"undefsafe@^2.0.5": "https://registry.yarnpkg.com/undefsafe/-/undefsafe-2.0.5.tgz#38733b9327bdcd226db889fb723a6efd162e6e2c",
|
|
||||||
"unique-string@^2.0.0": "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d",
|
|
||||||
"unpipe@1.0.0": "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec",
|
|
||||||
"unpipe@~1.0.0": "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec",
|
|
||||||
"update-notifier@^5.1.0": "https://registry.yarnpkg.com/update-notifier/-/update-notifier-5.1.0.tgz#4ab0d7c7f36a231dd7316cf7729313f0214d9ad9",
|
|
||||||
"uri-js@^4.2.2": "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e",
|
|
||||||
"url-parse-lax@^3.0.0": "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c",
|
|
||||||
"util-deprecate@^1.0.1": "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf",
|
|
||||||
"util-deprecate@~1.0.1": "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf",
|
|
||||||
"utils-merge@1.0.1": "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713",
|
|
||||||
"uuid@^3.3.2": "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee",
|
|
||||||
"uuid@^8.3.2": "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2",
|
|
||||||
"v8-compile-cache-lib@^3.0.0": "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.0.tgz#0582bcb1c74f3a2ee46487ceecf372e46bce53e8",
|
|
||||||
"validator@^13.7.0": "https://registry.yarnpkg.com/validator/-/validator-13.7.0.tgz#4f9658ba13ba8f3d82ee881d3516489ea85c0857",
|
|
||||||
"vary@^1": "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc",
|
|
||||||
"vary@~1.1.2": "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc",
|
|
||||||
"verror@1.10.0": "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400",
|
|
||||||
"webidl-conversions@^3.0.0": "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871",
|
|
||||||
"whatwg-url@^5.0.0": "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d",
|
|
||||||
"which@^2.0.1": "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1",
|
|
||||||
"which@^2.0.2": "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1",
|
|
||||||
"wide-align@^1.1.0": "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3",
|
|
||||||
"wide-align@^1.1.2": "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3",
|
|
||||||
"widest-line@^3.1.0": "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca",
|
|
||||||
"wkx@^0.5.0": "https://registry.yarnpkg.com/wkx/-/wkx-0.5.0.tgz#c6c37019acf40e517cc6b94657a25a3d4aa33e8c",
|
|
||||||
"wrap-ansi@^7.0.0": "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43",
|
|
||||||
"wrappy@1": "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f",
|
|
||||||
"write-file-atomic@^3.0.0": "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8",
|
|
||||||
"xdg-basedir@^4.0.0": "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13",
|
|
||||||
"xmlcreate@^2.0.4": "https://registry.yarnpkg.com/xmlcreate/-/xmlcreate-2.0.4.tgz#0c5ab0f99cdd02a81065fa9cd8f8ae87624889be",
|
|
||||||
"yallist@^4.0.0": "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72",
|
|
||||||
"yamljs@^0.3.0": "https://registry.yarnpkg.com/yamljs/-/yamljs-0.3.0.tgz#dc060bf267447b39f7304e9b2bfbe8b5a7ddb03b",
|
|
||||||
"yn@3.1.1": "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
},
"files": [],
"artifacts": {
  "sqlite3@5.0.2": [
    "lib",
    "lib/binding",
    "lib/binding/napi-v3-darwin-x64",
    "lib/binding/napi-v3-darwin-x64/node_sqlite3.node"
  ],
  "bcrypt@5.0.1": [
    "lib",
    "lib/binding",
    "lib/binding/napi-v3",
    "lib/binding/napi-v3/bcrypt_lib.node"
  ]
}
}
22 server/node_modules/@babel/code-frame/LICENSE generated vendored
@@ -1,22 +0,0 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 server/node_modules/@babel/code-frame/README.md generated vendored
@@ -1,19 +0,0 @@
# @babel/code-frame

> Generate errors that contain a code frame that point to source locations.

See our website [@babel/code-frame](https://babeljs.io/docs/en/babel-code-frame) for more information.

## Install

Using npm:

```sh
npm install --save-dev @babel/code-frame
```

or using yarn:

```sh
yarn add @babel/code-frame --dev
```
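For context on the API being removed here, a minimal usage sketch of `codeFrameColumns` as exported by the `lib/index.js` below (assuming `@babel/code-frame` is installed; the sample source string, location, and message are made up for illustration):

```js
const { codeFrameColumns } = require("@babel/code-frame");

// Hypothetical snippet with a problem at line 2, column 16.
const rawLines = ["class Foo {", "  constructor() {", "}"].join("\n");
const location = { start: { line: 2, column: 16 } };

// Prints the source with a line-number gutter, a ^ marker at the location,
// and the optional message next to the marker.
console.log(codeFrameColumns(rawLines, location, { message: "Missing closing brace" }));
```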
163 server/node_modules/@babel/code-frame/lib/index.js generated vendored
@@ -1,163 +0,0 @@
"use strict";
|
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", {
|
|
||||||
value: true
|
|
||||||
});
|
|
||||||
exports.codeFrameColumns = codeFrameColumns;
|
|
||||||
exports.default = _default;
|
|
||||||
|
|
||||||
var _highlight = require("@babel/highlight");
|
|
||||||
|
|
||||||
let deprecationWarningShown = false;
|
|
||||||
|
|
||||||
function getDefs(chalk) {
|
|
||||||
return {
|
|
||||||
gutter: chalk.grey,
|
|
||||||
marker: chalk.red.bold,
|
|
||||||
message: chalk.red.bold
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
|
|
||||||
|
|
||||||
function getMarkerLines(loc, source, opts) {
|
|
||||||
const startLoc = Object.assign({
|
|
||||||
column: 0,
|
|
||||||
line: -1
|
|
||||||
}, loc.start);
|
|
||||||
const endLoc = Object.assign({}, startLoc, loc.end);
|
|
||||||
const {
|
|
||||||
linesAbove = 2,
|
|
||||||
linesBelow = 3
|
|
||||||
} = opts || {};
|
|
||||||
const startLine = startLoc.line;
|
|
||||||
const startColumn = startLoc.column;
|
|
||||||
const endLine = endLoc.line;
|
|
||||||
const endColumn = endLoc.column;
|
|
||||||
let start = Math.max(startLine - (linesAbove + 1), 0);
|
|
||||||
let end = Math.min(source.length, endLine + linesBelow);
|
|
||||||
|
|
||||||
if (startLine === -1) {
|
|
||||||
start = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (endLine === -1) {
|
|
||||||
end = source.length;
|
|
||||||
}
|
|
||||||
|
|
||||||
const lineDiff = endLine - startLine;
|
|
||||||
const markerLines = {};
|
|
||||||
|
|
||||||
if (lineDiff) {
|
|
||||||
for (let i = 0; i <= lineDiff; i++) {
|
|
||||||
const lineNumber = i + startLine;
|
|
||||||
|
|
||||||
if (!startColumn) {
|
|
||||||
markerLines[lineNumber] = true;
|
|
||||||
} else if (i === 0) {
|
|
||||||
const sourceLength = source[lineNumber - 1].length;
|
|
||||||
markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];
|
|
||||||
} else if (i === lineDiff) {
|
|
||||||
markerLines[lineNumber] = [0, endColumn];
|
|
||||||
} else {
|
|
||||||
const sourceLength = source[lineNumber - i].length;
|
|
||||||
markerLines[lineNumber] = [0, sourceLength];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (startColumn === endColumn) {
|
|
||||||
if (startColumn) {
|
|
||||||
markerLines[startLine] = [startColumn, 0];
|
|
||||||
} else {
|
|
||||||
markerLines[startLine] = true;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
markerLines[startLine] = [startColumn, endColumn - startColumn];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
start,
|
|
||||||
end,
|
|
||||||
markerLines
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function codeFrameColumns(rawLines, loc, opts = {}) {
|
|
||||||
const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight.shouldHighlight)(opts);
|
|
||||||
const chalk = (0, _highlight.getChalk)(opts);
|
|
||||||
const defs = getDefs(chalk);
|
|
||||||
|
|
||||||
const maybeHighlight = (chalkFn, string) => {
|
|
||||||
return highlighted ? chalkFn(string) : string;
|
|
||||||
};
|
|
||||||
|
|
||||||
const lines = rawLines.split(NEWLINE);
|
|
||||||
const {
|
|
||||||
start,
|
|
||||||
end,
|
|
||||||
markerLines
|
|
||||||
} = getMarkerLines(loc, lines, opts);
|
|
||||||
const hasColumns = loc.start && typeof loc.start.column === "number";
|
|
||||||
const numberMaxWidth = String(end).length;
|
|
||||||
const highlightedLines = highlighted ? (0, _highlight.default)(rawLines, opts) : rawLines;
|
|
||||||
let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => {
|
|
||||||
const number = start + 1 + index;
|
|
||||||
const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
|
|
||||||
const gutter = ` ${paddedNumber} |`;
|
|
||||||
const hasMarker = markerLines[number];
|
|
||||||
const lastMarkerLine = !markerLines[number + 1];
|
|
||||||
|
|
||||||
if (hasMarker) {
|
|
||||||
let markerLine = "";
|
|
||||||
|
|
||||||
if (Array.isArray(hasMarker)) {
|
|
||||||
const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
|
|
||||||
const numberOfMarkers = hasMarker[1] || 1;
|
|
||||||
markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), " ", markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join("");
|
|
||||||
|
|
||||||
if (lastMarkerLine && opts.message) {
|
|
||||||
markerLine += " " + maybeHighlight(defs.message, opts.message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line.length > 0 ? ` ${line}` : "", markerLine].join("");
|
|
||||||
} else {
|
|
||||||
return ` ${maybeHighlight(defs.gutter, gutter)}${line.length > 0 ? ` ${line}` : ""}`;
|
|
||||||
}
|
|
||||||
}).join("\n");
|
|
||||||
|
|
||||||
if (opts.message && !hasColumns) {
|
|
||||||
frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (highlighted) {
|
|
||||||
return chalk.reset(frame);
|
|
||||||
} else {
|
|
||||||
return frame;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function _default(rawLines, lineNumber, colNumber, opts = {}) {
|
|
||||||
if (!deprecationWarningShown) {
|
|
||||||
deprecationWarningShown = true;
|
|
||||||
const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
|
|
||||||
|
|
||||||
if (process.emitWarning) {
|
|
||||||
process.emitWarning(message, "DeprecationWarning");
|
|
||||||
} else {
|
|
||||||
const deprecationError = new Error(message);
|
|
||||||
deprecationError.name = "DeprecationWarning";
|
|
||||||
console.warn(new Error(message));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
colNumber = Math.max(colNumber, 0);
|
|
||||||
const location = {
|
|
||||||
start: {
|
|
||||||
column: colNumber,
|
|
||||||
line: lineNumber
|
|
||||||
}
|
|
||||||
};
|
|
||||||
return codeFrameColumns(rawLines, location, opts);
|
|
||||||
}
29 server/node_modules/@babel/code-frame/package.json generated vendored
@@ -1,29 +0,0 @@
{
  "name": "@babel/code-frame",
  "version": "7.16.7",
  "description": "Generate errors that contain a code frame that point to source locations.",
  "author": "The Babel Team (https://babel.dev/team)",
  "homepage": "https://babel.dev/docs/en/next/babel-code-frame",
  "bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen",
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-code-frame"
  },
  "main": "./lib/index.js",
  "dependencies": {
    "@babel/highlight": "^7.16.7"
  },
  "devDependencies": {
    "@types/chalk": "^2.0.0",
    "chalk": "^2.0.0",
    "strip-ansi": "^4.0.0"
  },
  "engines": {
    "node": ">=6.9.0"
  }
}
22 server/node_modules/@babel/helper-validator-identifier/LICENSE generated vendored
@@ -1,22 +0,0 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 server/node_modules/@babel/helper-validator-identifier/README.md generated vendored
@@ -1,19 +0,0 @@
# @babel/helper-validator-identifier

> Validate identifier/keywords name

See our website [@babel/helper-validator-identifier](https://babeljs.io/docs/en/babel-helper-validator-identifier) for more information.

## Install

Using npm:

```sh
npm install --save @babel/helper-validator-identifier
```

or using yarn:

```sh
yarn add @babel/helper-validator-identifier
```
84 server/node_modules/@babel/helper-validator-identifier/lib/identifier.js generated vendored
@@ -1,84 +0,0 @@
"use strict";
|
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", {
|
|
||||||
value: true
|
|
||||||
});
|
|
||||||
exports.isIdentifierChar = isIdentifierChar;
|
|
||||||
exports.isIdentifierName = isIdentifierName;
|
|
||||||
exports.isIdentifierStart = isIdentifierStart;
|
|
||||||
let nonASCIIidentifierStartChars = "\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u0870-\u0887\u0889-\u088e\u08a0-\u08c9\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u09fc\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c5d\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cdd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d04-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e86-\u0e8a\u0e8c-\u0ea3\u0ea5\u0ea7-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u1711\u171f-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4c\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf3\u1cf5\u1cf6\u1cfa\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31bf\u31f0-\u31ff\u3400-\u4dbf\u4e00-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7ca\ua7d0\ua7d1\ua7d3\ua7d5-\ua7d9\ua7f2-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua
873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab69\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc";
|
|
||||||
let nonASCIIidentifierChars = "\u200c\u200d\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u0898-\u089f\u08ca-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u09fe\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0afa-\u0aff\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b55-\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c04\u0c3c\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0d00-\u0d03\u0d3b\u0d3c\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d81-\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0ebc\u0ec8-\u0ecd\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1715\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u180f-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1abf-\u1ace\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf4\u1cf7-\u1cf9\u1dc0-\u1dff\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua82c\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua8ff-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f";
|
|
||||||
const nonASCIIidentifierStart = new RegExp("[" + nonASCIIidentifierStartChars + "]");
|
|
||||||
const nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]");
|
|
||||||
nonASCIIidentifierStartChars = nonASCIIidentifierChars = null;
|
|
||||||
const astralIdentifierStartCodes = [0, 11, 2, 25, 2, 18, 2, 1, 2, 14, 3, 13, 35, 122, 70, 52, 268, 28, 4, 48, 48, 31, 14, 29, 6, 37, 11, 29, 3, 35, 5, 7, 2, 4, 43, 157, 19, 35, 5, 35, 5, 39, 9, 51, 13, 10, 2, 14, 2, 6, 2, 1, 2, 10, 2, 14, 2, 6, 2, 1, 68, 310, 10, 21, 11, 7, 25, 5, 2, 41, 2, 8, 70, 5, 3, 0, 2, 43, 2, 1, 4, 0, 3, 22, 11, 22, 10, 30, 66, 18, 2, 1, 11, 21, 11, 25, 71, 55, 7, 1, 65, 0, 16, 3, 2, 2, 2, 28, 43, 28, 4, 28, 36, 7, 2, 27, 28, 53, 11, 21, 11, 18, 14, 17, 111, 72, 56, 50, 14, 50, 14, 35, 349, 41, 7, 1, 79, 28, 11, 0, 9, 21, 43, 17, 47, 20, 28, 22, 13, 52, 58, 1, 3, 0, 14, 44, 33, 24, 27, 35, 30, 0, 3, 0, 9, 34, 4, 0, 13, 47, 15, 3, 22, 0, 2, 0, 36, 17, 2, 24, 85, 6, 2, 0, 2, 3, 2, 14, 2, 9, 8, 46, 39, 7, 3, 1, 3, 21, 2, 6, 2, 1, 2, 4, 4, 0, 19, 0, 13, 4, 159, 52, 19, 3, 21, 2, 31, 47, 21, 1, 2, 0, 185, 46, 42, 3, 37, 47, 21, 0, 60, 42, 14, 0, 72, 26, 38, 6, 186, 43, 117, 63, 32, 7, 3, 0, 3, 7, 2, 1, 2, 23, 16, 0, 2, 0, 95, 7, 3, 38, 17, 0, 2, 0, 29, 0, 11, 39, 8, 0, 22, 0, 12, 45, 20, 0, 19, 72, 264, 8, 2, 36, 18, 0, 50, 29, 113, 6, 2, 1, 2, 37, 22, 0, 26, 5, 2, 1, 2, 31, 15, 0, 328, 18, 190, 0, 80, 921, 103, 110, 18, 195, 2637, 96, 16, 1070, 4050, 582, 8634, 568, 8, 30, 18, 78, 18, 29, 19, 47, 17, 3, 32, 20, 6, 18, 689, 63, 129, 74, 6, 0, 67, 12, 65, 1, 2, 0, 29, 6135, 9, 1237, 43, 8, 8936, 3, 2, 6, 2, 1, 2, 290, 46, 2, 18, 3, 9, 395, 2309, 106, 6, 12, 4, 8, 8, 9, 5991, 84, 2, 70, 2, 1, 3, 0, 3, 1, 3, 3, 2, 11, 2, 0, 2, 6, 2, 64, 2, 3, 3, 7, 2, 6, 2, 27, 2, 3, 2, 4, 2, 0, 4, 6, 2, 339, 3, 24, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 7, 1845, 30, 482, 44, 11, 6, 17, 0, 322, 29, 19, 43, 1269, 6, 2, 3, 2, 1, 2, 14, 2, 196, 60, 67, 8, 0, 1205, 3, 2, 26, 2, 1, 2, 0, 3, 0, 2, 9, 2, 3, 2, 0, 2, 0, 7, 0, 5, 0, 2, 0, 2, 0, 2, 2, 2, 1, 2, 0, 3, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 1, 2, 0, 3, 3, 2, 6, 2, 3, 2, 3, 2, 0, 2, 9, 2, 16, 6, 2, 2, 4, 2, 16, 4421, 42719, 33, 4152, 8, 221, 3, 5761, 15, 7472, 3104, 541, 1507, 4938];
|
|
||||||
const astralIdentifierCodes = [509, 0, 227, 0, 150, 4, 294, 9, 1368, 2, 2, 1, 6, 3, 41, 2, 5, 0, 166, 1, 574, 3, 9, 9, 370, 1, 154, 10, 50, 3, 123, 2, 54, 14, 32, 10, 3, 1, 11, 3, 46, 10, 8, 0, 46, 9, 7, 2, 37, 13, 2, 9, 6, 1, 45, 0, 13, 2, 49, 13, 9, 3, 2, 11, 83, 11, 7, 0, 161, 11, 6, 9, 7, 3, 56, 1, 2, 6, 3, 1, 3, 2, 10, 0, 11, 1, 3, 6, 4, 4, 193, 17, 10, 9, 5, 0, 82, 19, 13, 9, 214, 6, 3, 8, 28, 1, 83, 16, 16, 9, 82, 12, 9, 9, 84, 14, 5, 9, 243, 14, 166, 9, 71, 5, 2, 1, 3, 3, 2, 0, 2, 1, 13, 9, 120, 6, 3, 6, 4, 0, 29, 9, 41, 6, 2, 3, 9, 0, 10, 10, 47, 15, 406, 7, 2, 7, 17, 9, 57, 21, 2, 13, 123, 5, 4, 0, 2, 1, 2, 6, 2, 0, 9, 9, 49, 4, 2, 1, 2, 4, 9, 9, 330, 3, 19306, 9, 87, 9, 39, 4, 60, 6, 26, 9, 1014, 0, 2, 54, 8, 3, 82, 0, 12, 1, 19628, 1, 4706, 45, 3, 22, 543, 4, 4, 5, 9, 7, 3, 6, 31, 3, 149, 2, 1418, 49, 513, 54, 5, 49, 9, 0, 15, 0, 23, 4, 2, 14, 1361, 6, 2, 16, 3, 6, 2, 1, 2, 4, 262, 6, 10, 9, 357, 0, 62, 13, 1495, 6, 110, 6, 6, 9, 4759, 9, 787719, 239];
|
|
||||||
|
|
||||||
function isInAstralSet(code, set) {
|
|
||||||
let pos = 0x10000;
|
|
||||||
|
|
||||||
for (let i = 0, length = set.length; i < length; i += 2) {
|
|
||||||
pos += set[i];
|
|
||||||
if (pos > code) return false;
|
|
||||||
pos += set[i + 1];
|
|
||||||
if (pos >= code) return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
function isIdentifierStart(code) {
|
|
||||||
if (code < 65) return code === 36;
|
|
||||||
if (code <= 90) return true;
|
|
||||||
if (code < 97) return code === 95;
|
|
||||||
if (code <= 122) return true;
|
|
||||||
|
|
||||||
if (code <= 0xffff) {
|
|
||||||
return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code));
|
|
||||||
}
|
|
||||||
|
|
||||||
return isInAstralSet(code, astralIdentifierStartCodes);
|
|
||||||
}
|
|
||||||
|
|
||||||
function isIdentifierChar(code) {
|
|
||||||
if (code < 48) return code === 36;
|
|
||||||
if (code < 58) return true;
|
|
||||||
if (code < 65) return false;
|
|
||||||
if (code <= 90) return true;
|
|
||||||
if (code < 97) return code === 95;
|
|
||||||
if (code <= 122) return true;
|
|
||||||
|
|
||||||
if (code <= 0xffff) {
|
|
||||||
return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code));
|
|
||||||
}
|
|
||||||
|
|
||||||
return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes);
|
|
||||||
}
|
|
||||||
|
|
||||||
function isIdentifierName(name) {
|
|
||||||
let isFirst = true;
|
|
||||||
|
|
||||||
for (let i = 0; i < name.length; i++) {
|
|
||||||
let cp = name.charCodeAt(i);
|
|
||||||
|
|
||||||
if ((cp & 0xfc00) === 0xd800 && i + 1 < name.length) {
|
|
||||||
const trail = name.charCodeAt(++i);
|
|
||||||
|
|
||||||
if ((trail & 0xfc00) === 0xdc00) {
|
|
||||||
cp = 0x10000 + ((cp & 0x3ff) << 10) + (trail & 0x3ff);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isFirst) {
|
|
||||||
isFirst = false;
|
|
||||||
|
|
||||||
if (!isIdentifierStart(cp)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
} else if (!isIdentifierChar(cp)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return !isFirst;
|
|
||||||
}
57 server/node_modules/@babel/helper-validator-identifier/lib/index.js generated vendored
@@ -1,57 +0,0 @@
"use strict";
|
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", {
|
|
||||||
value: true
|
|
||||||
});
|
|
||||||
Object.defineProperty(exports, "isIdentifierChar", {
|
|
||||||
enumerable: true,
|
|
||||||
get: function () {
|
|
||||||
return _identifier.isIdentifierChar;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
Object.defineProperty(exports, "isIdentifierName", {
|
|
||||||
enumerable: true,
|
|
||||||
get: function () {
|
|
||||||
return _identifier.isIdentifierName;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
Object.defineProperty(exports, "isIdentifierStart", {
|
|
||||||
enumerable: true,
|
|
||||||
get: function () {
|
|
||||||
return _identifier.isIdentifierStart;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
Object.defineProperty(exports, "isKeyword", {
|
|
||||||
enumerable: true,
|
|
||||||
get: function () {
|
|
||||||
return _keyword.isKeyword;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
Object.defineProperty(exports, "isReservedWord", {
|
|
||||||
enumerable: true,
|
|
||||||
get: function () {
|
|
||||||
return _keyword.isReservedWord;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
Object.defineProperty(exports, "isStrictBindOnlyReservedWord", {
|
|
||||||
enumerable: true,
|
|
||||||
get: function () {
|
|
||||||
return _keyword.isStrictBindOnlyReservedWord;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
Object.defineProperty(exports, "isStrictBindReservedWord", {
|
|
||||||
enumerable: true,
|
|
||||||
get: function () {
|
|
||||||
return _keyword.isStrictBindReservedWord;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
Object.defineProperty(exports, "isStrictReservedWord", {
|
|
||||||
enumerable: true,
|
|
||||||
get: function () {
|
|
||||||
return _keyword.isStrictReservedWord;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
var _identifier = require("./identifier");
|
|
||||||
|
|
||||||
var _keyword = require("./keyword");
38 server/node_modules/@babel/helper-validator-identifier/lib/keyword.js generated vendored
@@ -1,38 +0,0 @@
"use strict";
|
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", {
|
|
||||||
value: true
|
|
||||||
});
|
|
||||||
exports.isKeyword = isKeyword;
|
|
||||||
exports.isReservedWord = isReservedWord;
|
|
||||||
exports.isStrictBindOnlyReservedWord = isStrictBindOnlyReservedWord;
|
|
||||||
exports.isStrictBindReservedWord = isStrictBindReservedWord;
|
|
||||||
exports.isStrictReservedWord = isStrictReservedWord;
|
|
||||||
const reservedWords = {
|
|
||||||
keyword: ["break", "case", "catch", "continue", "debugger", "default", "do", "else", "finally", "for", "function", "if", "return", "switch", "throw", "try", "var", "const", "while", "with", "new", "this", "super", "class", "extends", "export", "import", "null", "true", "false", "in", "instanceof", "typeof", "void", "delete"],
|
|
||||||
strict: ["implements", "interface", "let", "package", "private", "protected", "public", "static", "yield"],
|
|
||||||
strictBind: ["eval", "arguments"]
|
|
||||||
};
|
|
||||||
const keywords = new Set(reservedWords.keyword);
|
|
||||||
const reservedWordsStrictSet = new Set(reservedWords.strict);
|
|
||||||
const reservedWordsStrictBindSet = new Set(reservedWords.strictBind);
|
|
||||||
|
|
||||||
function isReservedWord(word, inModule) {
|
|
||||||
return inModule && word === "await" || word === "enum";
|
|
||||||
}
|
|
||||||
|
|
||||||
function isStrictReservedWord(word, inModule) {
|
|
||||||
return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word);
|
|
||||||
}
|
|
||||||
|
|
||||||
function isStrictBindOnlyReservedWord(word) {
|
|
||||||
return reservedWordsStrictBindSet.has(word);
|
|
||||||
}
|
|
||||||
|
|
||||||
function isStrictBindReservedWord(word, inModule) {
|
|
||||||
return isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word);
|
|
||||||
}
|
|
||||||
|
|
||||||
function isKeyword(word) {
|
|
||||||
return keywords.has(word);
|
|
||||||
}
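A short usage sketch of the helpers defined above (assuming `@babel/helper-validator-identifier` is installed and that these functions are re-exported from the package root, as its `lib/index.js` shown earlier does; the sample strings are arbitrary):

```js
const {
  isIdentifierName,
  isKeyword,
  isStrictReservedWord,
} = require("@babel/helper-validator-identifier");

console.log(isIdentifierName("fooBar"));          // true
console.log(isIdentifierName("foo-bar"));         // false: "-" is not an identifier character
console.log(isKeyword("class"));                  // true: listed in reservedWords.keyword
console.log(isStrictReservedWord("yield", true)); // true: reserved in strict/module code
```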
27 server/node_modules/@babel/helper-validator-identifier/package.json generated vendored
@@ -1,27 +0,0 @@
{
  "name": "@babel/helper-validator-identifier",
  "version": "7.16.7",
  "description": "Validate identifier/keywords name",
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-helper-validator-identifier"
  },
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "main": "./lib/index.js",
  "exports": {
    ".": "./lib/index.js",
    "./package.json": "./package.json"
  },
  "devDependencies": {
    "@unicode/unicode-14.0.0": "^1.2.1",
    "charcodes": "^0.2.0"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "author": "The Babel Team (https://babel.dev/team)"
}
@@ -1,75 +0,0 @@
"use strict";
|
|
||||||
|
|
||||||
// Always use the latest available version of Unicode!
|
|
||||||
// https://tc39.github.io/ecma262/#sec-conformance
|
|
||||||
const version = "14.0.0";
|
|
||||||
|
|
||||||
const start = require("@unicode/unicode-" +
|
|
||||||
version +
|
|
||||||
"/Binary_Property/ID_Start/code-points.js").filter(function (ch) {
|
|
||||||
return ch > 0x7f;
|
|
||||||
});
|
|
||||||
let last = -1;
|
|
||||||
const cont = [0x200c, 0x200d].concat(
|
|
||||||
require("@unicode/unicode-" +
|
|
||||||
version +
|
|
||||||
"/Binary_Property/ID_Continue/code-points.js").filter(function (ch) {
|
|
||||||
return ch > 0x7f && search(start, ch, last + 1) == -1;
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
function search(arr, ch, starting) {
|
|
||||||
for (let i = starting; arr[i] <= ch && i < arr.length; last = i++) {
|
|
||||||
if (arr[i] === ch) return i;
|
|
||||||
}
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
function pad(str, width) {
|
|
||||||
while (str.length < width) str = "0" + str;
|
|
||||||
return str;
|
|
||||||
}
|
|
||||||
|
|
||||||
function esc(code) {
|
|
||||||
const hex = code.toString(16);
|
|
||||||
if (hex.length <= 2) return "\\x" + pad(hex, 2);
|
|
||||||
else return "\\u" + pad(hex, 4);
|
|
||||||
}
|
|
||||||
|
|
||||||
function generate(chars) {
|
|
||||||
const astral = [];
|
|
||||||
let re = "";
|
|
||||||
for (let i = 0, at = 0x10000; i < chars.length; i++) {
|
|
||||||
const from = chars[i];
|
|
||||||
let to = from;
|
|
||||||
while (i < chars.length - 1 && chars[i + 1] == to + 1) {
|
|
||||||
i++;
|
|
||||||
to++;
|
|
||||||
}
|
|
||||||
if (to <= 0xffff) {
|
|
||||||
if (from == to) re += esc(from);
|
|
||||||
else if (from + 1 == to) re += esc(from) + esc(to);
|
|
||||||
else re += esc(from) + "-" + esc(to);
|
|
||||||
} else {
|
|
||||||
astral.push(from - at, to - from);
|
|
||||||
at = to;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return { nonASCII: re, astral: astral };
|
|
||||||
}
|
|
||||||
|
|
||||||
const startData = generate(start);
|
|
||||||
const contData = generate(cont);
|
|
||||||
|
|
||||||
console.log("/* prettier-ignore */");
|
|
||||||
console.log('let nonASCIIidentifierStartChars = "' + startData.nonASCII + '";');
|
|
||||||
console.log("/* prettier-ignore */");
|
|
||||||
console.log('let nonASCIIidentifierChars = "' + contData.nonASCII + '";');
|
|
||||||
console.log("/* prettier-ignore */");
|
|
||||||
console.log(
|
|
||||||
"const astralIdentifierStartCodes = " + JSON.stringify(startData.astral) + ";"
|
|
||||||
);
|
|
||||||
console.log("/* prettier-ignore */");
|
|
||||||
console.log(
|
|
||||||
"const astralIdentifierCodes = " + JSON.stringify(contData.astral) + ";"
|
|
||||||
);
22 server/node_modules/@babel/highlight/LICENSE generated vendored
@@ -1,22 +0,0 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
19 server/node_modules/@babel/highlight/README.md generated vendored
@@ -1,19 +0,0 @@
# @babel/highlight

> Syntax highlight JavaScript strings for output in terminals.

See our website [@babel/highlight](https://babeljs.io/docs/en/babel-highlight) for more information.

## Install

Using npm:

```sh
npm install --save-dev @babel/highlight
```

or using yarn:

```sh
yarn add @babel/highlight --dev
```
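A minimal usage sketch of the package's default `highlight` export, as defined in the `lib/index.js` below (assuming `@babel/highlight` is installed; the code string is arbitrary):

```js
const highlight = require("@babel/highlight").default;

const code = "const answer = 40 + 2;";

// forceColor emits ANSI colors even when the output stream is not a TTY.
process.stdout.write(highlight(code, { forceColor: true }) + "\n");
```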
116 server/node_modules/@babel/highlight/lib/index.js generated vendored
@@ -1,116 +0,0 @@
"use strict";
|
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", {
|
|
||||||
value: true
|
|
||||||
});
|
|
||||||
exports.default = highlight;
|
|
||||||
exports.getChalk = getChalk;
|
|
||||||
exports.shouldHighlight = shouldHighlight;
|
|
||||||
|
|
||||||
var _jsTokens = require("js-tokens");
|
|
||||||
|
|
||||||
var _helperValidatorIdentifier = require("@babel/helper-validator-identifier");
|
|
||||||
|
|
||||||
var _chalk = require("chalk");
|
|
||||||
|
|
||||||
const sometimesKeywords = new Set(["as", "async", "from", "get", "of", "set"]);
|
|
||||||
|
|
||||||
function getDefs(chalk) {
|
|
||||||
return {
|
|
||||||
keyword: chalk.cyan,
|
|
||||||
capitalized: chalk.yellow,
|
|
||||||
jsxIdentifier: chalk.yellow,
|
|
||||||
punctuator: chalk.yellow,
|
|
||||||
number: chalk.magenta,
|
|
||||||
string: chalk.green,
|
|
||||||
regex: chalk.magenta,
|
|
||||||
comment: chalk.grey,
|
|
||||||
invalid: chalk.white.bgRed.bold
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
|
|
||||||
const BRACKET = /^[()[\]{}]$/;
|
|
||||||
let tokenize;
|
|
||||||
{
|
|
||||||
const JSX_TAG = /^[a-z][\w-]*$/i;
|
|
||||||
|
|
||||||
const getTokenType = function (token, offset, text) {
|
|
||||||
if (token.type === "name") {
|
|
||||||
if ((0, _helperValidatorIdentifier.isKeyword)(token.value) || (0, _helperValidatorIdentifier.isStrictReservedWord)(token.value, true) || sometimesKeywords.has(token.value)) {
|
|
||||||
return "keyword";
|
|
||||||
}
|
|
||||||
|
|
||||||
if (JSX_TAG.test(token.value) && (text[offset - 1] === "<" || text.substr(offset - 2, 2) == "</")) {
|
|
||||||
return "jsxIdentifier";
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.value[0] !== token.value[0].toLowerCase()) {
|
|
||||||
return "capitalized";
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "punctuator" && BRACKET.test(token.value)) {
|
|
||||||
return "bracket";
|
|
||||||
}
|
|
||||||
|
|
||||||
if (token.type === "invalid" && (token.value === "@" || token.value === "#")) {
|
|
||||||
return "punctuator";
|
|
||||||
}
|
|
||||||
|
|
||||||
return token.type;
|
|
||||||
};
|
|
||||||
|
|
||||||
tokenize = function* (text) {
|
|
||||||
let match;
|
|
||||||
|
|
||||||
while (match = _jsTokens.default.exec(text)) {
|
|
||||||
const token = _jsTokens.matchToToken(match);
|
|
||||||
|
|
||||||
yield {
|
|
||||||
type: getTokenType(token, match.index, text),
|
|
||||||
value: token.value
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function highlightTokens(defs, text) {
|
|
||||||
let highlighted = "";
|
|
||||||
|
|
||||||
for (const {
|
|
||||||
type,
|
|
||||||
value
|
|
||||||
} of tokenize(text)) {
|
|
||||||
const colorize = defs[type];
|
|
||||||
|
|
||||||
if (colorize) {
|
|
||||||
highlighted += value.split(NEWLINE).map(str => colorize(str)).join("\n");
|
|
||||||
} else {
|
|
||||||
highlighted += value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return highlighted;
|
|
||||||
}
|
|
||||||
|
|
||||||
function shouldHighlight(options) {
|
|
||||||
return !!_chalk.supportsColor || options.forceColor;
|
|
||||||
}
|
|
||||||
|
|
||||||
function getChalk(options) {
|
|
||||||
return options.forceColor ? new _chalk.constructor({
|
|
||||||
enabled: true,
|
|
||||||
level: 1
|
|
||||||
}) : _chalk;
|
|
||||||
}
|
|
||||||
|
|
||||||
function highlight(code, options = {}) {
|
|
||||||
if (code !== "" && shouldHighlight(options)) {
|
|
||||||
const chalk = getChalk(options);
|
|
||||||
const defs = getDefs(chalk);
|
|
||||||
return highlightTokens(defs, code);
|
|
||||||
} else {
|
|
||||||
return code;
|
|
||||||
}
|
|
||||||
}
29 server/node_modules/@babel/highlight/package.json generated vendored
@@ -1,29 +0,0 @@
{
  "name": "@babel/highlight",
  "version": "7.16.10",
  "description": "Syntax highlight JavaScript strings for output in terminals.",
  "author": "The Babel Team (https://babel.dev/team)",
  "homepage": "https://babel.dev/docs/en/next/babel-highlight",
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-highlight"
  },
  "main": "./lib/index.js",
  "dependencies": {
    "@babel/helper-validator-identifier": "^7.16.7",
    "chalk": "^2.0.0",
    "js-tokens": "^4.0.0"
  },
  "devDependencies": {
    "@types/chalk": "^2.0.0",
    "strip-ansi": "^4.0.0"
  },
  "engines": {
    "node": ">=6.9.0"
  }
}
|
|
7
server/node_modules/@cspotcode/source-map-consumer/README.md
generated
vendored
@ -1,7 +0,0 @@
A smaller version of @cspotcode/source-map which includes only the consumer, not the generator.

**NOTE**: the type declarations are incorrect. They include declarations for things from @cspotcode/source-map
which are omitted from this package.

This is a deliberate, pragmatic choice. The declarations for things which *are* included -- the consumer -- should
be correct.
100
server/node_modules/@cspotcode/source-map-consumer/lib/array-set.js
generated
vendored
@ -1,100 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */

/**
 * A data structure which is a combination of an array and a set. Adding a new
 * member is O(1), testing for membership is O(1), and finding the index of an
 * element is O(1). Removing elements from the set is not supported. Only
 * strings are supported for membership.
 */
class ArraySet {
  constructor() {
    this._array = [];
    this._set = new Map();
  }

  /**
   * Static method for creating ArraySet instances from an existing array.
   */
  static fromArray(aArray, aAllowDuplicates) {
    const set = new ArraySet();
    for (let i = 0, len = aArray.length; i < len; i++) {
      set.add(aArray[i], aAllowDuplicates);
    }
    return set;
  }

  /**
   * Return how many unique items are in this ArraySet. If duplicates have been
   * added, then those do not count towards the size.
   *
   * @returns Number
   */
  size() {
    return this._set.size;
  }

  /**
   * Add the given string to this set.
   *
   * @param String aStr
   */
  add(aStr, aAllowDuplicates) {
    const isDuplicate = this.has(aStr);
    const idx = this._array.length;
    if (!isDuplicate || aAllowDuplicates) {
      this._array.push(aStr);
    }
    if (!isDuplicate) {
      this._set.set(aStr, idx);
    }
  }

  /**
   * Is the given string a member of this set?
   *
   * @param String aStr
   */
  has(aStr) {
    return this._set.has(aStr);
  }

  /**
   * What is the index of the given string in the array?
   *
   * @param String aStr
   */
  indexOf(aStr) {
    const idx = this._set.get(aStr);
    if (idx >= 0) {
      return idx;
    }
    throw new Error('"' + aStr + '" is not in the set.');
  }

  /**
   * What is the element at the given index?
   *
   * @param Number aIdx
   */
  at(aIdx) {
    if (aIdx >= 0 && aIdx < this._array.length) {
      return this._array[aIdx];
    }
    throw new Error("No element indexed by " + aIdx);
  }

  /**
   * Returns the array representation of this set (which has the proper indices
   * indicated by indexOf). Note that this is a copy of the internal array used
   * for storing the members so that no one can mess with internal state.
   */
  toArray() {
    return this._array.slice();
  }
}
exports.ArraySet = ArraySet;
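For orientation (not part of the diff): a small sketch of how the `ArraySet` above behaves. The require path simply points at the deleted file and is illustrative:

```js
// Sketch: exercising the ArraySet defined above (internal helper of the
// source-map consumer; path shown for illustration only).
const { ArraySet } = require("@cspotcode/source-map-consumer/lib/array-set");

const names = ArraySet.fromArray(["a.js", "b.js", "a.js"]); // duplicate ignored
console.log(names.size());          // 2
console.log(names.has("b.js"));     // true
console.log(names.indexOf("b.js")); // 1 -- O(1) lookup via the internal Map
console.log(names.at(0));           // "a.js"
console.log(names.toArray());       // ["a.js", "b.js"] -- a defensive copy
```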
107
server/node_modules/@cspotcode/source-map-consumer/lib/binary-search.js
generated
vendored
@ -1,107 +0,0 @@
/* -*- Mode: js; js-indent-level: 2; -*- */
/*
 * Copyright 2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE or:
 * http://opensource.org/licenses/BSD-3-Clause
 */

exports.GREATEST_LOWER_BOUND = 1;
exports.LEAST_UPPER_BOUND = 2;

/**
 * Recursive implementation of binary search.
 *
 * @param aLow Indices here and lower do not contain the needle.
 * @param aHigh Indices here and higher do not contain the needle.
 * @param aNeedle The element being searched for.
 * @param aHaystack The non-empty array being searched.
 * @param aCompare Function which takes two elements and returns -1, 0, or 1.
 * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
 *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
 *     closest element that is smaller than or greater than the one we are
 *     searching for, respectively, if the exact element cannot be found.
 */
function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) {
  // This function terminates when one of the following is true:
  //
  //   1. We find the exact element we are looking for.
  //
  //   2. We did not find the exact element, but we can return the index of
  //      the next-closest element.
  //
  //   3. We did not find the exact element, and there is no next-closest
  //      element than the one we are searching for, so we return -1.
  const mid = Math.floor((aHigh - aLow) / 2) + aLow;
  const cmp = aCompare(aNeedle, aHaystack[mid], true);
  if (cmp === 0) {
    // Found the element we are looking for.
    return mid;
  } else if (cmp > 0) {
    // Our needle is greater than aHaystack[mid].
    if (aHigh - mid > 1) {
      // The element is in the upper half.
      return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias);
    }

    // The exact needle element was not found in this haystack. Determine if
    // we are in termination case (3) or (2) and return the appropriate thing.
    if (aBias == exports.LEAST_UPPER_BOUND) {
      return aHigh < aHaystack.length ? aHigh : -1;
    }
    return mid;
  }

  // Our needle is less than aHaystack[mid].
  if (mid - aLow > 1) {
    // The element is in the lower half.
    return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias);
  }

  // we are in termination case (3) or (2) and return the appropriate thing.
  if (aBias == exports.LEAST_UPPER_BOUND) {
    return mid;
  }
  return aLow < 0 ? -1 : aLow;
}

/**
 * This is an implementation of binary search which will always try and return
 * the index of the closest element if there is no exact hit. This is because
 * mappings between original and generated line/col pairs are single points,
 * and there is an implicit region between each of them, so a miss just means
 * that you aren't on the very start of a region.
 *
 * @param aNeedle The element you are looking for.
 * @param aHaystack The array that is being searched.
 * @param aCompare A function which takes the needle and an element in the
 *     array and returns -1, 0, or 1 depending on whether the needle is less
 *     than, equal to, or greater than the element, respectively.
 * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or
 *     'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the
 *     closest element that is smaller than or greater than the one we are
 *     searching for, respectively, if the exact element cannot be found.
 *     Defaults to 'binarySearch.GREATEST_LOWER_BOUND'.
 */
exports.search = function search(aNeedle, aHaystack, aCompare, aBias) {
  if (aHaystack.length === 0) {
    return -1;
  }

  let index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack,
                              aCompare, aBias || exports.GREATEST_LOWER_BOUND);
  if (index < 0) {
    return -1;
  }

  // We have found either the exact element, or the next-closest element than
  // the one we are searching for. However, there may be more than one such
  // element. Make sure we always return the smallest of these.
  while (index - 1 >= 0) {
    if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) {
      break;
    }
    --index;
  }

  return index;
};
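A brief sketch of the `search` helper above using plain numbers and a numeric comparator, illustrating the two bias modes; the require path is illustrative:

```js
// Sketch: lower/upper-bound lookups with the binary search shown above.
const binarySearch = require("@cspotcode/source-map-consumer/lib/binary-search");

const haystack = [2, 4, 6, 8];
const cmp = (needle, element) => needle - element;

// Exact hit: returns the index of the element.
console.log(binarySearch.search(6, haystack, cmp)); // 2

// Miss with the default GREATEST_LOWER_BOUND bias: index of the closest
// element that is smaller than the needle.
console.log(binarySearch.search(5, haystack, cmp)); // 1 (value 4)

// Miss with LEAST_UPPER_BOUND: index of the closest larger element.
console.log(
  binarySearch.search(5, haystack, cmp, binarySearch.LEAST_UPPER_BOUND)
); // 2 (value 6)
```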
BIN
server/node_modules/@cspotcode/source-map-consumer/lib/mappings.wasm
generated
vendored
Binary file not shown.
25
server/node_modules/@cspotcode/source-map-consumer/lib/read-wasm.js
generated
vendored
@ -1,25 +0,0 @@
// Node version of reading a wasm file into an array buffer.
const fs = require("fs");
const path = require("path");

module.exports = function readWasm() {
  return new Promise((resolve, reject) => {
    const wasmPath = path.join(__dirname, "mappings.wasm");
    fs.readFile(wasmPath, null, (error, data) => {
      if (error) {
        reject(error);
        return;
      }

      resolve(data.buffer);
    });
  });
};
module.exports.sync = function readWasmSync() {
  const wasmPath = path.join(__dirname, "mappings.wasm");
  return fs.readFileSync(wasmPath).buffer;
};

module.exports.initialize = _ => {
  console.debug("SourceMapConsumer.initialize is a no-op when running in node.js");
};
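For orientation, a small sketch of how this loader turns the bundled `mappings.wasm` (the binary entry just above) into a compiled WebAssembly module; the require path is illustrative:

```js
// Sketch: compiling the bundled mappings.wasm with the loader shown above.
const readWasm = require("@cspotcode/source-map-consumer/lib/read-wasm");

async function loadAsync() {
  const buffer = await readWasm();    // ArrayBuffer of mappings.wasm
  return WebAssembly.compile(buffer); // Promise<WebAssembly.Module>
}

function loadSync() {
  return new WebAssembly.Module(readWasm.sync()); // synchronous variant
}

loadAsync().then(mod => console.log(mod instanceof WebAssembly.Module)); // true
```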
1229
server/node_modules/@cspotcode/source-map-consumer/lib/source-map-consumer.js
generated
vendored
File diff suppressed because it is too large
546
server/node_modules/@cspotcode/source-map-consumer/lib/util.js
generated
vendored
|
@ -1,546 +0,0 @@
|
||||||
/* -*- Mode: js; js-indent-level: 2; -*- */
|
|
||||||
/*
|
|
||||||
* Copyright 2011 Mozilla Foundation and contributors
|
|
||||||
* Licensed under the New BSD license. See LICENSE or:
|
|
||||||
* http://opensource.org/licenses/BSD-3-Clause
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This is a helper function for getting values from parameter/options
|
|
||||||
* objects.
|
|
||||||
*
|
|
||||||
* @param args The object we are extracting values from
|
|
||||||
* @param name The name of the property we are getting.
|
|
||||||
* @param defaultValue An optional value to return if the property is missing
|
|
||||||
* from the object. If this is not specified and the property is missing, an
|
|
||||||
* error will be thrown.
|
|
||||||
*/
|
|
||||||
function getArg(aArgs, aName, aDefaultValue) {
|
|
||||||
if (aName in aArgs) {
|
|
||||||
return aArgs[aName];
|
|
||||||
} else if (arguments.length === 3) {
|
|
||||||
return aDefaultValue;
|
|
||||||
}
|
|
||||||
throw new Error('"' + aName + '" is a required argument.');
|
|
||||||
|
|
||||||
}
|
|
||||||
exports.getArg = getArg;
|
|
||||||
|
|
||||||
const urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;
|
|
||||||
const dataUrlRegexp = /^data:.+\,.+$/;
|
|
||||||
|
|
||||||
function urlParse(aUrl) {
|
|
||||||
const match = aUrl.match(urlRegexp);
|
|
||||||
if (!match) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
scheme: match[1],
|
|
||||||
auth: match[2],
|
|
||||||
host: match[3],
|
|
||||||
port: match[4],
|
|
||||||
path: match[5]
|
|
||||||
};
|
|
||||||
}
|
|
||||||
exports.urlParse = urlParse;
|
|
||||||
|
|
||||||
function urlGenerate(aParsedUrl) {
|
|
||||||
let url = "";
|
|
||||||
if (aParsedUrl.scheme) {
|
|
||||||
url += aParsedUrl.scheme + ":";
|
|
||||||
}
|
|
||||||
url += "//";
|
|
||||||
if (aParsedUrl.auth) {
|
|
||||||
url += aParsedUrl.auth + "@";
|
|
||||||
}
|
|
||||||
if (aParsedUrl.host) {
|
|
||||||
url += aParsedUrl.host;
|
|
||||||
}
|
|
||||||
if (aParsedUrl.port) {
|
|
||||||
url += ":" + aParsedUrl.port;
|
|
||||||
}
|
|
||||||
if (aParsedUrl.path) {
|
|
||||||
url += aParsedUrl.path;
|
|
||||||
}
|
|
||||||
return url;
|
|
||||||
}
|
|
||||||
exports.urlGenerate = urlGenerate;
|
|
||||||
|
|
||||||
const MAX_CACHED_INPUTS = 32;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Takes some function `f(input) -> result` and returns a memoized version of
|
|
||||||
* `f`.
|
|
||||||
*
|
|
||||||
* We keep at most `MAX_CACHED_INPUTS` memoized results of `f` alive. The
|
|
||||||
* memoization is a dumb-simple, linear least-recently-used cache.
|
|
||||||
*/
|
|
||||||
function lruMemoize(f) {
|
|
||||||
const cache = [];
|
|
||||||
|
|
||||||
return function(input) {
|
|
||||||
for (let i = 0; i < cache.length; i++) {
|
|
||||||
if (cache[i].input === input) {
|
|
||||||
const temp = cache[0];
|
|
||||||
cache[0] = cache[i];
|
|
||||||
cache[i] = temp;
|
|
||||||
return cache[0].result;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = f(input);
|
|
||||||
|
|
||||||
cache.unshift({
|
|
||||||
input,
|
|
||||||
result,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (cache.length > MAX_CACHED_INPUTS) {
|
|
||||||
cache.pop();
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Normalizes a path, or the path portion of a URL:
|
|
||||||
*
|
|
||||||
* - Replaces consecutive slashes with one slash.
|
|
||||||
* - Removes unnecessary '.' parts.
|
|
||||||
* - Removes unnecessary '<dir>/..' parts.
|
|
||||||
*
|
|
||||||
* Based on code in the Node.js 'path' core module.
|
|
||||||
*
|
|
||||||
* @param aPath The path or url to normalize.
|
|
||||||
*/
|
|
||||||
const normalize = lruMemoize(function normalize(aPath) {
|
|
||||||
let path = aPath;
|
|
||||||
const url = urlParse(aPath);
|
|
||||||
if (url) {
|
|
||||||
if (!url.path) {
|
|
||||||
return aPath;
|
|
||||||
}
|
|
||||||
path = url.path;
|
|
||||||
}
|
|
||||||
const isAbsolute = exports.isAbsolute(path);
|
|
||||||
|
|
||||||
// Split the path into parts between `/` characters. This is much faster than
|
|
||||||
// using `.split(/\/+/g)`.
|
|
||||||
const parts = [];
|
|
||||||
let start = 0;
|
|
||||||
let i = 0;
|
|
||||||
while (true) {
|
|
||||||
start = i;
|
|
||||||
i = path.indexOf("/", start);
|
|
||||||
if (i === -1) {
|
|
||||||
parts.push(path.slice(start));
|
|
||||||
break;
|
|
||||||
} else {
|
|
||||||
parts.push(path.slice(start, i));
|
|
||||||
while (i < path.length && path[i] === "/") {
|
|
||||||
i++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let up = 0;
|
|
||||||
for (i = parts.length - 1; i >= 0; i--) {
|
|
||||||
const part = parts[i];
|
|
||||||
if (part === ".") {
|
|
||||||
parts.splice(i, 1);
|
|
||||||
} else if (part === "..") {
|
|
||||||
up++;
|
|
||||||
} else if (up > 0) {
|
|
||||||
if (part === "") {
|
|
||||||
// The first part is blank if the path is absolute. Trying to go
|
|
||||||
// above the root is a no-op. Therefore we can remove all '..' parts
|
|
||||||
// directly after the root.
|
|
||||||
parts.splice(i + 1, up);
|
|
||||||
up = 0;
|
|
||||||
} else {
|
|
||||||
parts.splice(i, 2);
|
|
||||||
up--;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
path = parts.join("/");
|
|
||||||
|
|
||||||
if (path === "") {
|
|
||||||
path = isAbsolute ? "/" : ".";
|
|
||||||
}
|
|
||||||
|
|
||||||
if (url) {
|
|
||||||
url.path = path;
|
|
||||||
return urlGenerate(url);
|
|
||||||
}
|
|
||||||
return path;
|
|
||||||
});
|
|
||||||
exports.normalize = normalize;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Joins two paths/URLs.
|
|
||||||
*
|
|
||||||
* @param aRoot The root path or URL.
|
|
||||||
* @param aPath The path or URL to be joined with the root.
|
|
||||||
*
|
|
||||||
* - If aPath is a URL or a data URI, aPath is returned, unless aPath is a
|
|
||||||
* scheme-relative URL: Then the scheme of aRoot, if any, is prepended
|
|
||||||
* first.
|
|
||||||
* - Otherwise aPath is a path. If aRoot is a URL, then its path portion
|
|
||||||
* is updated with the result and aRoot is returned. Otherwise the result
|
|
||||||
* is returned.
|
|
||||||
* - If aPath is absolute, the result is aPath.
|
|
||||||
* - Otherwise the two paths are joined with a slash.
|
|
||||||
* - Joining for example 'http://' and 'www.example.com' is also supported.
|
|
||||||
*/
|
|
||||||
function join(aRoot, aPath) {
|
|
||||||
if (aRoot === "") {
|
|
||||||
aRoot = ".";
|
|
||||||
}
|
|
||||||
if (aPath === "") {
|
|
||||||
aPath = ".";
|
|
||||||
}
|
|
||||||
const aPathUrl = urlParse(aPath);
|
|
||||||
const aRootUrl = urlParse(aRoot);
|
|
||||||
if (aRootUrl) {
|
|
||||||
aRoot = aRootUrl.path || "/";
|
|
||||||
}
|
|
||||||
|
|
||||||
// `join(foo, '//www.example.org')`
|
|
||||||
if (aPathUrl && !aPathUrl.scheme) {
|
|
||||||
if (aRootUrl) {
|
|
||||||
aPathUrl.scheme = aRootUrl.scheme;
|
|
||||||
}
|
|
||||||
return urlGenerate(aPathUrl);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (aPathUrl || aPath.match(dataUrlRegexp)) {
|
|
||||||
return aPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
// `join('http://', 'www.example.com')`
|
|
||||||
if (aRootUrl && !aRootUrl.host && !aRootUrl.path) {
|
|
||||||
aRootUrl.host = aPath;
|
|
||||||
return urlGenerate(aRootUrl);
|
|
||||||
}
|
|
||||||
|
|
||||||
const joined = aPath.charAt(0) === "/"
|
|
||||||
? aPath
|
|
||||||
: normalize(aRoot.replace(/\/+$/, "") + "/" + aPath);
|
|
||||||
|
|
||||||
if (aRootUrl) {
|
|
||||||
aRootUrl.path = joined;
|
|
||||||
return urlGenerate(aRootUrl);
|
|
||||||
}
|
|
||||||
return joined;
|
|
||||||
}
|
|
||||||
exports.join = join;
|
|
||||||
|
|
||||||
exports.isAbsolute = function(aPath) {
|
|
||||||
return aPath.charAt(0) === "/" || urlRegexp.test(aPath);
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Make a path relative to a URL or another path.
|
|
||||||
*
|
|
||||||
* @param aRoot The root path or URL.
|
|
||||||
* @param aPath The path or URL to be made relative to aRoot.
|
|
||||||
*/
|
|
||||||
function relative(aRoot, aPath) {
|
|
||||||
if (aRoot === "") {
|
|
||||||
aRoot = ".";
|
|
||||||
}
|
|
||||||
|
|
||||||
aRoot = aRoot.replace(/\/$/, "");
|
|
||||||
|
|
||||||
// It is possible for the path to be above the root. In this case, simply
|
|
||||||
// checking whether the root is a prefix of the path won't work. Instead, we
|
|
||||||
// need to remove components from the root one by one, until either we find
|
|
||||||
// a prefix that fits, or we run out of components to remove.
|
|
||||||
let level = 0;
|
|
||||||
while (aPath.indexOf(aRoot + "/") !== 0) {
|
|
||||||
const index = aRoot.lastIndexOf("/");
|
|
||||||
if (index < 0) {
|
|
||||||
return aPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the only part of the root that is left is the scheme (i.e. http://,
|
|
||||||
// file:///, etc.), one or more slashes (/), or simply nothing at all, we
|
|
||||||
// have exhausted all components, so the path is not relative to the root.
|
|
||||||
aRoot = aRoot.slice(0, index);
|
|
||||||
if (aRoot.match(/^([^\/]+:\/)?\/*$/)) {
|
|
||||||
return aPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
++level;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Make sure we add a "../" for each component we removed from the root.
|
|
||||||
return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1);
|
|
||||||
}
|
|
||||||
exports.relative = relative;
|
|
||||||
|
|
||||||
const supportsNullProto = (function() {
|
|
||||||
const obj = Object.create(null);
|
|
||||||
return !("__proto__" in obj);
|
|
||||||
}());
|
|
||||||
|
|
||||||
function identity(s) {
|
|
||||||
return s;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Because behavior goes wacky when you set `__proto__` on objects, we
|
|
||||||
* have to prefix all the strings in our set with an arbitrary character.
|
|
||||||
*
|
|
||||||
* See https://github.com/mozilla/source-map/pull/31 and
|
|
||||||
* https://github.com/mozilla/source-map/issues/30
|
|
||||||
*
|
|
||||||
* @param String aStr
|
|
||||||
*/
|
|
||||||
function toSetString(aStr) {
|
|
||||||
if (isProtoString(aStr)) {
|
|
||||||
return "$" + aStr;
|
|
||||||
}
|
|
||||||
|
|
||||||
return aStr;
|
|
||||||
}
|
|
||||||
exports.toSetString = supportsNullProto ? identity : toSetString;
|
|
||||||
|
|
||||||
function fromSetString(aStr) {
|
|
||||||
if (isProtoString(aStr)) {
|
|
||||||
return aStr.slice(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
return aStr;
|
|
||||||
}
|
|
||||||
exports.fromSetString = supportsNullProto ? identity : fromSetString;
|
|
||||||
|
|
||||||
function isProtoString(s) {
|
|
||||||
if (!s) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const length = s.length;
|
|
||||||
|
|
||||||
if (length < 9 /* "__proto__".length */) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* eslint-disable no-multi-spaces */
|
|
||||||
if (s.charCodeAt(length - 1) !== 95 /* '_' */ ||
|
|
||||||
s.charCodeAt(length - 2) !== 95 /* '_' */ ||
|
|
||||||
s.charCodeAt(length - 3) !== 111 /* 'o' */ ||
|
|
||||||
s.charCodeAt(length - 4) !== 116 /* 't' */ ||
|
|
||||||
s.charCodeAt(length - 5) !== 111 /* 'o' */ ||
|
|
||||||
s.charCodeAt(length - 6) !== 114 /* 'r' */ ||
|
|
||||||
s.charCodeAt(length - 7) !== 112 /* 'p' */ ||
|
|
||||||
s.charCodeAt(length - 8) !== 95 /* '_' */ ||
|
|
||||||
s.charCodeAt(length - 9) !== 95 /* '_' */) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
/* eslint-enable no-multi-spaces */
|
|
||||||
|
|
||||||
for (let i = length - 10; i >= 0; i--) {
|
|
||||||
if (s.charCodeAt(i) !== 36 /* '$' */) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Comparator between two mappings where the original positions are compared.
|
|
||||||
*
|
|
||||||
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
|
||||||
* mappings with the same original source/line/column, but different generated
|
|
||||||
* line and column the same. Useful when searching for a mapping with a
|
|
||||||
* stubbed out mapping.
|
|
||||||
*/
|
|
||||||
function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) {
|
|
||||||
let cmp = strcmp(mappingA.source, mappingB.source);
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
|
||||||
if (cmp !== 0 || onlyCompareOriginal) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.generatedLine - mappingB.generatedLine;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
return strcmp(mappingA.name, mappingB.name);
|
|
||||||
}
|
|
||||||
exports.compareByOriginalPositions = compareByOriginalPositions;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Comparator between two mappings with deflated source and name indices where
|
|
||||||
* the generated positions are compared.
|
|
||||||
*
|
|
||||||
* Optionally pass in `true` as `onlyCompareGenerated` to consider two
|
|
||||||
* mappings with the same generated line and column, but different
|
|
||||||
* source/name/original line and column the same. Useful when searching for a
|
|
||||||
* mapping with a stubbed out mapping.
|
|
||||||
*/
|
|
||||||
function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) {
|
|
||||||
let cmp = mappingA.generatedLine - mappingB.generatedLine;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
|
||||||
if (cmp !== 0 || onlyCompareGenerated) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = strcmp(mappingA.source, mappingB.source);
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
return strcmp(mappingA.name, mappingB.name);
|
|
||||||
}
|
|
||||||
exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated;
|
|
||||||
|
|
||||||
function strcmp(aStr1, aStr2) {
|
|
||||||
if (aStr1 === aStr2) {
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (aStr1 === null) {
|
|
||||||
return 1; // aStr2 !== null
|
|
||||||
}
|
|
||||||
|
|
||||||
if (aStr2 === null) {
|
|
||||||
return -1; // aStr1 !== null
|
|
||||||
}
|
|
||||||
|
|
||||||
if (aStr1 > aStr2) {
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Comparator between two mappings with inflated source and name strings where
|
|
||||||
* the generated positions are compared.
|
|
||||||
*/
|
|
||||||
function compareByGeneratedPositionsInflated(mappingA, mappingB) {
|
|
||||||
let cmp = mappingA.generatedLine - mappingB.generatedLine;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.generatedColumn - mappingB.generatedColumn;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = strcmp(mappingA.source, mappingB.source);
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.originalLine - mappingB.originalLine;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
cmp = mappingA.originalColumn - mappingB.originalColumn;
|
|
||||||
if (cmp !== 0) {
|
|
||||||
return cmp;
|
|
||||||
}
|
|
||||||
|
|
||||||
return strcmp(mappingA.name, mappingB.name);
|
|
||||||
}
|
|
||||||
exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Strip any JSON XSSI avoidance prefix from the string (as documented
|
|
||||||
* in the source maps specification), and then parse the string as
|
|
||||||
* JSON.
|
|
||||||
*/
|
|
||||||
function parseSourceMapInput(str) {
|
|
||||||
return JSON.parse(str.replace(/^\)]}'[^\n]*\n/, ""));
|
|
||||||
}
|
|
||||||
exports.parseSourceMapInput = parseSourceMapInput;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Compute the URL of a source given the source root, the source's
|
|
||||||
* URL, and the source map's URL.
|
|
||||||
*/
|
|
||||||
function computeSourceURL(sourceRoot, sourceURL, sourceMapURL) {
|
|
||||||
sourceURL = sourceURL || "";
|
|
||||||
|
|
||||||
if (sourceRoot) {
|
|
||||||
// This follows what Chrome does.
|
|
||||||
if (sourceRoot[sourceRoot.length - 1] !== "/" && sourceURL[0] !== "/") {
|
|
||||||
sourceRoot += "/";
|
|
||||||
}
|
|
||||||
// The spec says:
|
|
||||||
// Line 4: An optional source root, useful for relocating source
|
|
||||||
// files on a server or removing repeated values in the
|
|
||||||
// “sources” entry. This value is prepended to the individual
|
|
||||||
// entries in the “source” field.
|
|
||||||
sourceURL = sourceRoot + sourceURL;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Historically, SourceMapConsumer did not take the sourceMapURL as
|
|
||||||
// a parameter. This mode is still somewhat supported, which is why
|
|
||||||
// this code block is conditional. However, it's preferable to pass
|
|
||||||
// the source map URL to SourceMapConsumer, so that this function
|
|
||||||
// can implement the source URL resolution algorithm as outlined in
|
|
||||||
// the spec. This block is basically the equivalent of:
|
|
||||||
// new URL(sourceURL, sourceMapURL).toString()
|
|
||||||
// ... except it avoids using URL, which wasn't available in the
|
|
||||||
// older releases of node still supported by this library.
|
|
||||||
//
|
|
||||||
// The spec says:
|
|
||||||
// If the sources are not absolute URLs after prepending of the
|
|
||||||
// “sourceRoot”, the sources are resolved relative to the
|
|
||||||
// SourceMap (like resolving script src in a html document).
|
|
||||||
if (sourceMapURL) {
|
|
||||||
const parsed = urlParse(sourceMapURL);
|
|
||||||
if (!parsed) {
|
|
||||||
throw new Error("sourceMapURL could not be parsed");
|
|
||||||
}
|
|
||||||
if (parsed.path) {
|
|
||||||
// Strip the last path component, but keep the "/".
|
|
||||||
const index = parsed.path.lastIndexOf("/");
|
|
||||||
if (index >= 0) {
|
|
||||||
parsed.path = parsed.path.substring(0, index + 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
sourceURL = join(urlGenerate(parsed), sourceURL);
|
|
||||||
}
|
|
||||||
|
|
||||||
return normalize(sourceURL);
|
|
||||||
}
|
|
||||||
exports.computeSourceURL = computeSourceURL;
|
|
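The `util.js` diff above defines the consumer's internal path and URL arithmetic (`normalize`, `join`, `relative`, `computeSourceURL`). A small sketch of their behavior, with expected values derived from the implementation shown; the require path is illustrative:

```js
// Sketch: the consumer's internal path/URL helpers.
const util = require("@cspotcode/source-map-consumer/lib/util");

// normalize collapses '.', '..' and repeated slashes.
console.log(util.normalize("/a/b//c/./../d")); // "/a/b/d"

// join resolves a path against a root path or URL.
console.log(util.join("http://example.com/a/b/", "../c.js")); // "http://example.com/a/c.js"

// relative strips a root prefix, or returns the path unchanged when it
// cannot be made relative.
console.log(util.relative("/the/root", "/the/root/one.js")); // "one.js"
console.log(util.relative("/the/root", "/another/file.js")); // "/another/file.js"

// computeSourceURL applies sourceRoot, then resolves against the map's URL.
console.log(
  util.computeSourceURL("src", "index.ts", "http://example.com/app/bundle.js.map")
); // "http://example.com/app/src/index.ts"
```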
153
server/node_modules/@cspotcode/source-map-consumer/lib/wasm.js
generated
vendored
|
@ -1,153 +0,0 @@
|
||||||
const readWasm = require("../lib/read-wasm");
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Provide the JIT with a nice shape / hidden class.
|
|
||||||
*/
|
|
||||||
function Mapping() {
|
|
||||||
this.generatedLine = 0;
|
|
||||||
this.generatedColumn = 0;
|
|
||||||
this.lastGeneratedColumn = null;
|
|
||||||
this.source = null;
|
|
||||||
this.originalLine = null;
|
|
||||||
this.originalColumn = null;
|
|
||||||
this.name = null;
|
|
||||||
}
|
|
||||||
|
|
||||||
let cachedWasm = null;
|
|
||||||
let cachedWasmSync = null;
|
|
||||||
|
|
||||||
module.exports = async function wasm() {
|
|
||||||
if (cachedWasm) {
|
|
||||||
return cachedWasm;
|
|
||||||
}
|
|
||||||
|
|
||||||
// At every step of the way, if a sync load already succeeded, abort and return
|
|
||||||
// the sync-loaded module.
|
|
||||||
cachedWasm = async () => {
|
|
||||||
try {
|
|
||||||
const callbackStack = [];
|
|
||||||
|
|
||||||
const buffer = await readWasm();
|
|
||||||
if (cachedWasmSync) return cachedWasmSync;
|
|
||||||
const Wasm = await WebAssembly.instantiate(buffer, getImportObject({callbackStack}));
|
|
||||||
if (cachedWasmSync) return cachedWasmSync;
|
|
||||||
|
|
||||||
cachedWasmSync = {
|
|
||||||
exports: Wasm.instance.exports,
|
|
||||||
withMappingCallback: (mappingCallback, f) => {
|
|
||||||
callbackStack.push(mappingCallback);
|
|
||||||
try {
|
|
||||||
f();
|
|
||||||
} finally {
|
|
||||||
callbackStack.pop();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
} catch (e) {
|
|
||||||
if (cachedWasmSync) return cachedWasmSync;
|
|
||||||
cachedWasm = null;
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
return cachedWasmSync;
|
|
||||||
};
|
|
||||||
|
|
||||||
return cachedWasm;
|
|
||||||
};
|
|
||||||
|
|
||||||
module.exports.sync = function wasmSync() {
|
|
||||||
if (cachedWasmSync) {
|
|
||||||
return cachedWasmSync;
|
|
||||||
}
|
|
||||||
|
|
||||||
const callbackStack = [];
|
|
||||||
|
|
||||||
try {
|
|
||||||
|
|
||||||
const cachedWasmBuffer = readWasm.sync();
|
|
||||||
const wasmModule = new WebAssembly.Module(cachedWasmBuffer);
|
|
||||||
const Wasm = new WebAssembly.Instance(wasmModule, getImportObject({callbackStack}));
|
|
||||||
|
|
||||||
cachedWasmSync = {
|
|
||||||
exports: Wasm.exports,
|
|
||||||
withMappingCallback: (mappingCallback, f) => {
|
|
||||||
callbackStack.push(mappingCallback);
|
|
||||||
try {
|
|
||||||
f();
|
|
||||||
} finally {
|
|
||||||
callbackStack.pop();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
} catch (e) {
|
|
||||||
cachedWasmSync = null;
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
|
|
||||||
return cachedWasmSync;
|
|
||||||
};
|
|
||||||
|
|
||||||
function getImportObject({callbackStack}) {
|
|
||||||
return {
|
|
||||||
env: {
|
|
||||||
mapping_callback(
|
|
||||||
generatedLine,
|
|
||||||
generatedColumn,
|
|
||||||
|
|
||||||
hasLastGeneratedColumn,
|
|
||||||
lastGeneratedColumn,
|
|
||||||
|
|
||||||
hasOriginal,
|
|
||||||
source,
|
|
||||||
originalLine,
|
|
||||||
originalColumn,
|
|
||||||
|
|
||||||
hasName,
|
|
||||||
name
|
|
||||||
) {
|
|
||||||
const mapping = new Mapping();
|
|
||||||
// JS uses 1-based line numbers, wasm uses 0-based.
|
|
||||||
mapping.generatedLine = generatedLine + 1;
|
|
||||||
mapping.generatedColumn = generatedColumn;
|
|
||||||
|
|
||||||
if (hasLastGeneratedColumn) {
|
|
||||||
// JS uses inclusive last generated column, wasm uses exclusive.
|
|
||||||
mapping.lastGeneratedColumn = lastGeneratedColumn - 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hasOriginal) {
|
|
||||||
mapping.source = source;
|
|
||||||
// JS uses 1-based line numbers, wasm uses 0-based.
|
|
||||||
mapping.originalLine = originalLine + 1;
|
|
||||||
mapping.originalColumn = originalColumn;
|
|
||||||
|
|
||||||
if (hasName) {
|
|
||||||
mapping.name = name;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
callbackStack[callbackStack.length - 1](mapping);
|
|
||||||
},
|
|
||||||
|
|
||||||
start_all_generated_locations_for() { console.time("all_generated_locations_for"); },
|
|
||||||
end_all_generated_locations_for() { console.timeEnd("all_generated_locations_for"); },
|
|
||||||
|
|
||||||
start_compute_column_spans() { console.time("compute_column_spans"); },
|
|
||||||
end_compute_column_spans() { console.timeEnd("compute_column_spans"); },
|
|
||||||
|
|
||||||
start_generated_location_for() { console.time("generated_location_for"); },
|
|
||||||
end_generated_location_for() { console.timeEnd("generated_location_for"); },
|
|
||||||
|
|
||||||
start_original_location_for() { console.time("original_location_for"); },
|
|
||||||
end_original_location_for() { console.timeEnd("original_location_for"); },
|
|
||||||
|
|
||||||
start_parse_mappings() { console.time("parse_mappings"); },
|
|
||||||
end_parse_mappings() { console.timeEnd("parse_mappings"); },
|
|
||||||
|
|
||||||
start_sort_by_generated_location() { console.time("sort_by_generated_location"); },
|
|
||||||
end_sort_by_generated_location() { console.timeEnd("sort_by_generated_location"); },
|
|
||||||
|
|
||||||
start_sort_by_original_location() { console.time("sort_by_original_location"); },
|
|
||||||
end_sort_by_original_location() { console.timeEnd("sort_by_original_location"); },
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
91
server/node_modules/@cspotcode/source-map-consumer/package.json
generated
vendored
|
@ -1,91 +0,0 @@
|
||||||
{
|
|
||||||
"name": "@cspotcode/source-map-consumer",
|
|
||||||
"description": "Generates and consumes source maps",
|
|
||||||
"version": "0.8.0",
|
|
||||||
"homepage": "https://github.com/cspotcode/source-map",
|
|
||||||
"author": "Andrew Bradley <cspotcode@gmail.com>",
|
|
||||||
"contributors": [
|
|
||||||
"Nick Fitzgerald <nfitzgerald@mozilla.com>",
|
|
||||||
"Tobias Koppers <tobias.koppers@googlemail.com>",
|
|
||||||
"Duncan Beevers <duncan@dweebd.com>",
|
|
||||||
"Stephen Crane <scrane@mozilla.com>",
|
|
||||||
"Ryan Seddon <seddon.ryan@gmail.com>",
|
|
||||||
"Miles Elam <miles.elam@deem.com>",
|
|
||||||
"Mihai Bazon <mihai.bazon@gmail.com>",
|
|
||||||
"Michael Ficarra <github.public.email@michael.ficarra.me>",
|
|
||||||
"Todd Wolfson <todd@twolfson.com>",
|
|
||||||
"Alexander Solovyov <alexander@solovyov.net>",
|
|
||||||
"Felix Gnass <fgnass@gmail.com>",
|
|
||||||
"Conrad Irwin <conrad.irwin@gmail.com>",
|
|
||||||
"usrbincc <usrbincc@yahoo.com>",
|
|
||||||
"David Glasser <glasser@davidglasser.net>",
|
|
||||||
"Chase Douglas <chase@newrelic.com>",
|
|
||||||
"Evan Wallace <evan.exe@gmail.com>",
|
|
||||||
"Heather Arthur <fayearthur@gmail.com>",
|
|
||||||
"Hugh Kennedy <hughskennedy@gmail.com>",
|
|
||||||
"David Glasser <glasser@davidglasser.net>",
|
|
||||||
"Simon Lydell <simon.lydell@gmail.com>",
|
|
||||||
"Jmeas Smith <jellyes2@gmail.com>",
|
|
||||||
"Michael Z Goddard <mzgoddard@gmail.com>",
|
|
||||||
"azu <azu@users.noreply.github.com>",
|
|
||||||
"John Gozde <john@gozde.ca>",
|
|
||||||
"Adam Kirkton <akirkton@truefitinnovation.com>",
|
|
||||||
"Chris Montgomery <christopher.montgomery@dowjones.com>",
|
|
||||||
"J. Ryan Stinnett <jryans@gmail.com>",
|
|
||||||
"Jack Herrington <jherrington@walmartlabs.com>",
|
|
||||||
"Chris Truter <jeffpalentine@gmail.com>",
|
|
||||||
"Daniel Espeset <daniel@danielespeset.com>",
|
|
||||||
"Jamie Wong <jamie.lf.wong@gmail.com>",
|
|
||||||
"Eddy Bruël <ejpbruel@mozilla.com>",
|
|
||||||
"Hawken Rives <hawkrives@gmail.com>",
|
|
||||||
"Gilad Peleg <giladp007@gmail.com>",
|
|
||||||
"djchie <djchie.dev@gmail.com>",
|
|
||||||
"Gary Ye <garysye@gmail.com>",
|
|
||||||
"Nicolas Lalevée <nicolas.lalevee@hibnet.org>"
|
|
||||||
],
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "http://github.com/cspotcode/source-map.git"
|
|
||||||
},
|
|
||||||
"main": "./source-map.js",
|
|
||||||
"types": "./source-map.d.ts",
|
|
||||||
"files": [
|
|
||||||
"/source-map.js",
|
|
||||||
"/source-map.d.ts",
|
|
||||||
"/lib/"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 12"
|
|
||||||
},
|
|
||||||
"license": "BSD-3-Clause",
|
|
||||||
"scripts": {
|
|
||||||
"lint": "eslint *.js lib/ test/",
|
|
||||||
"prebuild": "npm run lint",
|
|
||||||
"build": "webpack --color",
|
|
||||||
"pretest": "npm run build",
|
|
||||||
"test": "node test/run-tests.js",
|
|
||||||
"precoverage": "npm run build",
|
|
||||||
"coverage": "nyc node test/run-tests.js",
|
|
||||||
"coverage-report": "nyc report --reporter=lcov",
|
|
||||||
"setup": "mkdir -p coverage && cp -n .waiting.html coverage/index.html || true",
|
|
||||||
"dev:live": "live-server --port=4103 --ignorePattern='(js|css|png)$' coverage",
|
|
||||||
"dev:watch": "watch 'npm run coverage' lib/ test/",
|
|
||||||
"predev": "npm run setup",
|
|
||||||
"dev": "npm-run-all -p --silent dev:*",
|
|
||||||
"clean": "rm -rf coverage .nyc_output",
|
|
||||||
"toc": "doctoc --title '## Table of Contents' README.md && doctoc --title '## Table of Contents' CONTRIBUTING.md"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"doctoc": "^0.15.0",
|
|
||||||
"eslint": "^4.19.1",
|
|
||||||
"live-server": "^1.2.0",
|
|
||||||
"npm-run-all": "^4.1.2",
|
|
||||||
"nyc": "^11.7.1",
|
|
||||||
"watch": "^1.0.2",
|
|
||||||
"webpack": "^3.10"
|
|
||||||
},
|
|
||||||
"nyc": {
|
|
||||||
"reporter": "html"
|
|
||||||
},
|
|
||||||
"typings": "source-map"
|
|
||||||
}
|
|
369
server/node_modules/@cspotcode/source-map-consumer/source-map.d.ts
generated
vendored
|
@ -1,369 +0,0 @@
|
||||||
// Type definitions for source-map 0.7
|
|
||||||
// Project: https://github.com/mozilla/source-map
|
|
||||||
// Definitions by: Morten Houston Ludvigsen <https://github.com/MortenHoustonLudvigsen>,
|
|
||||||
// Ron Buckton <https://github.com/rbuckton>,
|
|
||||||
// John Vilk <https://github.com/jvilk>
|
|
||||||
// Definitions: https://github.com/mozilla/source-map
|
|
||||||
export type SourceMapUrl = string;
|
|
||||||
|
|
||||||
export interface StartOfSourceMap {
|
|
||||||
file?: string;
|
|
||||||
sourceRoot?: string;
|
|
||||||
skipValidation?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface RawSourceMap {
|
|
||||||
version: number;
|
|
||||||
sources: string[];
|
|
||||||
names: string[];
|
|
||||||
sourceRoot?: string;
|
|
||||||
sourcesContent?: string[];
|
|
||||||
mappings: string;
|
|
||||||
file: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface RawIndexMap extends StartOfSourceMap {
|
|
||||||
version: number;
|
|
||||||
sections: RawSection[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface RawSection {
|
|
||||||
offset: Position;
|
|
||||||
map: RawSourceMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Position {
|
|
||||||
line: number;
|
|
||||||
column: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface NullablePosition {
|
|
||||||
line: number | null;
|
|
||||||
column: number | null;
|
|
||||||
lastColumn: number | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface MappedPosition {
|
|
||||||
source: string;
|
|
||||||
line: number;
|
|
||||||
column: number;
|
|
||||||
name?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface NullableMappedPosition {
|
|
||||||
source: string | null;
|
|
||||||
line: number | null;
|
|
||||||
column: number | null;
|
|
||||||
name: string | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface MappingItem {
|
|
||||||
source: string;
|
|
||||||
generatedLine: number;
|
|
||||||
generatedColumn: number;
|
|
||||||
originalLine: number;
|
|
||||||
originalColumn: number;
|
|
||||||
name: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Mapping {
|
|
||||||
generated: Position;
|
|
||||||
original: Position;
|
|
||||||
source: string;
|
|
||||||
name?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CodeWithSourceMap {
|
|
||||||
code: string;
|
|
||||||
map: SourceMapGenerator;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SourceMapConsumer {
|
|
||||||
/**
|
|
||||||
* Compute the last column for each generated mapping. The last column is
|
|
||||||
* inclusive.
|
|
||||||
*/
|
|
||||||
computeColumnSpans(): void;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the original source, line, and column information for the generated
|
|
||||||
* source's line and column positions provided. The only argument is an object
|
|
||||||
* with the following properties:
|
|
||||||
*
|
|
||||||
* - line: The line number in the generated source.
|
|
||||||
* - column: The column number in the generated source.
|
|
||||||
* - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
|
|
||||||
* 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
|
|
||||||
* closest element that is smaller than or greater than the one we are
|
|
||||||
* searching for, respectively, if the exact element cannot be found.
|
|
||||||
* Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
|
|
||||||
*
|
|
||||||
* and an object is returned with the following properties:
|
|
||||||
*
|
|
||||||
* - source: The original source file, or null.
|
|
||||||
* - line: The line number in the original source, or null.
|
|
||||||
* - column: The column number in the original source, or null.
|
|
||||||
* - name: The original identifier, or null.
|
|
||||||
*/
|
|
||||||
originalPositionFor(generatedPosition: Position & { bias?: number }): NullableMappedPosition;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the generated line and column information for the original source,
|
|
||||||
* line, and column positions provided. The only argument is an object with
|
|
||||||
* the following properties:
|
|
||||||
*
|
|
||||||
* - source: The filename of the original source.
|
|
||||||
* - line: The line number in the original source.
|
|
||||||
* - column: The column number in the original source.
|
|
||||||
* - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or
|
|
||||||
* 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the
|
|
||||||
* closest element that is smaller than or greater than the one we are
|
|
||||||
* searching for, respectively, if the exact element cannot be found.
|
|
||||||
* Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'.
|
|
||||||
*
|
|
||||||
* and an object is returned with the following properties:
|
|
||||||
*
|
|
||||||
* - line: The line number in the generated source, or null.
|
|
||||||
* - column: The column number in the generated source, or null.
|
|
||||||
*/
|
|
||||||
generatedPositionFor(originalPosition: MappedPosition & { bias?: number }): NullablePosition;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns all generated line and column information for the original source,
|
|
||||||
* line, and column provided. If no column is provided, returns all mappings
|
|
||||||
* corresponding to a either the line we are searching for or the next
|
|
||||||
* closest line that has any mappings. Otherwise, returns all mappings
|
|
||||||
* corresponding to the given line and either the column we are searching for
|
|
||||||
* or the next closest column that has any offsets.
|
|
||||||
*
|
|
||||||
* The only argument is an object with the following properties:
|
|
||||||
*
|
|
||||||
* - source: The filename of the original source.
|
|
||||||
* - line: The line number in the original source.
|
|
||||||
* - column: Optional. the column number in the original source.
|
|
||||||
*
|
|
||||||
* and an array of objects is returned, each with the following properties:
|
|
||||||
*
|
|
||||||
* - line: The line number in the generated source, or null.
|
|
||||||
* - column: The column number in the generated source, or null.
|
|
||||||
*/
|
|
||||||
allGeneratedPositionsFor(originalPosition: MappedPosition): NullablePosition[];
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return true if we have the source content for every source in the source
|
|
||||||
* map, false otherwise.
|
|
||||||
*/
|
|
||||||
hasContentsOfAllSources(): boolean;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the original source content. The only argument is the url of the
|
|
||||||
* original source file. Returns null if no original source content is
|
|
||||||
* available.
|
|
||||||
*/
|
|
||||||
sourceContentFor(source: string, returnNullOnMissing?: boolean): string | null;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Iterate over each mapping between an original source/line/column and a
|
|
||||||
* generated line/column in this source map.
|
|
||||||
*
|
|
||||||
* @param callback
|
|
||||||
* The function that is called with each mapping.
|
|
||||||
* @param context
|
|
||||||
* Optional. If specified, this object will be the value of `this` every
|
|
||||||
* time that `aCallback` is called.
|
|
||||||
* @param order
|
|
||||||
* Either `SourceMapConsumer.GENERATED_ORDER` or
|
|
||||||
* `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to
|
|
||||||
* iterate over the mappings sorted by the generated file's line/column
|
|
||||||
* order or the original's source/line/column order, respectively. Defaults to
|
|
||||||
* `SourceMapConsumer.GENERATED_ORDER`.
|
|
||||||
*/
|
|
||||||
eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void;
|
|
||||||
/**
|
|
||||||
* Free this source map consumer's associated wasm data that is manually-managed.
|
|
||||||
* Alternatively, you can use SourceMapConsumer.with to avoid needing to remember to call destroy.
|
|
||||||
*/
|
|
||||||
destroy(): void;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SourceMapConsumerConstructor {
|
|
||||||
prototype: SourceMapConsumer;
|
|
||||||
|
|
||||||
GENERATED_ORDER: number;
|
|
||||||
ORIGINAL_ORDER: number;
|
|
||||||
GREATEST_LOWER_BOUND: number;
|
|
||||||
LEAST_UPPER_BOUND: number;
|
|
||||||
|
|
||||||
new (rawSourceMap: RawSourceMap, sourceMapUrl?: SourceMapUrl): BasicSourceMapConsumer;
|
|
||||||
new (rawSourceMap: RawIndexMap, sourceMapUrl?: SourceMapUrl): IndexedSourceMapConsumer;
|
|
||||||
new (rawSourceMap: RawSourceMap | RawIndexMap | string, sourceMapUrl?: SourceMapUrl): BasicSourceMapConsumer | IndexedSourceMapConsumer;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a BasicSourceMapConsumer from a SourceMapGenerator.
|
|
||||||
*
|
|
||||||
* @param sourceMap
|
|
||||||
* The source map that will be consumed.
|
|
||||||
*/
|
|
||||||
fromSourceMap(sourceMap: SourceMapGenerator, sourceMapUrl?: SourceMapUrl): Promise<BasicSourceMapConsumer>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Construct a new `SourceMapConsumer` from `rawSourceMap` and `sourceMapUrl`
|
|
||||||
* (see the `SourceMapConsumer` constructor for details). Then, invoke the `async
|
|
||||||
* function f(SourceMapConsumer) -> T` with the newly constructed consumer, wait
|
|
||||||
* for `f` to complete, call `destroy` on the consumer, and return `f`'s return
|
|
||||||
* value.
|
|
||||||
*
|
|
||||||
* You must not use the consumer after `f` completes!
|
|
||||||
*
|
|
||||||
* By using `with`, you do not have to remember to manually call `destroy` on
|
|
||||||
* the consumer, since it will be called automatically once `f` completes.
|
|
||||||
*
|
|
||||||
* ```js
|
|
||||||
* const xSquared = await SourceMapConsumer.with(
|
|
||||||
* myRawSourceMap,
|
|
||||||
* null,
|
|
||||||
* async function (consumer) {
|
|
||||||
* // Use `consumer` inside here and don't worry about remembering
|
|
||||||
* // to call `destroy`.
|
|
||||||
*
|
|
||||||
* const x = await whatever(consumer);
|
|
||||||
* return x * x;
|
|
||||||
* }
|
|
||||||
* );
|
|
||||||
*
|
|
||||||
* // You may not use that `consumer` anymore out here; it has
|
|
||||||
* // been destroyed. But you can use `xSquared`.
|
|
||||||
* console.log(xSquared);
|
|
||||||
* ```
|
|
||||||
*/
|
|
||||||
with<T>(rawSourceMap: RawSourceMap | RawIndexMap | string, sourceMapUrl: SourceMapUrl | null | undefined, callback: (consumer: BasicSourceMapConsumer | IndexedSourceMapConsumer) => Promise<T> | T): Promise<T>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const SourceMapConsumer: SourceMapConsumerConstructor;
|
|
||||||
|
|
||||||
export interface BasicSourceMapConsumer extends SourceMapConsumer {
|
|
||||||
file: string;
|
|
||||||
sourceRoot: string;
|
|
||||||
sources: string[];
|
|
||||||
sourcesContent: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface BasicSourceMapConsumerConstructor {
|
|
||||||
prototype: BasicSourceMapConsumer;
|
|
||||||
|
|
||||||
new (rawSourceMap: RawSourceMap | string): BasicSourceMapConsumer;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a BasicSourceMapConsumer from a SourceMapGenerator.
|
|
||||||
*
|
|
||||||
* @param sourceMap
|
|
||||||
* The source map that will be consumed.
|
|
||||||
*/
|
|
||||||
fromSourceMap(sourceMap: SourceMapGenerator): Promise<BasicSourceMapConsumer>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const BasicSourceMapConsumer: BasicSourceMapConsumerConstructor;
|
|
||||||
|
|
||||||
export interface IndexedSourceMapConsumer extends SourceMapConsumer {
|
|
||||||
sources: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface IndexedSourceMapConsumerConstructor {
|
|
||||||
prototype: IndexedSourceMapConsumer;
|
|
||||||
|
|
||||||
new (rawSourceMap: RawIndexMap | string): IndexedSourceMapConsumer;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const IndexedSourceMapConsumer: IndexedSourceMapConsumerConstructor;
|
|
||||||
|
|
||||||
export class SourceMapGenerator {
|
|
||||||
constructor(startOfSourceMap?: StartOfSourceMap);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Creates a new SourceMapGenerator based on a SourceMapConsumer
|
|
||||||
*
|
|
||||||
* @param sourceMapConsumer The SourceMap.
|
|
||||||
*/
|
|
||||||
static fromSourceMap(sourceMapConsumer: SourceMapConsumer): SourceMapGenerator;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add a single mapping from original source line and column to the generated
|
|
||||||
* source's line and column for this source map being created. The mapping
|
|
||||||
* object should have the following properties:
|
|
||||||
*
|
|
||||||
* - generated: An object with the generated line and column positions.
|
|
||||||
* - original: An object with the original line and column positions.
|
|
||||||
* - source: The original source file (relative to the sourceRoot).
|
|
||||||
* - name: An optional original token name for this mapping.
|
|
||||||
*/
|
|
||||||
addMapping(mapping: Mapping): void;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set the source content for a source file.
|
|
||||||
*/
|
|
||||||
setSourceContent(sourceFile: string, sourceContent: string): void;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Applies the mappings of a sub-source-map for a specific source file to the
|
|
||||||
* source map being generated. Each mapping to the supplied source file is
|
|
||||||
* rewritten using the supplied source map. Note: The resolution for the
|
|
||||||
* resulting mappings is the minimum of this map and the supplied map.
|
|
||||||
*
|
|
||||||
* @param sourceMapConsumer The source map to be applied.
|
|
||||||
* @param sourceFile Optional. The filename of the source file.
|
|
||||||
* If omitted, SourceMapConsumer's file property will be used.
|
|
||||||
* @param sourceMapPath Optional. The dirname of the path to the source map
|
|
||||||
* to be applied. If relative, it is relative to the SourceMapConsumer.
|
|
||||||
* This parameter is needed when the two source maps aren't in the same
|
|
||||||
* directory, and the source map to be applied contains relative source
|
|
||||||
* paths. If so, those relative source paths need to be rewritten
|
|
||||||
* relative to the SourceMapGenerator.
|
|
||||||
*/
|
|
||||||
applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void;
|
|
||||||
|
|
||||||
toString(): string;
|
|
||||||
|
|
||||||
toJSON(): RawSourceMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class SourceNode {
|
|
||||||
children: SourceNode[];
|
|
||||||
sourceContents: any;
|
|
||||||
line: number;
|
|
||||||
column: number;
|
|
||||||
source: string;
|
|
||||||
name: string;
|
|
||||||
|
|
||||||
constructor();
|
|
||||||
constructor(
|
|
||||||
line: number | null,
|
|
||||||
column: number | null,
|
|
||||||
source: string | null,
|
|
||||||
chunks?: Array<(string | SourceNode)> | SourceNode | string,
|
|
||||||
name?: string
|
|
||||||
);
|
|
||||||
|
|
||||||
static fromStringWithSourceMap(
|
|
||||||
code: string,
|
|
||||||
sourceMapConsumer: SourceMapConsumer,
|
|
||||||
relativePath?: string
|
|
||||||
): SourceNode;
|
|
||||||
|
|
||||||
add(chunk: Array<(string | SourceNode)> | SourceNode | string): SourceNode;
|
|
||||||
|
|
||||||
prepend(chunk: Array<(string | SourceNode)> | SourceNode | string): SourceNode;
|
|
||||||
|
|
||||||
setSourceContent(sourceFile: string, sourceContent: string): void;
|
|
||||||
|
|
||||||
walk(fn: (chunk: string, mapping: MappedPosition) => void): void;
|
|
||||||
|
|
||||||
walkSourceContents(fn: (file: string, content: string) => void): void;
|
|
||||||
|
|
||||||
join(sep: string): SourceNode;
|
|
||||||
|
|
||||||
replaceRight(pattern: string, replacement: string): SourceNode;
|
|
||||||
|
|
||||||
toString(): string;
|
|
||||||
|
|
||||||
toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap;
|
|
||||||
}
|
|
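To make the declarations in this file concrete, a brief sketch of the consumer API they describe. The tiny inline source map (a single `AAAA` mapping) and the expected output are illustrative, not taken from the diff:

```js
// Sketch: consuming a minimal source map with the API declared above.
const { SourceMapConsumer } = require("@cspotcode/source-map-consumer");

const rawMap = {
  version: 3,
  file: "min.js",
  sources: ["one.js"],
  names: [],
  mappings: "AAAA", // generated line 1, col 0 -> one.js line 1, col 0
  sourcesContent: ["const x = 1;\n"]
};

async function demo() {
  // SourceMapConsumer.with builds the consumer, runs the callback, and then
  // calls destroy() for us, as the doc comment above explains.
  const pos = await SourceMapConsumer.with(rawMap, null, consumer =>
    consumer.originalPositionFor({ line: 1, column: 0 })
  );
  console.log(pos); // roughly { source: "one.js", line: 1, column: 0, name: null }
}

demo();
```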
6
server/node_modules/@cspotcode/source-map-consumer/source-map.js
generated
vendored
@ -1,6 +0,0 @@
/*
 * Copyright 2009-2011 Mozilla Foundation and contributors
 * Licensed under the New BSD license. See LICENSE.txt or:
 * http://opensource.org/licenses/BSD-3-Clause
 */
exports.SourceMapConsumer = require("./lib/source-map-consumer").SourceMapConsumer;
21
server/node_modules/@cspotcode/source-map-support/LICENSE.md
generated
vendored
@ -1,21 +0,0 @@
The MIT License (MIT)

Copyright (c) 2014 Evan Wallace

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
289
server/node_modules/@cspotcode/source-map-support/README.md
generated
vendored
@@ -1,289 +0,0 @@
# Source Map Support

[![NPM version](https://img.shields.io/npm/v/@cspotcode/source-map-support.svg?style=flat)](https://npmjs.org/package/@cspotcode/source-map-support)
[![NPM downloads](https://img.shields.io/npm/dm/@cspotcode/source-map-support.svg?style=flat)](https://npmjs.org/package/@cspotcode/source-map-support)
[![Build status](https://img.shields.io/github/workflow/status/cspotcode/node-source-map-support/Continuous%20Integration)](https://github.com/cspotcode/node-source-map-support/actions?query=workflow%3A%22Continuous+Integration%22)

This module provides source map support for stack traces in node via the [V8 stack trace API](https://github.com/v8/v8/wiki/Stack-Trace-API). It uses the [source-map](https://github.com/mozilla/source-map) module to replace the paths and line numbers of source-mapped files with their original paths and line numbers. The output mimics node's stack trace format with the goal of making every compile-to-JS language more of a first-class citizen. Source maps are completely general (not specific to any one language), so you can use source maps with multiple compile-to-JS languages in the same node process.

## Installation and Usage

#### Node support

```
$ npm install @cspotcode/source-map-support
```

Source maps can be generated using libraries such as [source-map-index-generator](https://github.com/twolfson/source-map-index-generator). Once you have a valid source map, place a source mapping comment somewhere in the file (usually done automatically or with an option by your transpiler):

```
//# sourceMappingURL=path/to/source.map
```

If multiple sourceMappingURL comments exist in one file, the last sourceMappingURL comment will be respected (e.g. if a file mentions the comment in code, or went through multiple transpilers). The path should either be absolute or relative to the compiled file.
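For instance, a hypothetical file that has passed through two transpilers might end like this (file names invented for illustration); the second comment, being last, is the one that gets used:

```js
// compiled.js (illustrative only)
// ... generated code ...
//# sourceMappingURL=intermediate.js.map
//# sourceMappingURL=compiled.js.map
```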

From here you have two options.

##### CLI Usage

```bash
node -r @cspotcode/source-map-support/register compiled.js
# Or to enable hookRequire
node -r @cspotcode/source-map-support/register-hook-require compiled.js
```

##### Programmatic Usage

Put the following line at the top of the compiled file.

```js
require('@cspotcode/source-map-support').install();
```

It is also possible to install the source map support directly by requiring the `register` module, which can be handy with ES6:

```js
import '@cspotcode/source-map-support/register'

// Instead of:
import sourceMapSupport from '@cspotcode/source-map-support'
sourceMapSupport.install()
```

Note: if you're using babel-register, it includes source-map-support already.

It is also very useful with Mocha:

```
$ mocha --require @cspotcode/source-map-support/register tests/
```

#### Browser support

This library also works in Chrome. While the DevTools console already supports source maps, the V8 engine doesn't and `Error.prototype.stack` will be incorrect without this library. Everything will just work if you deploy your source files using [browserify](http://browserify.org/). Just make sure to pass the `--debug` flag to the browserify command so your source maps are included in the bundled code.

This library also works if you use another build process or just include the source files directly. In this case, include the file `browser-source-map-support.js` in your page and call `sourceMapSupport.install()`. It contains the whole library already bundled for the browser using browserify.

```html
<script src="browser-source-map-support.js"></script>
<script>sourceMapSupport.install();</script>
```

This library also works if you use AMD (Asynchronous Module Definition), which is used in tools like [RequireJS](http://requirejs.org/). Just list `browser-source-map-support` as a dependency:

```html
<script>
  define(['browser-source-map-support'], function(sourceMapSupport) {
    sourceMapSupport.install();
  });
</script>
```

## Options

This module installs two things: a change to the `stack` property on `Error` objects and a handler for uncaught exceptions that mimics node's default exception handler (the handler can be seen in the demos below). You may want to disable the handler if you have your own uncaught exception handler. This can be done by passing an argument to the installer:

```js
require('@cspotcode/source-map-support').install({
  handleUncaughtExceptions: false
});
```

This module loads source maps from the filesystem by default. You can provide alternate loading behavior through a callback as shown below. For example, [Meteor](https://github.com/meteor) keeps all source maps cached in memory to avoid disk access.

```js
require('@cspotcode/source-map-support').install({
  retrieveSourceMap: function(source) {
    if (source === 'compiled.js') {
      return {
        url: 'original.js',
        map: fs.readFileSync('compiled.js.map', 'utf8')
      };
    }
    return null;
  }
});
```

The module will by default assume a browser environment if XMLHttpRequest and window are defined. If either of these does not exist, it will instead assume a node environment.
In some rare cases, e.g. when running a browser emulation where both variables are also set, you can explicitly specify the environment to be either 'browser' or 'node'.

```js
require('@cspotcode/source-map-support').install({
  environment: 'node'
});
```

To support files with inline source maps, the `hookRequire` option can be specified, which will monitor all source files for inline source maps.

```js
require('@cspotcode/source-map-support').install({
  hookRequire: true
});
```

This monkey patches the `require` module loading chain, so it is not enabled by default and is not recommended for any sort of production usage.

## Demos

#### Basic Demo

original.js:

```js
throw new Error('test'); // This is the original code
```

compiled.js:

```js
require('@cspotcode/source-map-support').install();

throw new Error('test'); // This is the compiled code
// The next line defines the sourceMapping.
//# sourceMappingURL=compiled.js.map
```

compiled.js.map:

```json
{
  "version": 3,
  "file": "compiled.js",
  "sources": ["original.js"],
  "names": [],
  "mappings": ";;AAAA,MAAM,IAAI"
}
```

Run compiled.js using node (notice how the stack trace uses original.js instead of compiled.js):

```
$ node compiled.js

original.js:1
throw new Error('test'); // This is the original code
^
Error: test
    at Object.<anonymous> (original.js:1:7)
    at Module._compile (module.js:456:26)
    at Object.Module._extensions..js (module.js:474:10)
    at Module.load (module.js:356:32)
    at Function.Module._load (module.js:312:12)
    at Function.Module.runMain (module.js:497:10)
    at startup (node.js:119:16)
    at node.js:901:3
```

#### TypeScript Demo

demo.ts:

```typescript
declare function require(name: string);
require('@cspotcode/source-map-support').install();
class Foo {
  constructor() { this.bar(); }
  bar() { throw new Error('this is a demo'); }
}
new Foo();
```

Compile and run the file using the TypeScript compiler from the terminal:

```
$ npm install source-map-support typescript
$ node_modules/typescript/bin/tsc -sourcemap demo.ts
$ node demo.js

demo.ts:5
bar() { throw new Error('this is a demo'); }
^
Error: this is a demo
    at Foo.bar (demo.ts:5:17)
    at new Foo (demo.ts:4:24)
    at Object.<anonymous> (demo.ts:7:1)
    at Module._compile (module.js:456:26)
    at Object.Module._extensions..js (module.js:474:10)
    at Module.load (module.js:356:32)
    at Function.Module._load (module.js:312:12)
    at Function.Module.runMain (module.js:497:10)
    at startup (node.js:119:16)
    at node.js:901:3
```

There is also the option to use `-r source-map-support/register` with TypeScript, without the need to add `require('@cspotcode/source-map-support').install()` to the code base:

```
$ npm install source-map-support typescript
$ node_modules/typescript/bin/tsc -sourcemap demo.ts
$ node -r source-map-support/register demo.js

demo.ts:5
bar() { throw new Error('this is a demo'); }
^
Error: this is a demo
    at Foo.bar (demo.ts:5:17)
    at new Foo (demo.ts:4:24)
    at Object.<anonymous> (demo.ts:7:1)
    at Module._compile (module.js:456:26)
    at Object.Module._extensions..js (module.js:474:10)
    at Module.load (module.js:356:32)
    at Function.Module._load (module.js:312:12)
    at Function.Module.runMain (module.js:497:10)
    at startup (node.js:119:16)
    at node.js:901:3
```

#### CoffeeScript Demo

demo.coffee:

```coffee
require('@cspotcode/source-map-support').install()
foo = ->
  bar = -> throw new Error 'this is a demo'
  bar()
foo()
```

Compile and run the file using the CoffeeScript compiler from the terminal:

```sh
$ npm install @cspotcode/source-map-support coffeescript
$ node_modules/.bin/coffee --map --compile demo.coffee
$ node demo.js

demo.coffee:3
bar = -> throw new Error 'this is a demo'
^
Error: this is a demo
    at bar (demo.coffee:3:22)
    at foo (demo.coffee:4:3)
    at Object.<anonymous> (demo.coffee:5:1)
    at Object.<anonymous> (demo.coffee:1:1)
    at Module._compile (module.js:456:26)
    at Object.Module._extensions..js (module.js:474:10)
    at Module.load (module.js:356:32)
    at Function.Module._load (module.js:312:12)
    at Function.Module.runMain (module.js:497:10)
    at startup (node.js:119:16)
```

## Tests

This repo contains both automated tests for node and manual tests for the browser. The automated tests can be run using mocha (type `mocha` in the root directory). To run the manual tests:

* Build the tests using `build.js`
* Launch the HTTP server (`npm run serve-tests`) and visit
  * http://127.0.0.1:1336/amd-test
  * http://127.0.0.1:1336/browser-test
  * http://127.0.0.1:1336/browserify-test - **Currently not working** due to a bug with browserify (see [pull request #66](https://github.com/evanw/node-source-map-support/pull/66) for details).
* For `header-test`, run `server.js` inside that directory and visit http://127.0.0.1:1337/

## License

This code is available under the [MIT license](http://opensource.org/licenses/MIT).
46
server/node_modules/@cspotcode/source-map-support/package.json
generated
vendored
@@ -1,46 +0,0 @@
{
  "name": "@cspotcode/source-map-support",
  "description": "Fixes stack traces for files with source maps",
  "version": "0.7.0",
  "main": "./source-map-support.js",
  "types": "./source-map-support.d.ts",
  "scripts": {
    "build": "node build.js",
    "serve-tests": "http-server -p 1336",
    "test": "mocha"
  },
  "files": [
    "/register.d.ts",
    "/register.js",
    "/register-hook-require.d.ts",
    "/register-hook-require.js",
    "/source-map-support.d.ts",
    "/source-map-support.js"
  ],
  "dependencies": {
    "@cspotcode/source-map-consumer": "0.8.0"
  },
  "devDependencies": {
    "browserify": "^4.2.3",
    "coffeescript": "^1.12.7",
    "http-server": "^0.11.1",
    "mocha": "^3.5.3",
    "source-map": "0.6.1",
    "webpack": "^1.15.0"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/cspotcode/node-source-map-support"
  },
  "bugs": {
    "url": "https://github.com/cspotcode/node-source-map-support/issues"
  },
  "license": "MIT",
  "engines": {
    "node": ">=12"
  },
  "volta": {
    "node": "16.11.0",
    "npm": "7.24.2"
  }
}
7
server/node_modules/@cspotcode/source-map-support/register-hook-require.d.ts
generated
vendored
@@ -1,7 +0,0 @@
// tslint:disable:no-useless-files

// For the following usage:
// import '@cspotcode/source-map-support/register-hook-require'
// Instead of:
// import sourceMapSupport from '@cspotcode/source-map-support'
// sourceMapSupport.install({hookRequire: true})
3
server/node_modules/@cspotcode/source-map-support/register-hook-require.js
generated
vendored
@@ -1,3 +0,0 @@
require('./').install({
  hookRequire: true
});
7
server/node_modules/@cspotcode/source-map-support/register.d.ts
generated
vendored
@@ -1,7 +0,0 @@
// tslint:disable:no-useless-files

// For the following usage:
// import '@cspotcode/source-map-support/register'
// Instead of:
// import sourceMapSupport from '@cspotcode/source-map-support'
// sourceMapSupport.install()
1
server/node_modules/@cspotcode/source-map-support/register.js
generated
vendored
@@ -1 +0,0 @@
require('./').install();
68
server/node_modules/@cspotcode/source-map-support/source-map-support.d.ts
generated
vendored
@@ -1,68 +0,0 @@
// Type definitions for source-map-support 0.5
// Project: https://github.com/evanw/node-source-map-support
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>
//                 Jason Cheatham <https://github.com/jason0x43>
//                 Alcedo Nathaniel De Guzman Jr <https://github.com/natealcedo>
//                 Griffin Yourick <https://github.com/tough-griff>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

import { RawSourceMap } from '@cspotcode/source-map-consumer';

/**
 * Output of retrieveSourceMap().
 * From source-map-support:
 * The map field may be either a string or the parsed JSON object (i.e.,
 * it must be a valid argument to the SourceMapConsumer constructor).
 */
export interface UrlAndMap {
    url: string;
    map: string | RawSourceMap;
}

/**
 * Options to install().
 */
export interface Options {
    handleUncaughtExceptions?: boolean | undefined;
    hookRequire?: boolean | undefined;
    emptyCacheBetweenOperations?: boolean | undefined;
    environment?: 'auto' | 'browser' | 'node' | undefined;
    overrideRetrieveFile?: boolean | undefined;
    overrideRetrieveSourceMap?: boolean | undefined;
    retrieveFile?(path: string): string;
    retrieveSourceMap?(source: string): UrlAndMap | null;
    /**
     * Set false to disable redirection of require / import `source-map-support` to `@cspotcode/source-map-support`
     */
    redirectConflictingLibrary?: boolean;
    /**
     * Callback will be called every time we redirect due to `redirectConflictingLibrary`
     * This allows consumers to log helpful warnings if they choose.
     * @param parent NodeJS.Module which made the require() or require.resolve() call
     * @param options options object internally passed to node's `_resolveFilename` hook
     */
    onConflictingLibraryRedirect?: (request: string, parent: any, isMain: boolean, options: any, redirectedRequest: string) => void;
}

export interface Position {
    source: string;
    line: number;
    column: number;
}

export function wrapCallSite(frame: any /* StackFrame */): any /* StackFrame */;
export function getErrorSource(error: Error): string | null;
export function mapSourcePosition(position: Position): Position;
export function retrieveSourceMap(source: string): UrlAndMap | null;
export function resetRetrieveHandlers(): void;

/**
 * Install SourceMap support.
 * @param options Can be used to e.g. disable uncaughtException handler.
 */
export function install(options?: Options): void;

/**
 * Uninstall SourceMap support.
 */
export function uninstall(): void;
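To make the Options surface above concrete, here is a small, hypothetical install() call exercising the conflict-redirection options declared in this file; the warning text is invented for illustration:

```js
require('@cspotcode/source-map-support').install({
  handleUncaughtExceptions: false,
  // Keep redirecting require('source-map-support') to this fork (the default),
  // but log whenever a redirect happens.
  redirectConflictingLibrary: true,
  onConflictingLibraryRedirect: function (request, parent, isMain, options, redirectedRequest) {
    console.warn('Redirected ' + request + ' -> ' + redirectedRequest +
      ' (requested by ' + (parent && parent.filename) + ')');
  }
});
```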
806
server/node_modules/@cspotcode/source-map-support/source-map-support.js
generated
vendored
@@ -1,806 +0,0 @@
var SourceMapConsumer = require('@cspotcode/source-map-consumer').SourceMapConsumer;
|
|
||||||
var path = require('path');
|
|
||||||
var util = require('util');
|
|
||||||
|
|
||||||
var fs;
|
|
||||||
try {
|
|
||||||
fs = require('fs');
|
|
||||||
if (!fs.existsSync || !fs.readFileSync) {
|
|
||||||
// fs doesn't have all methods we need
|
|
||||||
fs = null;
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
/* nop */
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Requires a module which is protected against bundler minification.
|
|
||||||
*
|
|
||||||
* @param {NodeModule} mod
|
|
||||||
* @param {string} request
|
|
||||||
*/
|
|
||||||
function dynamicRequire(mod, request) {
|
|
||||||
return mod.require(request);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {{
|
|
||||||
* enabled: boolean;
|
|
||||||
* originalValue: any;
|
|
||||||
* installedValue: any;
|
|
||||||
* }} HookState
|
|
||||||
* Used for installing and uninstalling hooks
|
|
||||||
*/
|
|
||||||
|
|
||||||
// Increment this if the format of sharedData changes in a breaking way.
|
|
||||||
var sharedDataVersion = 1;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @template T
|
|
||||||
* @param {T} defaults
|
|
||||||
* @returns {T}
|
|
||||||
*/
|
|
||||||
function initializeSharedData(defaults) {
|
|
||||||
var sharedDataKey = 'source-map-support/sharedData';
|
|
||||||
if (typeof Symbol !== 'undefined') {
|
|
||||||
sharedDataKey = Symbol.for(sharedDataKey);
|
|
||||||
}
|
|
||||||
var sharedData = this[sharedDataKey];
|
|
||||||
if (!sharedData) {
|
|
||||||
sharedData = { version: sharedDataVersion };
|
|
||||||
if (Object.defineProperty) {
|
|
||||||
Object.defineProperty(this, sharedDataKey, { value: sharedData });
|
|
||||||
} else {
|
|
||||||
this[sharedDataKey] = sharedData;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (sharedDataVersion !== sharedData.version) {
|
|
||||||
throw new Error("Multiple incompatible instances of source-map-support were loaded");
|
|
||||||
}
|
|
||||||
for (var key in defaults) {
|
|
||||||
if (!(key in sharedData)) {
|
|
||||||
sharedData[key] = defaults[key];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return sharedData;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If multiple instances of source-map-support are loaded into the same
|
|
||||||
// context, they shouldn't overwrite each other. By storing handlers, caches,
|
|
||||||
// and other state on a shared object, different instances of
|
|
||||||
// source-map-support can work together in a limited way. This does require
|
|
||||||
// that future versions of source-map-support continue to support the fields on
|
|
||||||
// this object. If this internal contract ever needs to be broken, increment
|
|
||||||
// sharedDataVersion. (This version number is not the same as any of the
|
|
||||||
// package's version numbers, which should reflect the *external* API of
|
|
||||||
// source-map-support.)
|
|
||||||
var sharedData = initializeSharedData({
|
|
||||||
|
|
||||||
// Only install once if called multiple times
|
|
||||||
// Remember how the environment looked before installation so we can restore if able
|
|
||||||
/** @type {HookState} */
|
|
||||||
errorPrepareStackTraceHook: undefined,
|
|
||||||
/** @type {HookState} */
|
|
||||||
processEmitHook: undefined,
|
|
||||||
/** @type {HookState} */
|
|
||||||
moduleResolveFilenameHook: undefined,
|
|
||||||
|
|
||||||
/** @type {Array<(request: string, parent: any, isMain: boolean, options: any, redirectedRequest: string) => void>} */
|
|
||||||
onConflictingLibraryRedirectArr: [],
|
|
||||||
|
|
||||||
// If true, the caches are reset before a stack trace formatting operation
|
|
||||||
emptyCacheBetweenOperations: false,
|
|
||||||
|
|
||||||
// Maps a file path to a string containing the file contents
|
|
||||||
fileContentsCache: {},
|
|
||||||
|
|
||||||
// Maps a file path to a source map for that file
|
|
||||||
sourceMapCache: {},
|
|
||||||
|
|
||||||
// Priority list of retrieve handlers
|
|
||||||
retrieveFileHandlers: [],
|
|
||||||
retrieveMapHandlers: [],
|
|
||||||
|
|
||||||
// Priority list of internally-implemented handlers.
|
|
||||||
// When resetting state, we must keep these.
|
|
||||||
internalRetrieveFileHandlers: [],
|
|
||||||
internalRetrieveMapHandlers: [],
|
|
||||||
|
|
||||||
});
|
|
||||||
|
|
||||||
// Supports {browser, node, auto}
|
|
||||||
var environment = "auto";
|
|
||||||
|
|
||||||
// Regex for detecting source maps
|
|
||||||
var reSourceMap = /^data:application\/json[^,]+base64,/;
|
|
||||||
|
|
||||||
function isInBrowser() {
|
|
||||||
if (environment === "browser")
|
|
||||||
return true;
|
|
||||||
if (environment === "node")
|
|
||||||
return false;
|
|
||||||
return ((typeof window !== 'undefined') && (typeof XMLHttpRequest === 'function') && !(window.require && window.module && window.process && window.process.type === "renderer"));
|
|
||||||
}
|
|
||||||
|
|
||||||
function hasGlobalProcessEventEmitter() {
|
|
||||||
return ((typeof process === 'object') && (process !== null) && (typeof process.on === 'function'));
|
|
||||||
}
|
|
||||||
|
|
||||||
function handlerExec(list, internalList) {
|
|
||||||
return function(arg) {
|
|
||||||
for (var i = 0; i < list.length; i++) {
|
|
||||||
var ret = list[i](arg);
|
|
||||||
if (ret) {
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (var i = 0; i < internalList.length; i++) {
|
|
||||||
var ret = internalList[i](arg);
|
|
||||||
if (ret) {
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
var retrieveFile = handlerExec(sharedData.retrieveFileHandlers, sharedData.internalRetrieveFileHandlers);
|
|
||||||
|
|
||||||
sharedData.internalRetrieveFileHandlers.push(function(path) {
|
|
||||||
// Trim the path to make sure there is no extra whitespace.
|
|
||||||
path = path.trim();
|
|
||||||
if (/^file:/.test(path)) {
|
|
||||||
// existsSync/readFileSync can't handle file protocol, but once stripped, it works
|
|
||||||
path = path.replace(/file:\/\/\/(\w:)?/, function(protocol, drive) {
|
|
||||||
return drive ?
|
|
||||||
'' : // file:///C:/dir/file -> C:/dir/file
|
|
||||||
'/'; // file:///root-dir/file -> /root-dir/file
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (path in sharedData.fileContentsCache) {
|
|
||||||
return sharedData.fileContentsCache[path];
|
|
||||||
}
|
|
||||||
|
|
||||||
var contents = '';
|
|
||||||
try {
|
|
||||||
if (!fs) {
|
|
||||||
// Use SJAX if we are in the browser
|
|
||||||
var xhr = new XMLHttpRequest();
|
|
||||||
xhr.open('GET', path, /** async */ false);
|
|
||||||
xhr.send(null);
|
|
||||||
if (xhr.readyState === 4 && xhr.status === 200) {
|
|
||||||
contents = xhr.responseText;
|
|
||||||
}
|
|
||||||
} else if (fs.existsSync(path)) {
|
|
||||||
// Otherwise, use the filesystem
|
|
||||||
contents = fs.readFileSync(path, 'utf8');
|
|
||||||
}
|
|
||||||
} catch (er) {
|
|
||||||
/* ignore any errors */
|
|
||||||
}
|
|
||||||
|
|
||||||
return sharedData.fileContentsCache[path] = contents;
|
|
||||||
});
|
|
||||||
|
|
||||||
// Support URLs relative to a directory, but be careful about a protocol prefix
|
|
||||||
// in case we are in the browser (i.e. directories may start with "http://" or "file:///")
|
|
||||||
function supportRelativeURL(file, url) {
|
|
||||||
if (!file) return url;
|
|
||||||
var dir = path.dirname(file);
|
|
||||||
var match = /^\w+:\/\/[^\/]*/.exec(dir);
|
|
||||||
var protocol = match ? match[0] : '';
|
|
||||||
var startPath = dir.slice(protocol.length);
|
|
||||||
if (protocol && /^\/\w\:/.test(startPath)) {
|
|
||||||
// handle file:///C:/ paths
|
|
||||||
protocol += '/';
|
|
||||||
return protocol + path.resolve(dir.slice(protocol.length), url).replace(/\\/g, '/');
|
|
||||||
}
|
|
||||||
return protocol + path.resolve(dir.slice(protocol.length), url);
|
|
||||||
}
|
|
||||||
|
|
||||||
function retrieveSourceMapURL(source) {
|
|
||||||
var fileData;
|
|
||||||
|
|
||||||
if (isInBrowser()) {
|
|
||||||
try {
|
|
||||||
var xhr = new XMLHttpRequest();
|
|
||||||
xhr.open('GET', source, false);
|
|
||||||
xhr.send(null);
|
|
||||||
fileData = xhr.readyState === 4 ? xhr.responseText : null;
|
|
||||||
|
|
||||||
// Support providing a sourceMappingURL via the SourceMap header
|
|
||||||
var sourceMapHeader = xhr.getResponseHeader("SourceMap") ||
|
|
||||||
xhr.getResponseHeader("X-SourceMap");
|
|
||||||
if (sourceMapHeader) {
|
|
||||||
return sourceMapHeader;
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get the URL of the source map
|
|
||||||
fileData = retrieveFile(source);
|
|
||||||
var re = /(?:\/\/[@#][\s]*sourceMappingURL=([^\s'"]+)[\s]*$)|(?:\/\*[@#][\s]*sourceMappingURL=([^\s*'"]+)[\s]*(?:\*\/)[\s]*$)/mg;
|
|
||||||
// Keep executing the search to find the *last* sourceMappingURL to avoid
|
|
||||||
// picking up sourceMappingURLs from comments, strings, etc.
|
|
||||||
var lastMatch, match;
|
|
||||||
while (match = re.exec(fileData)) lastMatch = match;
|
|
||||||
if (!lastMatch) return null;
|
|
||||||
return lastMatch[1];
|
|
||||||
};
|
|
||||||
|
|
||||||
// Can be overridden by the retrieveSourceMap option to install. Takes a
|
|
||||||
// generated source filename; returns a {map, optional url} object, or null if
|
|
||||||
// there is no source map. The map field may be either a string or the parsed
|
|
||||||
// JSON object (ie, it must be a valid argument to the SourceMapConsumer
|
|
||||||
// constructor).
|
|
||||||
var retrieveSourceMap = handlerExec(sharedData.retrieveMapHandlers, sharedData.internalRetrieveMapHandlers);
|
|
||||||
sharedData.internalRetrieveMapHandlers.push(function(source) {
|
|
||||||
var sourceMappingURL = retrieveSourceMapURL(source);
|
|
||||||
if (!sourceMappingURL) return null;
|
|
||||||
|
|
||||||
// Read the contents of the source map
|
|
||||||
var sourceMapData;
|
|
||||||
if (reSourceMap.test(sourceMappingURL)) {
|
|
||||||
// Support source map URL as a data url
|
|
||||||
var rawData = sourceMappingURL.slice(sourceMappingURL.indexOf(',') + 1);
|
|
||||||
sourceMapData = Buffer.from(rawData, "base64").toString();
|
|
||||||
sourceMappingURL = source;
|
|
||||||
} else {
|
|
||||||
// Support source map URLs relative to the source URL
|
|
||||||
sourceMappingURL = supportRelativeURL(source, sourceMappingURL);
|
|
||||||
sourceMapData = retrieveFile(sourceMappingURL);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!sourceMapData) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
url: sourceMappingURL,
|
|
||||||
map: sourceMapData
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
function mapSourcePosition(position) {
|
|
||||||
var sourceMap = sharedData.sourceMapCache[position.source];
|
|
||||||
if (!sourceMap) {
|
|
||||||
// Call the (overridable) retrieveSourceMap function to get the source map.
|
|
||||||
var urlAndMap = retrieveSourceMap(position.source);
|
|
||||||
if (urlAndMap) {
|
|
||||||
sourceMap = sharedData.sourceMapCache[position.source] = {
|
|
||||||
url: urlAndMap.url,
|
|
||||||
map: new SourceMapConsumer(urlAndMap.map)
|
|
||||||
};
|
|
||||||
|
|
||||||
// Load all sources stored inline with the source map into the file cache
|
|
||||||
// to pretend like they are already loaded. They may not exist on disk.
|
|
||||||
if (sourceMap.map.sourcesContent) {
|
|
||||||
sourceMap.map.sources.forEach(function(source, i) {
|
|
||||||
var contents = sourceMap.map.sourcesContent[i];
|
|
||||||
if (contents) {
|
|
||||||
var url = supportRelativeURL(sourceMap.url, source);
|
|
||||||
sharedData.fileContentsCache[url] = contents;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
sourceMap = sharedData.sourceMapCache[position.source] = {
|
|
||||||
url: null,
|
|
||||||
map: null
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Resolve the source URL relative to the URL of the source map
|
|
||||||
if (sourceMap && sourceMap.map && typeof sourceMap.map.originalPositionFor === 'function') {
|
|
||||||
var originalPosition = sourceMap.map.originalPositionFor(position);
|
|
||||||
|
|
||||||
// Only return the original position if a matching line was found. If no
|
|
||||||
// matching line is found then we return position instead, which will cause
|
|
||||||
// the stack trace to print the path and line for the compiled file. It is
|
|
||||||
// better to give a precise location in the compiled file than a vague
|
|
||||||
// location in the original file.
|
|
||||||
if (originalPosition.source !== null) {
|
|
||||||
originalPosition.source = supportRelativeURL(
|
|
||||||
sourceMap.url, originalPosition.source);
|
|
||||||
return originalPosition;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return position;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parses code generated by FormatEvalOrigin(), a function inside V8:
|
|
||||||
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js
|
|
||||||
function mapEvalOrigin(origin) {
|
|
||||||
// Most eval() calls are in this format
|
|
||||||
var match = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin);
|
|
||||||
if (match) {
|
|
||||||
var position = mapSourcePosition({
|
|
||||||
source: match[2],
|
|
||||||
line: +match[3],
|
|
||||||
column: match[4] - 1
|
|
||||||
});
|
|
||||||
return 'eval at ' + match[1] + ' (' + position.source + ':' +
|
|
||||||
position.line + ':' + (position.column + 1) + ')';
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse nested eval() calls using recursion
|
|
||||||
match = /^eval at ([^(]+) \((.+)\)$/.exec(origin);
|
|
||||||
if (match) {
|
|
||||||
return 'eval at ' + match[1] + ' (' + mapEvalOrigin(match[2]) + ')';
|
|
||||||
}
|
|
||||||
|
|
||||||
// Make sure we still return useful information if we didn't find anything
|
|
||||||
return origin;
|
|
||||||
}
|
|
||||||
|
|
||||||
// This is copied almost verbatim from the V8 source code at
|
|
||||||
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js. The
|
|
||||||
// implementation of wrapCallSite() used to just forward to the actual source
|
|
||||||
// code of CallSite.prototype.toString but unfortunately a new release of V8
|
|
||||||
// did something to the prototype chain and broke the shim. The only fix I
|
|
||||||
// could find was copy/paste.
|
|
||||||
function CallSiteToString() {
|
|
||||||
var fileName;
|
|
||||||
var fileLocation = "";
|
|
||||||
if (this.isNative()) {
|
|
||||||
fileLocation = "native";
|
|
||||||
} else {
|
|
||||||
fileName = this.getScriptNameOrSourceURL();
|
|
||||||
if (!fileName && this.isEval()) {
|
|
||||||
fileLocation = this.getEvalOrigin();
|
|
||||||
fileLocation += ", "; // Expecting source position to follow.
|
|
||||||
}
|
|
||||||
|
|
||||||
if (fileName) {
|
|
||||||
fileLocation += fileName;
|
|
||||||
} else {
|
|
||||||
// Source code does not originate from a file and is not native, but we
|
|
||||||
// can still get the source position inside the source string, e.g. in
|
|
||||||
// an eval string.
|
|
||||||
fileLocation += "<anonymous>";
|
|
||||||
}
|
|
||||||
var lineNumber = this.getLineNumber();
|
|
||||||
if (lineNumber != null) {
|
|
||||||
fileLocation += ":" + lineNumber;
|
|
||||||
var columnNumber = this.getColumnNumber();
|
|
||||||
if (columnNumber) {
|
|
||||||
fileLocation += ":" + columnNumber;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var line = "";
|
|
||||||
var isAsync = this.isAsync ? this.isAsync() : false;
|
|
||||||
if(isAsync) {
|
|
||||||
line += 'async ';
|
|
||||||
var isPromiseAll = this.isPromiseAll ? this.isPromiseAll() : false;
|
|
||||||
var isPromiseAny = this.isPromiseAny ? this.isPromiseAny() : false;
|
|
||||||
if(isPromiseAny || isPromiseAll) {
|
|
||||||
line += isPromiseAll ? 'Promise.all (index ' : 'Promise.any (index ';
|
|
||||||
var promiseIndex = this.getPromiseIndex();
|
|
||||||
line += promiseIndex + ')';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
var functionName = this.getFunctionName();
|
|
||||||
var addSuffix = true;
|
|
||||||
var isConstructor = this.isConstructor();
|
|
||||||
var isMethodCall = !(this.isToplevel() || isConstructor);
|
|
||||||
if (isMethodCall) {
|
|
||||||
var typeName = this.getTypeName();
|
|
||||||
// Fixes shim to be backward compatible with Node v0 to v4
|
|
||||||
if (typeName === "[object Object]") {
|
|
||||||
typeName = "null";
|
|
||||||
}
|
|
||||||
var methodName = this.getMethodName();
|
|
||||||
if (functionName) {
|
|
||||||
if (typeName && functionName.indexOf(typeName) != 0) {
|
|
||||||
line += typeName + ".";
|
|
||||||
}
|
|
||||||
line += functionName;
|
|
||||||
if (methodName && functionName.indexOf("." + methodName) != functionName.length - methodName.length - 1) {
|
|
||||||
line += " [as " + methodName + "]";
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
line += typeName + "." + (methodName || "<anonymous>");
|
|
||||||
}
|
|
||||||
} else if (isConstructor) {
|
|
||||||
line += "new " + (functionName || "<anonymous>");
|
|
||||||
} else if (functionName) {
|
|
||||||
line += functionName;
|
|
||||||
} else {
|
|
||||||
line += fileLocation;
|
|
||||||
addSuffix = false;
|
|
||||||
}
|
|
||||||
if (addSuffix) {
|
|
||||||
line += " (" + fileLocation + ")";
|
|
||||||
}
|
|
||||||
return line;
|
|
||||||
}
|
|
||||||
|
|
||||||
function cloneCallSite(frame) {
|
|
||||||
var object = {};
|
|
||||||
Object.getOwnPropertyNames(Object.getPrototypeOf(frame)).forEach(function(name) {
|
|
||||||
object[name] = /^(?:is|get)/.test(name) ? function() { return frame[name].call(frame); } : frame[name];
|
|
||||||
});
|
|
||||||
object.toString = CallSiteToString;
|
|
||||||
return object;
|
|
||||||
}
|
|
||||||
|
|
||||||
function wrapCallSite(frame, state) {
|
|
||||||
// provides interface backward compatibility
|
|
||||||
if (state === undefined) {
|
|
||||||
state = { nextPosition: null, curPosition: null }
|
|
||||||
}
|
|
||||||
if(frame.isNative()) {
|
|
||||||
state.curPosition = null;
|
|
||||||
return frame;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Most call sites will return the source file from getFileName(), but code
|
|
||||||
// passed to eval() ending in "//# sourceURL=..." will return the source file
|
|
||||||
// from getScriptNameOrSourceURL() instead
|
|
||||||
var source = frame.getFileName() || frame.getScriptNameOrSourceURL();
|
|
||||||
if (source) {
|
|
||||||
var line = frame.getLineNumber();
|
|
||||||
var column = frame.getColumnNumber() - 1;
|
|
||||||
|
|
||||||
// Fix position in Node where some (internal) code is prepended.
|
|
||||||
// See https://github.com/evanw/node-source-map-support/issues/36
|
|
||||||
// Header removed in node at ^10.16 || >=11.11.0
|
|
||||||
// v11 is not an LTS candidate, we can just test the one version with it.
|
|
||||||
// Test node versions for: 10.16-19, 10.20+, 12-19, 20-99, 100+, or 11.11
|
|
||||||
var noHeader = /^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;
|
|
||||||
var headerLength = noHeader.test(process.version) ? 0 : 62;
|
|
||||||
if (line === 1 && column > headerLength && !isInBrowser() && !frame.isEval()) {
|
|
||||||
column -= headerLength;
|
|
||||||
}
|
|
||||||
|
|
||||||
var position = mapSourcePosition({
|
|
||||||
source: source,
|
|
||||||
line: line,
|
|
||||||
column: column
|
|
||||||
});
|
|
||||||
state.curPosition = position;
|
|
||||||
frame = cloneCallSite(frame);
|
|
||||||
var originalFunctionName = frame.getFunctionName;
|
|
||||||
frame.getFunctionName = function() {
|
|
||||||
if (state.nextPosition == null) {
|
|
||||||
return originalFunctionName();
|
|
||||||
}
|
|
||||||
return state.nextPosition.name || originalFunctionName();
|
|
||||||
};
|
|
||||||
frame.getFileName = function() { return position.source; };
|
|
||||||
frame.getLineNumber = function() { return position.line; };
|
|
||||||
frame.getColumnNumber = function() { return position.column + 1; };
|
|
||||||
frame.getScriptNameOrSourceURL = function() { return position.source; };
|
|
||||||
return frame;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Code called using eval() needs special handling
|
|
||||||
var origin = frame.isEval() && frame.getEvalOrigin();
|
|
||||||
if (origin) {
|
|
||||||
origin = mapEvalOrigin(origin);
|
|
||||||
frame = cloneCallSite(frame);
|
|
||||||
frame.getEvalOrigin = function() { return origin; };
|
|
||||||
return frame;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If we get here then we were unable to change the source position
|
|
||||||
return frame;
|
|
||||||
}
|
|
||||||
|
|
||||||
var kIsNodeError = undefined;
|
|
||||||
try {
|
|
||||||
// Get a deliberate ERR_INVALID_ARG_TYPE
|
|
||||||
// TODO is there a better way to reliably get an instance of NodeError?
|
|
||||||
path.resolve(123);
|
|
||||||
} catch(e) {
|
|
||||||
const symbols = Object.getOwnPropertySymbols(e);
|
|
||||||
const symbol = symbols.find(function (s) {return s.toString().indexOf('kIsNodeError') >= 0});
|
|
||||||
if(symbol) kIsNodeError = symbol;
|
|
||||||
}
|
|
||||||
|
|
||||||
const ErrorPrototypeToString = (err) =>Error.prototype.toString.call(err);
|
|
||||||
|
|
||||||
/** @param {HookState} hookState */
|
|
||||||
function createPrepareStackTrace(hookState) {
|
|
||||||
return prepareStackTrace;
|
|
||||||
|
|
||||||
// This function is part of the V8 stack trace API, for more info see:
|
|
||||||
// https://v8.dev/docs/stack-trace-api
|
|
||||||
function prepareStackTrace(error, stack) {
|
|
||||||
if(!hookState.enabled) return hookState.originalValue.apply(this, arguments);
|
|
||||||
|
|
||||||
if (sharedData.emptyCacheBetweenOperations) {
|
|
||||||
sharedData.fileContentsCache = {};
|
|
||||||
sharedData.sourceMapCache = {};
|
|
||||||
}
|
|
||||||
|
|
||||||
// node gives its own errors special treatment. Mimic that behavior
|
|
||||||
// https://github.com/nodejs/node/blob/3cbaabc4622df1b4009b9d026a1a970bdbae6e89/lib/internal/errors.js#L118-L128
|
|
||||||
// https://github.com/nodejs/node/pull/39182
|
|
||||||
var errorString;
|
|
||||||
if (kIsNodeError) {
|
|
||||||
if(kIsNodeError in error) {
|
|
||||||
errorString = `${error.name} [${error.code}]: ${error.message}`;
|
|
||||||
} else {
|
|
||||||
errorString = ErrorPrototypeToString(error);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
var name = error.name || 'Error';
|
|
||||||
var message = error.message || '';
|
|
||||||
errorString = name + ": " + message;
|
|
||||||
}
|
|
||||||
|
|
||||||
var state = { nextPosition: null, curPosition: null };
|
|
||||||
var processedStack = [];
|
|
||||||
for (var i = stack.length - 1; i >= 0; i--) {
|
|
||||||
processedStack.push('\n at ' + wrapCallSite(stack[i], state));
|
|
||||||
state.nextPosition = state.curPosition;
|
|
||||||
}
|
|
||||||
state.curPosition = state.nextPosition = null;
|
|
||||||
return errorString + processedStack.reverse().join('');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate position and snippet of original source with pointer
|
|
||||||
function getErrorSource(error) {
|
|
||||||
var match = /\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(error.stack);
|
|
||||||
if (match) {
|
|
||||||
var source = match[1];
|
|
||||||
var line = +match[2];
|
|
||||||
var column = +match[3];
|
|
||||||
|
|
||||||
// Support the inline sourceContents inside the source map
|
|
||||||
var contents = sharedData.fileContentsCache[source];
|
|
||||||
|
|
||||||
// Support files on disk
|
|
||||||
if (!contents && fs && fs.existsSync(source)) {
|
|
||||||
try {
|
|
||||||
contents = fs.readFileSync(source, 'utf8');
|
|
||||||
} catch (er) {
|
|
||||||
contents = '';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Format the line from the original source code like node does
|
|
||||||
if (contents) {
|
|
||||||
var code = contents.split(/(?:\r\n|\r|\n)/)[line - 1];
|
|
||||||
if (code) {
|
|
||||||
return source + ':' + line + '\n' + code + '\n' +
|
|
||||||
new Array(column).join(' ') + '^';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
function printFatalErrorUponExit (error) {
|
|
||||||
var source = getErrorSource(error);
|
|
||||||
|
|
||||||
// Ensure error is printed synchronously and not truncated
|
|
||||||
if (process.stderr._handle && process.stderr._handle.setBlocking) {
|
|
||||||
process.stderr._handle.setBlocking(true);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (source) {
|
|
||||||
console.error(source);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Matches node's behavior for colorized output
|
|
||||||
console.error(
|
|
||||||
util.inspect(error, {
|
|
||||||
customInspect: false,
|
|
||||||
colors: process.stderr.isTTY
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function shimEmitUncaughtException () {
|
|
||||||
const originalValue = process.emit;
|
|
||||||
var hook = sharedData.processEmitHook = {
|
|
||||||
enabled: true,
|
|
||||||
originalValue,
|
|
||||||
installedValue: undefined
|
|
||||||
};
|
|
||||||
var isTerminatingDueToFatalException = false;
|
|
||||||
var fatalException;
|
|
||||||
|
|
||||||
process.emit = sharedData.processEmitHook.installedValue = function (type) {
|
|
||||||
const hadListeners = originalValue.apply(this, arguments);
|
|
||||||
if(hook.enabled) {
|
|
||||||
if (type === 'uncaughtException' && !hadListeners) {
|
|
||||||
isTerminatingDueToFatalException = true;
|
|
||||||
fatalException = arguments[1];
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
if (type === 'exit' && isTerminatingDueToFatalException) {
|
|
||||||
printFatalErrorUponExit(fatalException);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return hadListeners;
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
var originalRetrieveFileHandlers = sharedData.retrieveFileHandlers.slice(0);
|
|
||||||
var originalRetrieveMapHandlers = sharedData.retrieveMapHandlers.slice(0);
|
|
||||||
|
|
||||||
exports.wrapCallSite = wrapCallSite;
|
|
||||||
exports.getErrorSource = getErrorSource;
|
|
||||||
exports.mapSourcePosition = mapSourcePosition;
|
|
||||||
exports.retrieveSourceMap = retrieveSourceMap;
|
|
||||||
|
|
||||||
exports.install = function(options) {
|
|
||||||
options = options || {};
|
|
||||||
|
|
||||||
if (options.environment) {
|
|
||||||
environment = options.environment;
|
|
||||||
if (["node", "browser", "auto"].indexOf(environment) === -1) {
|
|
||||||
throw new Error("environment " + environment + " was unknown. Available options are {auto, browser, node}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Use dynamicRequire to avoid including in browser bundles
|
|
||||||
var Module = dynamicRequire(module, 'module');
|
|
||||||
|
|
||||||
// Redirect subsequent imports of "source-map-support"
|
|
||||||
// to this package
|
|
||||||
const {redirectConflictingLibrary = true, onConflictingLibraryRedirect} = options;
|
|
||||||
if(redirectConflictingLibrary) {
|
|
||||||
if (!sharedData.moduleResolveFilenameHook) {
|
|
||||||
const originalValue = Module._resolveFilename;
|
|
||||||
const moduleResolveFilenameHook = sharedData.moduleResolveFilenameHook = {
|
|
||||||
enabled: true,
|
|
||||||
originalValue,
|
|
||||||
installedValue: undefined,
|
|
||||||
}
|
|
||||||
Module._resolveFilename = sharedData.moduleResolveFilenameHook.installedValue = function (request, parent, isMain, options) {
|
|
||||||
if (moduleResolveFilenameHook.enabled) {
|
|
||||||
// Match all source-map-support entrypoints: source-map-support, source-map-support/register
|
|
||||||
let requestRedirect;
|
|
||||||
if (request === 'source-map-support') {
|
|
||||||
requestRedirect = './';
|
|
||||||
} else if (request === 'source-map-support/register') {
|
|
||||||
requestRedirect = './register';
|
|
||||||
}
|
|
||||||
|
|
||||||
if (requestRedirect !== undefined) {
|
|
||||||
const newRequest = require.resolve(requestRedirect);
|
|
||||||
for (const cb of sharedData.onConflictingLibraryRedirectArr) {
|
|
||||||
cb(request, parent, isMain, options, newRequest);
|
|
||||||
}
|
|
||||||
request = newRequest;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return originalValue.call(this, request, parent, isMain, options);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (onConflictingLibraryRedirect) {
|
|
||||||
sharedData.onConflictingLibraryRedirectArr.push(onConflictingLibraryRedirect);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Allow sources to be found by methods other than reading the files
|
|
||||||
// directly from disk.
|
|
||||||
if (options.retrieveFile) {
|
|
||||||
if (options.overrideRetrieveFile) {
|
|
||||||
sharedData.retrieveFileHandlers.length = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
sharedData.retrieveFileHandlers.unshift(options.retrieveFile);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Allow source maps to be found by methods other than reading the files
|
|
||||||
// directly from disk.
|
|
||||||
if (options.retrieveSourceMap) {
|
|
||||||
if (options.overrideRetrieveSourceMap) {
|
|
||||||
sharedData.retrieveMapHandlers.length = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
sharedData.retrieveMapHandlers.unshift(options.retrieveSourceMap);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Support runtime transpilers that include inline source maps
|
|
||||||
if (options.hookRequire && !isInBrowser()) {
|
|
||||||
var $compile = Module.prototype._compile;
|
|
||||||
|
|
||||||
if (!$compile.__sourceMapSupport) {
|
|
||||||
Module.prototype._compile = function(content, filename) {
|
|
||||||
sharedData.fileContentsCache[filename] = content;
|
|
||||||
sharedData.sourceMapCache[filename] = undefined;
|
|
||||||
return $compile.call(this, content, filename);
|
|
||||||
};
|
|
||||||
|
|
||||||
Module.prototype._compile.__sourceMapSupport = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Configure options
|
|
||||||
if (!sharedData.emptyCacheBetweenOperations) {
|
|
||||||
sharedData.emptyCacheBetweenOperations = 'emptyCacheBetweenOperations' in options ?
|
|
||||||
options.emptyCacheBetweenOperations : false;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
// Install the error reformatter
|
|
||||||
if (!sharedData.errorPrepareStackTraceHook) {
|
|
||||||
const originalValue = Error.prepareStackTrace;
|
|
||||||
sharedData.errorPrepareStackTraceHook = {
|
|
||||||
enabled: true,
|
|
||||||
originalValue,
|
|
||||||
installedValue: undefined
|
|
||||||
};
|
|
||||||
Error.prepareStackTrace = sharedData.errorPrepareStackTraceHook.installedValue = createPrepareStackTrace(sharedData.errorPrepareStackTraceHook);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!sharedData.processEmitHook) {
|
|
||||||
var installHandler = 'handleUncaughtExceptions' in options ?
|
|
||||||
options.handleUncaughtExceptions : true;
|
|
||||||
|
|
||||||
// Do not override 'uncaughtException' with our own handler in Node.js
|
|
||||||
// Worker threads. Workers pass the error to the main thread as an event,
|
|
||||||
// rather than printing something to stderr and exiting.
|
|
||||||
try {
|
|
||||||
// We need to use `dynamicRequire` because `require` on its own will be optimized by WebPack/Browserify.
|
|
||||||
var worker_threads = dynamicRequire(module, 'worker_threads');
|
|
||||||
if (worker_threads.isMainThread === false) {
|
|
||||||
installHandler = false;
|
|
||||||
}
|
|
||||||
} catch(e) {}
|
|
||||||
|
|
||||||
// Provide the option to not install the uncaught exception handler. This is
|
|
||||||
// to support other uncaught exception handlers (in test frameworks, for
|
|
||||||
// example). If this handler is not installed and there are no other uncaught
|
|
||||||
// exception handlers, uncaught exceptions will be caught by node's built-in
|
|
||||||
// exception handler and the process will still be terminated. However, the
|
|
||||||
// generated JavaScript code will be shown above the stack trace instead of
|
|
||||||
// the original source code.
|
|
||||||
if (installHandler && hasGlobalProcessEventEmitter()) {
|
|
||||||
shimEmitUncaughtException();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
exports.uninstall = function() {
|
|
||||||
if(sharedData.processEmitHook) {
|
|
||||||
// Disable behavior
|
|
||||||
sharedData.processEmitHook.enabled = false;
|
|
||||||
// If possible, remove our hook function. May not be possible if subsequent third-party hooks have wrapped around us.
|
|
||||||
if(process.emit === sharedData.processEmitHook.installedValue) {
|
|
||||||
process.emit = sharedData.processEmitHook.originalValue;
|
|
||||||
}
|
|
||||||
sharedData.processEmitHook = undefined;
|
|
||||||
}
|
|
||||||
if(sharedData.errorPrepareStackTraceHook) {
|
|
||||||
// Disable behavior
|
|
||||||
sharedData.errorPrepareStackTraceHook.enabled = false;
|
|
||||||
// If possible or necessary, remove our hook function.
|
|
||||||
// In vanilla environments, prepareStackTrace is `undefined`.
|
|
||||||
// We cannot delegate to `undefined` the way we can to a function w/`.apply()`; our only option is to remove the function.
|
|
||||||
// If we are the *first* hook installed, and another was installed on top of us, we have no choice but to remove both.
|
|
||||||
if(Error.prepareStackTrace === sharedData.errorPrepareStackTraceHook.installedValue || typeof sharedData.errorPrepareStackTraceHook.originalValue !== 'function') {
|
|
||||||
Error.prepareStackTrace = sharedData.errorPrepareStackTraceHook.originalValue;
|
|
||||||
}
|
|
||||||
sharedData.errorPrepareStackTraceHook = undefined;
|
|
||||||
}
|
|
||||||
if (sharedData.moduleResolveFilenameHook) {
|
|
||||||
// Disable behavior
|
|
||||||
sharedData.moduleResolveFilenameHook.enabled = false;
|
|
||||||
// If possible, remove our hook function. May not be possible if subsequent third-party hooks have wrapped around us.
|
|
||||||
var Module = dynamicRequire(module, 'module');
|
|
||||||
if(Module._resolveFilename === sharedData.moduleResolveFilenameHook.installedValue) {
|
|
||||||
Module._resolveFilename = sharedData.moduleResolveFilenameHook.originalValue;
|
|
||||||
}
|
|
||||||
sharedData.moduleResolveFilenameHook = undefined;
|
|
||||||
}
|
|
||||||
sharedData.onConflictingLibraryRedirectArr.length = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.resetRetrieveHandlers = function() {
|
|
||||||
sharedData.retrieveFileHandlers.length = 0;
|
|
||||||
sharedData.retrieveMapHandlers.length = 0;
|
|
||||||
}
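The exports at the end of this file pair naturally with the `handleUncaughtExceptions: false` option from the README: a host application can keep its own fatal-error handler and still print the original-source snippet. A minimal sketch (the handler wording is invented for illustration):

```js
var sms = require('@cspotcode/source-map-support');
sms.install({ handleUncaughtExceptions: false });

process.on('uncaughtException', function (err) {
  // getErrorSource() returns the original-source line with a ^ pointer,
  // or null if no source map information is available for the top frame.
  var snippet = sms.getErrorSource(err);
  if (snippet) console.error(snippet);
  console.error(err.stack); // stack is already rewritten by install()
  process.exit(1);
});
```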
500
server/node_modules/@mapbox/node-pre-gyp/CHANGELOG.md
generated
vendored
@@ -1,500 +0,0 @@
# node-pre-gyp changelog
|
|
||||||
|
|
||||||
## 1.0.8
|
|
||||||
- Downgraded npmlog to maintain node v10 and v8 support (https://github.com/mapbox/node-pre-gyp/pull/624)
|
|
||||||
|
|
||||||
## 1.0.7
|
|
||||||
- Upgraded nyc and npmlog to address https://github.com/advisories/GHSA-93q8-gq69-wqmw
|
|
||||||
|
|
||||||
## 1.0.6
|
|
||||||
- Added node v17 to the internal node releases listing
|
|
||||||
- Upgraded various dependencies declared in package.json to latest major versions (node-fetch from 2.6.1 to 2.6.5, npmlog from 4.1.2 to 5.01, semver from 7.3.4 to 7.3.5, and tar from 6.1.0 to 6.1.11)
|
|
||||||
- Fixed bug in `staging_host` parameter (https://github.com/mapbox/node-pre-gyp/pull/590)
|
|
||||||
|
|
||||||
|
|
||||||
## 1.0.5
|
|
||||||
- Fix circular reference warning with node >= v14
|
|
||||||
|
|
||||||
## 1.0.4
|
|
||||||
- Added node v16 to the internal node releases listing
|
|
||||||
|
|
||||||
## 1.0.3
|
|
||||||
- Improved support configuring s3 uploads (solves https://github.com/mapbox/node-pre-gyp/issues/571)
|
|
||||||
- New options added in https://github.com/mapbox/node-pre-gyp/pull/576: 'bucket', 'region', and `s3ForcePathStyle`
|
|
||||||
|
|
||||||
## 1.0.2
|
|
||||||
- Fixed regression in proxy support (https://github.com/mapbox/node-pre-gyp/issues/572)
|
|
||||||
|
|
||||||
## 1.0.1
|
|
||||||
- Switched from mkdirp@1.0.4 to make-dir@3.1.0 to avoid this bug: https://github.com/isaacs/node-mkdirp/issues/31
|
|
||||||
|
|
||||||
## 1.0.0
|
|
||||||
- Module is now name-spaced at `@mapbox/node-pre-gyp` and the original `node-pre-gyp` is deprecated.
|
|
||||||
- New: support for staging and production s3 targets (see README.md)
|
|
||||||
- BREAKING: no longer supporting `node_pre_gyp_accessKeyId` & `node_pre_gyp_secretAccessKey`, use `AWS_ACCESS_KEY_ID` & `AWS_SECRET_ACCESS_KEY` instead to authenticate against s3 for `info`, `publish`, and `unpublish` commands.
|
|
||||||
- Dropped node v6 support, added node v14 support
|
|
||||||
- Switched tests to use mapbox-owned bucket for testing
|
|
||||||
- Added coverage tracking and linting with eslint
|
|
||||||
- Added back support for symlinks inside the tarball
|
|
||||||
- Upgraded all test apps to N-API/node-addon-api
|
|
||||||
- New: support for staging and production s3 targets (see README.md)
|
|
||||||
- Added `node_pre_gyp_s3_host` env var which has priority over the `--s3_host` option or default.
|
|
||||||
- Replaced needle with node-fetch
|
|
||||||
- Added proxy support for node-fetch
|
|
||||||
- Upgraded to mkdirp@1.x
|
|
||||||
|
|
||||||
## 0.17.0
|
|
||||||
- Got travis + appveyor green again
|
|
||||||
- Added support for more node versions
|
|
||||||
|
|
||||||
## 0.16.0
|
|
||||||
|
|
||||||
- Added Node 15 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/520)
|
|
||||||
|
|
||||||
## 0.15.0
|
|
||||||
|
|
||||||
- Bump dependency on `mkdirp` from `^0.5.1` to `^0.5.3` (https://github.com/mapbox/node-pre-gyp/pull/492)
|
|
||||||
- Bump dependency on `needle` from `^2.2.1` to `^2.5.0` (https://github.com/mapbox/node-pre-gyp/pull/502)
|
|
||||||
- Added Node 14 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/501)
|
|
||||||
|
|
||||||
## 0.14.0
|
|
||||||
|
|
||||||
- Defer modules requires in napi.js (https://github.com/mapbox/node-pre-gyp/pull/434)
|
|
||||||
- Bump dependency on `tar` from `^4` to `^4.4.2` (https://github.com/mapbox/node-pre-gyp/pull/454)
|
|
||||||
- Support extracting compiled binary from local offline mirror (https://github.com/mapbox/node-pre-gyp/pull/459)
|
|
||||||
- Added Node 13 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/483)
|
|
||||||
|
|
||||||
## 0.13.0
|
|
||||||
|
|
||||||
- Added Node 12 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/449)
|
|
||||||
|
|
||||||
## 0.12.0
|
|
||||||
|
|
||||||
- Fixed double-build problem with node v10 (https://github.com/mapbox/node-pre-gyp/pull/428)
|
|
||||||
- Added node 11 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/422)
|
|
||||||
|
|
||||||
## 0.11.0
|
|
||||||
|
|
||||||
- Fixed double-install problem with node v10
|
|
||||||
- Significant N-API improvements (https://github.com/mapbox/node-pre-gyp/pull/405)
|
|
||||||
|
|
||||||
## 0.10.3
|
|
||||||
|
|
||||||
- Now will use `request` over `needle` if request is installed. By default `needle` is used for `https`. This should unbreak proxy support that regressed in v0.9.0
|
|
||||||
|
|
||||||
## 0.10.2
|
|
||||||
|
|
||||||
- Fixed rc/deep-extent security vulnerability
|
|
||||||
- Fixed broken reinstall script due to incorrectly named get_best_napi_version
|
|
||||||
|
|
||||||
## 0.10.1
|
|
||||||
|
|
||||||
- Fix needle error event (@medns)
|
|
||||||
|
|
||||||
## 0.10.0
|
|
||||||
|
|
||||||
- Allow for a single-level module path when packing @allenluce (https://github.com/mapbox/node-pre-gyp/pull/371)
|
|
||||||
- Log warnings instead of errors when falling back @xzyfer (https://github.com/mapbox/node-pre-gyp/pull/366)
|
|
||||||
- Add Node.js v10 support to tests (https://github.com/mapbox/node-pre-gyp/pull/372)
|
|
||||||
- Remove retire.js from CI (https://github.com/mapbox/node-pre-gyp/pull/372)
|
|
||||||
- Remove support for Node.js v4 due to [EOL on April 30th, 2018](https://github.com/nodejs/Release/blob/7dd52354049cae99eed0e9fe01345b0722a86fde/schedule.json#L14)
|
|
||||||
- Update appveyor tests to install default NPM version instead of NPM v2.x for all Windows builds (https://github.com/mapbox/node-pre-gyp/pull/375)
|
|
||||||
|
|
||||||
## 0.9.1
|
|
||||||
|
|
||||||
- Fixed regression (in v0.9.0) with support for http redirects @allenluce (https://github.com/mapbox/node-pre-gyp/pull/361)
|
|
||||||
|
|
||||||
## 0.9.0
|
|
||||||
|
|
||||||
- Switched from using `request` to `needle` to reduce size of module deps (https://github.com/mapbox/node-pre-gyp/pull/350)
|
|
||||||
|
|
||||||
## 0.8.0
|
|
||||||
|
|
||||||
- N-API support (@inspiredware)
|
|
||||||
|
|
||||||
## 0.7.1
|
|
||||||
|
|
||||||
- Upgraded to tar v4.x
|
|
||||||
|
|
||||||
## 0.7.0
|
|
||||||
|
|
||||||
- Updated request and hawk (#347)
|
|
||||||
- Dropped node v0.10.x support
|
|
||||||
|
|
||||||
## 0.6.40
|
|
||||||
|
|
||||||
- Improved error reporting if an install fails
|
|
||||||
|
|
||||||
## 0.6.39
|
|
||||||
|
|
||||||
- Support for node v9
|
|
||||||
- Support for versioning on `{libc}` to allow binaries to work on non-glibc linux systems like alpine linux
|
|
||||||
|
|
||||||
|
|
||||||
## 0.6.38
|
|
||||||
|
|
||||||
- Maintaining compatibility (for v0.6.x series) with node v0.10.x
|
|
||||||
|
|
||||||
## 0.6.37
|
|
||||||
|
|
||||||
- Solved one part of #276: we now deduce the node ABI from the major version for node >= 2 even when it is not stored in the abi_crosswalk.json
|
|
||||||
- Fixed docs to avoid mentioning the deprecated and dangerous `prepublish` in package.json (#291)
|
|
||||||
- Add new node versions to crosswalk
|
|
||||||
- Ported tests to use tape instead of mocha
|
|
||||||
- Got appveyor tests passing by downgrading npm and node-gyp
|
|
||||||
|
|
||||||
## 0.6.36
|
|
||||||
|
|
||||||
- Removed the running of `testbinary` during install. Because this had been regressed for so long, it is too dangerous to re-enable by default. Developers needing validation can call `node-pre-gyp testbinary` directly.
|
|
||||||
- Fixed regression in v0.6.35 for electron installs (now skipping binary validation which is not yet supported for electron)
|
|
||||||
|
|
||||||
## 0.6.35
|
|
||||||
|
|
||||||
- No longer recommending `npm ls` in `prepublish` (#291)
|
|
||||||
- Fixed testbinary command (#283) @szdavid92
|
|
||||||
|
|
||||||
## 0.6.34
|
|
||||||
|
|
||||||
- Added new node versions to crosswalk, including v8
|
|
||||||
- Upgraded deps to latest versions, started using `^` instead of `~` for all deps.
|
|
||||||
|
|
||||||
## 0.6.33
|
|
||||||
|
|
||||||
- Improved support for yarn
|
|
||||||
|
|
||||||
## 0.6.32
|
|
||||||
|
|
||||||
- Honor npm configuration for CA bundles (@heikkipora)
|
|
||||||
- Add node-pre-gyp and npm versions to user agent (@addaleax)
|
|
||||||
- Updated various deps
|
|
||||||
- Add known node version for v7.x
|
|
||||||
|
|
||||||
## 0.6.31
|
|
||||||
|
|
||||||
- Updated various deps
|
|
||||||
|
|
||||||
## 0.6.30
|
|
||||||
|
|
||||||
- Update to npmlog@4.x and semver@5.3.x
|
|
||||||
- Add known node version for v6.5.0
|
|
||||||
|
|
||||||
## 0.6.29
|
|
||||||
|
|
||||||
- Add known node versions for v0.10.45, v0.12.14, v4.4.4, v5.11.1, and v6.1.0
|
|
||||||
|
|
||||||
## 0.6.28
|
|
||||||
|
|
||||||
- Now more verbose when remote binaries are not available. This is needed since npm is increasingly more quiet by default
|
|
||||||
and users need to know why builds are falling back to source compiles that might then error out.
|
|
||||||
|
|
||||||
## 0.6.27
|
|
||||||
|
|
||||||
- Add known node version for node v6
|
|
||||||
- Stopped bundling dependencies
|
|
||||||
- Documented method for module authors to avoid bundling node-pre-gyp
|
|
||||||
- See https://github.com/mapbox/node-pre-gyp/tree/master#configuring for details
|
|
||||||
|
|
||||||
## 0.6.26
|
|
||||||
|
|
||||||
- Skip validation for nw runtime (https://github.com/mapbox/node-pre-gyp/pull/181) via @fleg
|
|
||||||
|
|
||||||
## 0.6.25
|
|
||||||
|
|
||||||
- Improved support for auto-detection of electron runtime in `node-pre-gyp.find()`
|
|
||||||
- Pull request from @enlight - https://github.com/mapbox/node-pre-gyp/pull/187
|
|
||||||
- Add known node version for 4.4.1 and 5.9.1
|
|
||||||
|
|
||||||
## 0.6.24
|
|
||||||
|
|
||||||
- Add known node version for 5.8.0, 5.9.0, and 4.4.0.
|
|
||||||
|
|
||||||
## 0.6.23
|
|
||||||
|
|
||||||
- Add known node version for 0.10.43, 0.12.11, 4.3.2, and 5.7.1.
|
|
||||||
|
|
||||||
## 0.6.22
|
|
||||||
|
|
||||||
- Add known node version for 4.3.1, and 5.7.0.
|
|
||||||
|
|
||||||
## 0.6.21
|
|
||||||
|
|
||||||
- Add known node version for 0.10.42, 0.12.10, 4.3.0, and 5.6.0.
|
|
||||||
|
|
||||||
## 0.6.20
|
|
||||||
|
|
||||||
- Add known node version for 4.2.5, 4.2.6, 5.4.0, 5.4.1,and 5.5.0.
|
|
||||||
|
|
||||||
## 0.6.19
|
|
||||||
|
|
||||||
- Add known node version for 4.2.4
|
|
||||||
|
|
||||||
## 0.6.18
|
|
||||||
|
|
||||||
- Add new known node versions for 0.10.x, 0.12.x, 4.x, and 5.x
|
|
||||||
|
|
||||||
## 0.6.17
|
|
||||||
|
|
||||||
- Re-tagged to fix packaging problem of `Error: Cannot find module 'isarray'`
|
|
||||||
|
|
||||||
## 0.6.16
|
|
||||||
|
|
||||||
- Added known version in crosswalk for 5.1.0.
|
|
||||||
|
|
||||||
## 0.6.15
|
|
||||||
|
|
||||||
- Upgraded tar-pack (https://github.com/mapbox/node-pre-gyp/issues/182)
|
|
||||||
- Support custom binary hosting mirror (https://github.com/mapbox/node-pre-gyp/pull/170)
|
|
||||||
- Added known version in crosswalk for 4.2.2.
|
|
||||||
|
|
||||||
## 0.6.14
|
|
||||||
|
|
||||||
- Added node 5.x version
|
|
||||||
|
|
||||||
## 0.6.13
|
|
||||||
|
|
||||||
- Added more known node 4.x versions
|
|
||||||
|
|
||||||
## 0.6.12
|
|
||||||
|
|
||||||
- Added support for [Electron](http://electron.atom.io/). Just pass the `--runtime=electron` flag when building/installing. Thanks @zcbenz
|
|
||||||
|
|
||||||
## 0.6.11
|
|
||||||
|
|
||||||
- Added known node and io.js versions including more 3.x and 4.x versions
|
|
||||||
|
|
||||||
## 0.6.10
|
|
||||||
|
|
||||||
- Added known node and io.js versions including 3.x and 4.x versions
|
|
||||||
- Upgraded `tar` dep
|
|
||||||
|
|
||||||
## 0.6.9
|
|
||||||
|
|
||||||
- Upgraded `rc` dep
|
|
||||||
- Updated known io.js version: v2.4.0
|
|
||||||
|
|
||||||
## 0.6.8
|
|
||||||
|
|
||||||
- Upgraded `semver` and `rimraf` deps
|
|
||||||
- Updated known node and io.js versions
|
|
||||||
|
|
||||||
## 0.6.7
|
|
||||||
|
|
||||||
- Fixed `node_abi` versions for io.js 1.1.x -> 1.8.x (should be 43, but was stored as 42) (refs https://github.com/iojs/build/issues/94)
|
|
||||||
|
|
||||||
## 0.6.6
|
|
||||||
|
|
||||||
- Updated with known io.js 2.0.0 version
|
|
||||||
|
|
||||||
## 0.6.5
|
|
||||||
|
|
||||||
- Now respecting `npm_config_node_gyp` (https://github.com/npm/npm/pull/4887)
|
|
||||||
- Updated to semver@4.3.2
|
|
||||||
- Updated known node v0.12.x versions and io.js 1.x versions.
|
|
||||||
|
|
||||||
## 0.6.4
|
|
||||||
|
|
||||||
- Improved support for `io.js` (@fengmk2)
|
|
||||||
- Test coverage improvements (@mikemorris)
|
|
||||||
- Fixed support for `--dist-url` that regressed in 0.6.3
|
|
||||||
|
|
||||||
## 0.6.3
|
|
||||||
|
|
||||||
- Added support for passing raw options to node-gyp using `--` separator. Flags passed after
|
|
||||||
the `--` to `node-pre-gyp configure` will be passed directly to gyp while flags passed
|
|
||||||
after the `--` will be passed directly to make/visual studio.
|
|
||||||
- Added `node-pre-gyp configure` command to be able to call `node-gyp configure` directly
|
|
||||||
- Fix issue with require validation not working on windows 7 (@edgarsilva)
|
|
||||||
|
|
||||||
## 0.6.2
|
|
||||||
|
|
||||||
- Support for io.js >= v1.0.2
|
|
||||||
- Deferred require of `request` and `tar` to help speed up command line usage of `node-pre-gyp`.
|
|
||||||
|
|
||||||
## 0.6.1
|
|
||||||
|
|
||||||
- Fixed bundled `tar` version
|
|
||||||
|
|
||||||
## 0.6.0
|
|
||||||
|
|
||||||
- BREAKING: node odd releases like v0.11.x now use `major.minor.patch` for `{node_abi}` instead of `NODE_MODULE_VERSION` (#124)
|
|
||||||
- Added support for `toolset` option in versioning. By default it is an empty string, but `--toolset` can be passed to publish or install to select alternative binaries that target a custom toolset like C++11. For example, to target Visual Studio 2014, modules like node-sqlite3 use `--toolset=v140`.
|
|
||||||
- Added support for `--no-rollback` option to request that a failed binary test does not remove the binary module but leaves it in place.
|
|
||||||
- Added support for `--update-binary` option to request an existing binary be re-installed and the check for a valid local module be skipped.
|
|
||||||
- Added support for passing build options from `npm` through `node-pre-gyp` to `node-gyp`: `--nodedir`, `--disturl`, `--python`, and `--msvs_version`
|
|
||||||
|
|
||||||
## 0.5.31
|
|
||||||
|
|
||||||
- Added support for deducing node_abi for node.js runtime from previous release if the series is even
|
|
||||||
- Added support for --target=0.10.33
|
|
||||||
|
|
||||||
## 0.5.30
|
|
||||||
|
|
||||||
- Repackaged with latest bundled deps
|
|
||||||
|
|
||||||
## 0.5.29
|
|
||||||
|
|
||||||
- Added support for semver `build`.
|
|
||||||
- Fixed support for downloading from urls that include `+`.
|
|
||||||
|
|
||||||
## 0.5.28
|
|
||||||
|
|
||||||
- Now reporting unix style paths only in reveal command
|
|
||||||
|
|
||||||
## 0.5.27
|
|
||||||
|
|
||||||
- Fixed support for auto-detecting s3 bucket name when it contains `.` - @taavo
|
|
||||||
- Fixed support for installing when path contains a `'` - @halfdan
|
|
||||||
- Ported tests to mocha
|
|
||||||
|
|
||||||
## 0.5.26
|
|
||||||
|
|
||||||
- Fix node-webkit support when `--target` option is not provided
|
|
||||||
|
|
||||||
## 0.5.25
|
|
||||||
|
|
||||||
- Fix bundling of deps
|
|
||||||
|
|
||||||
## 0.5.24
|
|
||||||
|
|
||||||
- Updated ABI crosswalk to include node v0.10.30 and v0.10.31
|
|
||||||
|
|
||||||
## 0.5.23
|
|
||||||
|
|
||||||
- Added `reveal` command. Pass no options to get all versioning data as json. Pass a second arg to grab a single versioned property value
|
|
||||||
- Added support for `--silent` (shortcut for `--loglevel=silent`)
|
|
||||||
|
|
||||||
## 0.5.22
|
|
||||||
|
|
||||||
- Fixed node-webkit versioning name (NOTE: node-webkit support still experimental)
|
|
||||||
|
|
||||||
## 0.5.21
|
|
||||||
|
|
||||||
- New package to fix `shasum check failed` error with v0.5.20
|
|
||||||
|
|
||||||
## 0.5.20
|
|
||||||
|
|
||||||
- Now versioning node-webkit binaries based on major.minor.patch - assuming no compatible ABI across versions (#90)
|
|
||||||
|
|
||||||
## 0.5.19
|
|
||||||
|
|
||||||
- Updated to know about more node-webkit releases
|
|
||||||
|
|
||||||
## 0.5.18
|
|
||||||
|
|
||||||
- Updated to know about more node-webkit releases
|
|
||||||
|
|
||||||
## 0.5.17
|
|
||||||
|
|
||||||
- Updated to know about node v0.10.29 release
|
|
||||||
|
|
||||||
## 0.5.16
|
|
||||||
|
|
||||||
- Now supporting all aws-sdk configuration parameters (http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html) (#86)
|
|
||||||
|
|
||||||
## 0.5.15
|
|
||||||
|
|
||||||
- Fixed installation of windows packages sub directories on unix systems (#84)
|
|
||||||
|
|
||||||
## 0.5.14
|
|
||||||
|
|
||||||
- Finished support for cross building using `--target_platform` option (#82)
|
|
||||||
- Now skipping binary validation on install if target arch/platform do not match the host.
|
|
||||||
- Removed multi-arch validation for OS X since it required a FAT node.js binary
|
|
||||||
|
|
||||||
## 0.5.13
|
|
||||||
|
|
||||||
- Fix problem in 0.5.12 whereby the wrong versions of mkdirp and semver were bundled.
|
|
||||||
|
|
||||||
## 0.5.12
|
|
||||||
|
|
||||||
- Improved support for node-webkit (@Mithgol)
|
|
||||||
|
|
||||||
## 0.5.11
|
|
||||||
|
|
||||||
- Updated target versions listing
|
|
||||||
|
|
||||||
## 0.5.10
|
|
||||||
|
|
||||||
- Fixed handling of `-debug` flag passed directly to node-pre-gyp (#72)
|
|
||||||
- Added optional second arg to `node_pre_gyp.find` to customize the default versioning options used to locate the runtime binary
|
|
||||||
- Failed install due to `testbinary` check failure no longer leaves behind binary (#70)
|
|
||||||
|
|
||||||
## 0.5.9
|
|
||||||
|
|
||||||
- Fixed regression in `testbinary` command causing installs to fail on windows with 0.5.7 (#60)
|
|
||||||
|
|
||||||
## 0.5.8
|
|
||||||
|
|
||||||
- Started bundling deps
|
|
||||||
|
|
||||||
## 0.5.7
|
|
||||||
|
|
||||||
- Fixed the `testbinary` check, which is used to determine whether to re-download or source compile, to work even in complex dependency situations (#63)
|
|
||||||
- Exposed the internal `testbinary` command in node-pre-gyp command line tool
|
|
||||||
- Fixed minor bug so that `fallback_to_build` option is always respected
|
|
||||||
|
|
||||||
## 0.5.6
|
|
||||||
|
|
||||||
- Added support for versioning on the `name` value in `package.json` (#57).
|
|
||||||
- Moved to using streams for reading tarball when publishing (#52)
|
|
||||||
|
|
||||||
## 0.5.5
|
|
||||||
|
|
||||||
- Improved binary validation that also now works with node-webkit (@Mithgol)
|
|
||||||
- Upgraded test apps to work with node v0.11.x
|
|
||||||
- Improved test coverage
|
|
||||||
|
|
||||||
## 0.5.4
|
|
||||||
|
|
||||||
- No longer depends on external install of node-gyp for compiling builds.
|
|
||||||
|
|
||||||
## 0.5.3
|
|
||||||
|
|
||||||
- Reverted fix for debian/nodejs since it broke windows (#45)
|
|
||||||
|
|
||||||
## 0.5.2
|
|
||||||
|
|
||||||
- Support for debian systems where the node binary is named `nodejs` (#45)
|
|
||||||
- Added `bin/node-pre-gyp.cmd` to be able to run the command locally on Windows (npm creates a `.cmd` shim automatically when the package is installed globally)
|
|
||||||
- Updated abi-crosswalk with node v0.10.26 entry.
|
|
||||||
|
|
||||||
## 0.5.1
|
|
||||||
|
|
||||||
- Various minor bug fixes, several improving windows support for publishing.
|
|
||||||
|
|
||||||
## 0.5.0
|
|
||||||
|
|
||||||
- Changed property names in `binary` object: now required are `module_name`, `module_path`, and `host`.
|
|
||||||
- Now `module_path` supports versioning, which allows developers to opt-in to using a versioned install path (#18).
|
|
||||||
- Added `remote_path` which also supports versioning.
|
|
||||||
- Changed `remote_uri` to `host`.
|
|
||||||
|
|
||||||
## 0.4.2
|
|
||||||
|
|
||||||
- Added support for `--target` flag to request cross-compile against a specific node/node-webkit version.
|
|
||||||
- Added preliminary support for node-webkit
|
|
||||||
- Fixed support for `--target_arch` option being respected in all cases.
|
|
||||||
|
|
||||||
## 0.4.1
|
|
||||||
|
|
||||||
- Fixed exception when only stderr is available in binary test (@bendi / #31)
|
|
||||||
|
|
||||||
## 0.4.0
|
|
||||||
|
|
||||||
- Enforce only `https:` based remote publishing access.
|
|
||||||
- Added `node-pre-gyp info` command to display listing of published binaries
|
|
||||||
- Added support for changing the directory node-pre-gyp should build in with the `-C/--directory` option.
|
|
||||||
- Added support for S3 prefixes.
|
|
||||||
|
|
||||||
## 0.3.1
|
|
||||||
|
|
||||||
- Added `unpublish` command.
|
|
||||||
- Fixed module path construction in tests.
|
|
||||||
- Added ability to disable falling back to build behavior via `npm install --fallback-to-build=false`, which overrides the setting in a dependency's package.json `install` target.
|
|
||||||
|
|
||||||
## 0.3.0
|
|
||||||
|
|
||||||
- Support for packaging all files in `module_path` directory - see `app4` for example
|
|
||||||
- Added `testpackage` command.
|
|
||||||
- Changed `clean` command to only delete the `.node` file, not the entire `build` directory, since node-gyp will handle that.
|
|
||||||
- `.node` modules must be in a folder of their own since tar-pack will remove everything when it unpacks.
|
|
27
server/node_modules/@mapbox/node-pre-gyp/LICENSE
generated
vendored
27
server/node_modules/@mapbox/node-pre-gyp/LICENSE
generated
vendored
|
@@ -1,27 +0,0 @@
|
||||||
Copyright (c), Mapbox
|
|
||||||
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without modification,
|
|
||||||
are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer.
|
|
||||||
* Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer in the documentation
|
|
||||||
and/or other materials provided with the distribution.
|
|
||||||
* Neither the name of node-pre-gyp nor the names of its contributors
|
|
||||||
may be used to endorse or promote products derived from this software
|
|
||||||
without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
|
|
||||||
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
|
||||||
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
|
||||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
|
||||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
|
||||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
|
||||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
742
server/node_modules/@mapbox/node-pre-gyp/README.md
generated
vendored
742
server/node_modules/@mapbox/node-pre-gyp/README.md
generated
vendored
|
@@ -1,742 +0,0 @@
|
||||||
# @mapbox/node-pre-gyp
|
|
||||||
|
|
||||||
#### @mapbox/node-pre-gyp makes it easy to publish and install Node.js C++ addons from binaries
|
|
||||||
|
|
||||||
[![Build Status](https://travis-ci.com/mapbox/node-pre-gyp.svg?branch=master)](https://travis-ci.com/mapbox/node-pre-gyp)
|
|
||||||
[![Build status](https://ci.appveyor.com/api/projects/status/3nxewb425y83c0gv)](https://ci.appveyor.com/project/Mapbox/node-pre-gyp)
|
|
||||||
|
|
||||||
`@mapbox/node-pre-gyp` stands between [npm](https://github.com/npm/npm) and [node-gyp](https://github.com/Tootallnate/node-gyp) and offers a cross-platform method of binary deployment.
|
|
||||||
|
|
||||||
### Special note on previous package
|
|
||||||
|
|
||||||
On Feb 9th, 2021 `@mapbox/node-pre-gyp@1.0.0` was [released](./CHANGELOG.md). Older, unscoped versions that are not part of the `@mapbox` org are deprecated and only `@mapbox/node-pre-gyp` will see updates going forward. To upgrade to the new package do:
|
|
||||||
|
|
||||||
```
|
|
||||||
npm uninstall node-pre-gyp --save
|
|
||||||
npm install @mapbox/node-pre-gyp --save
|
|
||||||
```
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- A command line tool called `node-pre-gyp` that can install your package's C++ module from a binary.
|
|
||||||
- A variety of developer targeted commands for packaging, testing, and publishing binaries.
|
|
||||||
- A JavaScript module that can dynamically require your installed binary: `require('@mapbox/node-pre-gyp').find`
|
|
||||||
|
|
||||||
For a hello world example of a module packaged with `node-pre-gyp` see <https://github.com/springmeyer/node-addon-example> and [the wiki ](https://github.com/mapbox/node-pre-gyp/wiki/Modules-using-node-pre-gyp) for real world examples.
|
|
||||||
|
|
||||||
## Credits
|
|
||||||
|
|
||||||
- The module is modeled after [node-gyp](https://github.com/Tootallnate/node-gyp) by [@Tootallnate](https://github.com/Tootallnate)
|
|
||||||
- Motivation for initial development came from [@ErisDS](https://github.com/ErisDS) and the [Ghost Project](https://github.com/TryGhost/Ghost).
|
|
||||||
- Development is sponsored by [Mapbox](https://www.mapbox.com/)
|
|
||||||
|
|
||||||
## FAQ
|
|
||||||
|
|
||||||
See the [Frequently Ask Questions](https://github.com/mapbox/node-pre-gyp/wiki/FAQ).
|
|
||||||
|
|
||||||
## Depends
|
|
||||||
|
|
||||||
- Node.js >= node v8.x
|
|
||||||
|
|
||||||
## Install
|
|
||||||
|
|
||||||
`node-pre-gyp` is designed to be installed as a local dependency of your Node.js C++ addon and accessed like:
|
|
||||||
|
|
||||||
./node_modules/.bin/node-pre-gyp --help
|
|
||||||
|
|
||||||
But you can also install it globally:
|
|
||||||
|
|
||||||
npm install @mapbox/node-pre-gyp -g
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
### Commands
|
|
||||||
|
|
||||||
View all possible commands:
|
|
||||||
|
|
||||||
node-pre-gyp --help
|
|
||||||
|
|
||||||
- clean - Remove the entire folder containing the compiled .node module
|
|
||||||
- install - Install pre-built binary for module
|
|
||||||
- reinstall - Run "clean" and "install" at once
|
|
||||||
- build - Compile the module by dispatching to node-gyp or nw-gyp
|
|
||||||
- rebuild - Run "clean" and "build" at once
|
|
||||||
- package - Pack binary into tarball
|
|
||||||
- testpackage - Test that the staged package is valid
|
|
||||||
- publish - Publish pre-built binary
|
|
||||||
- unpublish - Unpublish pre-built binary
|
|
||||||
- info - Fetch info on published binaries
|
|
||||||
|
|
||||||
You can also chain commands:
|
|
||||||
|
|
||||||
node-pre-gyp clean build unpublish publish info
|
|
||||||
|
|
||||||
### Options
|
|
||||||
|
|
||||||
Options include:
|
|
||||||
|
|
||||||
- `-C/--directory`: run the command in this directory
|
|
||||||
- `--build-from-source`: build from source instead of using pre-built binary
|
|
||||||
- `--update-binary`: reinstall by replacing previously installed local binary with remote binary
|
|
||||||
- `--runtime=node-webkit`: customize the runtime: `node`, `electron` and `node-webkit` are the valid options
|
|
||||||
- `--fallback-to-build`: fallback to building from source if pre-built binary is not available
|
|
||||||
- `--target=0.4.0`: Pass the target node or node-webkit version to compile against
|
|
||||||
- `--target_arch=ia32`: Pass the target arch and override the host `arch`. Valid values are 'ia32','x64', or `arm`.
|
|
||||||
- `--target_platform=win32`: Pass the target platform and override the host `platform`. Valid values are `linux`, `darwin`, `win32`, `sunos`, `freebsd`, `openbsd`, and `aix`.
|
|
||||||
|
|
||||||
Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. You can pass `--fallback-to-build=false` to override the option as declared in package.json. In addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module.
|
|
||||||
|
|
||||||
For example: `npm install --build-from-source=myapp`. This is useful if:
|
|
||||||
|
|
||||||
- `myapp` is referenced in the package.json of a larger app and therefore `myapp` is being installed as a dependency with `npm install`.
|
|
||||||
- The larger app also depends on other modules installed with `node-pre-gyp`
|
|
||||||
- You only want to trigger a source compile for `myapp` and the other modules.
|
|
||||||
|
|
||||||
### Configuring
|
|
||||||
|
|
||||||
This is a guide to configuring your module to use node-pre-gyp.
|
|
||||||
|
|
||||||
#### 1) Add new entries to your `package.json`
|
|
||||||
|
|
||||||
- Add `@mapbox/node-pre-gyp` to `dependencies`
|
|
||||||
- Add `aws-sdk` as a `devDependency`
|
|
||||||
- Add a custom `install` script
|
|
||||||
- Declare a `binary` object
|
|
||||||
|
|
||||||
This looks like:
|
|
||||||
|
|
||||||
```js
|
|
||||||
"dependencies" : {
|
|
||||||
"@mapbox/node-pre-gyp": "1.x"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"aws-sdk": "2.x"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"install": "node-pre-gyp install --fallback-to-build"
|
|
||||||
},
|
|
||||||
"binary": {
|
|
||||||
"module_name": "your_module",
|
|
||||||
"module_path": "./lib/binding/",
|
|
||||||
"host": "https://your_module.s3-us-west-1.amazonaws.com"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
For a full example see [node-addon-examples's package.json](https://github.com/springmeyer/node-addon-example/blob/master/package.json).
|
|
||||||
|
|
||||||
Let's break this down:
|
|
||||||
|
|
||||||
- Dependencies need to list `node-pre-gyp`
|
|
||||||
- Your devDependencies should list `aws-sdk` so that you can run `node-pre-gyp publish` locally or on a CI system. We recommend using `devDependencies` only, since `aws-sdk` is large and not needed for `node-pre-gyp install`, which only uses http to fetch binaries
|
|
||||||
- Your `scripts` section should override the `install` target with `"install": "node-pre-gyp install --fallback-to-build"`. This allows node-pre-gyp to be used instead of the default npm behavior of always source compiling with `node-gyp` directly.
|
|
||||||
- Your package.json should contain a `binary` section describing key properties you provide to allow node-pre-gyp to package optimally. They are detailed below.
|
|
||||||
|
|
||||||
Note: in the past we recommended putting `@mapbox/node-pre-gyp` in the `bundledDependencies`, but we no longer recommend this. In the past there were npm bugs (with node versions 0.10.x) that could lead to node-pre-gyp not being available at the right time during install (unless we bundled). This should no longer be the case. Also, for a time we recommended using `"preinstall": "npm install @mapbox/node-pre-gyp"` as an alternative method to avoid needing to bundle. But this did not behave predictably across all npm versions - see https://github.com/mapbox/node-pre-gyp/issues/260 for the details. So we do not recommend using `preinstall` to install `@mapbox/node-pre-gyp`. More history on this at https://github.com/strongloop/fsevents/issues/157#issuecomment-265545908.
|
|
||||||
|
|
||||||
##### The `binary` object has three required properties
|
|
||||||
|
|
||||||
###### module_name
|
|
||||||
|
|
||||||
The name of your native node module. This value must:
|
|
||||||
|
|
||||||
- Match the name passed to [the NODE_MODULE macro](http://nodejs.org/api/addons.html#addons_hello_world)
|
|
||||||
- Must be a valid C variable name (e.g. it cannot contain `-`)
|
|
||||||
- Should not include the `.node` extension.
|
|
||||||
|
|
||||||
###### module_path
|
|
||||||
|
|
||||||
The location your native module is placed after a build. This should be an empty directory without other Javascript files. This entire directory will be packaged in the binary tarball. When installing from a remote package this directory will be overwritten with the contents of the tarball.
|
|
||||||
|
|
||||||
Note: This property supports variables based on [Versioning](#versioning).
|
|
||||||
|
|
||||||
###### host
|
|
||||||
|
|
||||||
A url to the remote location where you've published tarball binaries (must be `https` not `http`).
|
|
||||||
|
|
||||||
It is highly recommended that you use Amazon S3. The reasons are:
|
|
||||||
|
|
||||||
- Various node-pre-gyp commands like `publish` and `info` only work with an S3 host.
|
|
||||||
- S3 is a very solid hosting platform for distributing large files.
|
|
||||||
- We provide detail documentation for using [S3 hosting](#s3-hosting) with node-pre-gyp.
|
|
||||||
|
|
||||||
Why then not require S3? Because while some applications using node-pre-gyp need to distribute binaries as large as 20-30 MB, others might have very small binaries and might wish to store them in a GitHub repo. This is not recommended, but if an author really wants to host in a non-S3 location then it should be possible.
|
|
||||||
|
|
||||||
It should also be mentioned that there is an optional and entirely separate npm module called [node-pre-gyp-github](https://github.com/bchr02/node-pre-gyp-github) which is intended to complement node-pre-gyp and be installed along with it. It provides the ability to store and publish your binaries within your repository's GitHub Releases if you would rather not use S3 directly. Installation and usage instructions can be found [here](https://github.com/bchr02/node-pre-gyp-github), but the basic premise is that instead of using the ```node-pre-gyp publish``` command you would use ```node-pre-gyp-github publish```.
|
|
||||||
|
|
||||||
##### The `binary` object other optional S3 properties
|
|
||||||
|
|
||||||
If you are not using a standard s3 path like `bucket_name.s3(.-)region.amazonaws.com`, you might get an error on `publish` because node-pre-gyp extracts the region and bucket from the `host` url. For example, you may have an on-premises s3-compatible storage server, or may have configured a specific dns redirecting to an s3 endpoint. In these cases, you can explicitly set the `region` and `bucket` properties to tell node-pre-gyp to use these values instead of guessing from the `host` property. The following values can be used in the `binary` section:
|
|
||||||
|
|
||||||
###### host
|
|
||||||
|
|
||||||
The url to the remote server root location (must be `https` not `http`).
|
|
||||||
|
|
||||||
###### bucket
|
|
||||||
|
|
||||||
The bucket name where your tarball binaries should be located.
|
|
||||||
|
|
||||||
###### region
|
|
||||||
|
|
||||||
Your S3 server region.
|
|
||||||
|
|
||||||
###### s3ForcePathStyle
|
|
||||||
|
|
||||||
Set `s3ForcePathStyle` to true if the endpoint url should not be prefixed with the bucket name. If false (default), the server endpoint would be constructed as `bucket_name.your_server.com`.
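
As an illustration only (the host, bucket, and region values below are placeholders, not a recommendation), a `binary` section for an s3-compatible server might look like:

```js
"binary": {
    "module_name": "your_module",
    "module_path": "./lib/binding/",
    "host": "https://storage.example.com",
    "bucket": "your-bucket-name",
    "region": "us-east-1",
    "s3ForcePathStyle": true
}
```

With `s3ForcePathStyle` set to true, requests would target `https://storage.example.com/your-bucket-name/...` rather than a bucket-prefixed hostname.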
|
|
||||||
|
|
||||||
##### The `binary` object has optional properties
|
|
||||||
|
|
||||||
###### remote_path
|
|
||||||
|
|
||||||
It **is recommended** that you customize this property. This is an extra path to use for publishing and finding remote tarballs. The default value for `remote_path` is `""` meaning that if you do not provide it then all packages will be published at the base of the `host`. It is recommended to provide a value like `./{name}/v{version}` to help organize remote packages in the case that you choose to publish multiple node addons to the same `host`.
|
|
||||||
|
|
||||||
Note: This property supports variables based on [Versioning](#versioning).
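
For example (placeholder values), a versioned `remote_path` could be declared like:

```js
"binary": {
    "module_name": "your_module",
    "module_path": "./lib/binding/",
    "remote_path": "./{module_name}/v{version}",
    "host": "https://your_bucket.s3-us-west-1.amazonaws.com"
}
```

so that tarballs for different releases are published under separate prefixes on the `host`.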
|
|
||||||
|
|
||||||
###### package_name
|
|
||||||
|
|
||||||
It is **not recommended** to override this property unless you are also overriding the `remote_path`. This is the versioned name of the remote tarball containing the binary `.node` module and any supporting files you've placed inside the `module_path` directory. Unless you specify `package_name` in your `package.json` then it defaults to `{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz` which allows your binary to work across node versions, platforms, and architectures. If you are using `remote_path` that is also versioned by `./{module_name}/v{version}` then you could remove these variables from the `package_name` and just use: `{node_abi}-{platform}-{arch}.tar.gz`. Then your remote tarball will be looked up at, for example, `https://example.com/your-module/v0.1.0/node-v11-linux-x64.tar.gz`.
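
To make the default template concrete, here is a rough sketch of the kind of substitution node-pre-gyp performs (illustrative only, not the library's actual code):

```js
// Illustrative only: node-pre-gyp performs this expansion internally with
// more variables; this sketch just shows the idea for the default template.
var template = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';

function expand(tmpl, vars) {
  return tmpl.replace(/\{(\w+)\}/g, function (match, key) {
    return key in vars ? vars[key] : match;
  });
}

console.log(expand(template, {
  module_name: 'your_module',
  version: '0.1.0',
  node_abi: 'node-v' + process.versions.modules, // e.g. node-v72
  platform: process.platform,                    // e.g. linux
  arch: process.arch                             // e.g. x64
}));
// => your_module-v0.1.0-node-v72-linux-x64.tar.gz (values vary by machine)
```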
|
|
||||||
|
|
||||||
Avoiding the version of your module in the `package_name` and instead only embedding in a directory name can be useful when you want to make a quick tag of your module that does not change any C++ code. In this case you can just copy binaries to the new version behind the scenes like:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
aws s3 sync --acl public-read s3://mapbox-node-binary/sqlite3/v3.0.3/ s3://mapbox-node-binary/sqlite3/v3.0.4/
|
|
||||||
```
|
|
||||||
|
|
||||||
Note: This property supports variables based on [Versioning](#versioning).
|
|
||||||
|
|
||||||
#### 2) Add a new target to binding.gyp
|
|
||||||
|
|
||||||
`node-pre-gyp` calls out to `node-gyp` to compile the module and passes variables along like [module_name](#module_name) and [module_path](#module_path).
|
|
||||||
|
|
||||||
A new target must be added to `binding.gyp` that moves the compiled `.node` module from `./build/Release/module_name.node` into the directory specified by `module_path`.
|
|
||||||
|
|
||||||
Add a target like this at the end of your `targets` list:
|
|
||||||
|
|
||||||
```js
|
|
||||||
{
|
|
||||||
"target_name": "action_after_build",
|
|
||||||
"type": "none",
|
|
||||||
"dependencies": [ "<(module_name)" ],
|
|
||||||
"copies": [
|
|
||||||
{
|
|
||||||
"files": [ "<(PRODUCT_DIR)/<(module_name).node" ],
|
|
||||||
"destination": "<(module_path)"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
For a full example see [node-addon-example's binding.gyp](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/binding.gyp).
|
|
||||||
|
|
||||||
#### 3) Dynamically require your `.node`
|
|
||||||
|
|
||||||
Inside the main js file that requires your addon module you are likely currently doing:
|
|
||||||
|
|
||||||
```js
|
|
||||||
var binding = require('../build/Release/binding.node');
|
|
||||||
```
|
|
||||||
|
|
||||||
or:
|
|
||||||
|
|
||||||
```js
|
|
||||||
var bindings = require('./bindings')
|
|
||||||
```
|
|
||||||
|
|
||||||
Change those lines to:
|
|
||||||
|
|
||||||
```js
|
|
||||||
var binary = require('@mapbox/node-pre-gyp');
|
|
||||||
var path = require('path');
|
|
||||||
var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json')));
|
|
||||||
var binding = require(binding_path);
|
|
||||||
```
|
|
||||||
|
|
||||||
For a full example see [node-addon-example's index.js](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/index.js#L1-L4)
|
|
||||||
|
|
||||||
#### 4) Build and package your app
|
|
||||||
|
|
||||||
Now build your module from source:
|
|
||||||
|
|
||||||
npm install --build-from-source
|
|
||||||
|
|
||||||
The `--build-from-source` flag tells `node-pre-gyp` not to look for a remote package and instead dispatch to node-gyp to build.
|
|
||||||
|
|
||||||
`node-pre-gyp` should now also be installed as a local dependency, so the command line tool it offers can be found at `./node_modules/.bin/node-pre-gyp`.
|
|
||||||
|
|
||||||
#### 5) Test
|
|
||||||
|
|
||||||
Now `npm test` should work just as it did before.
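
If the module does not have a test yet, a minimal smoke test along these lines (file name and layout are illustrative; adjust to your project) confirms that the packaged binary can be located and loaded:

```js
// test/smoke.js - illustrative smoke test: checks that the locally built or
// downloaded binary can be found and required.
var assert = require('assert');
var path = require('path');
var binary = require('@mapbox/node-pre-gyp');

var binding_path = binary.find(path.resolve(path.join(__dirname, '../package.json')));
var binding = require(binding_path);

assert.ok(binding, 'native module loaded');
console.log('ok');
```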
|
|
||||||
|
|
||||||
#### 6) Publish the tarball
|
|
||||||
|
|
||||||
Then package your app:
|
|
||||||
|
|
||||||
./node_modules/.bin/node-pre-gyp package
|
|
||||||
|
|
||||||
Once packaged, now you can publish:
|
|
||||||
|
|
||||||
./node_modules/.bin/node-pre-gyp publish
|
|
||||||
|
|
||||||
Currently the `publish` command pushes your binary to S3. This requires:
|
|
||||||
|
|
||||||
- You have installed `aws-sdk` with `npm install aws-sdk`
|
|
||||||
- You have created a bucket already.
|
|
||||||
- The `host` points to an S3 http or https endpoint.
|
|
||||||
- You have configured node-pre-gyp to read your S3 credentials (see [S3 hosting](#s3-hosting) for details).
|
|
||||||
|
|
||||||
You can also host your binaries elsewhere. To do this requires:
|
|
||||||
|
|
||||||
- You manually publish the binary created by the `package` command to an `https` endpoint
|
|
||||||
- Ensure that the `host` value points to your custom `https` endpoint.
|
|
||||||
|
|
||||||
#### 7) Automate builds
|
|
||||||
|
|
||||||
Now you need to publish builds for all the platforms and node versions you wish to support. This is best automated.
|
|
||||||
|
|
||||||
- See [Appveyor Automation](#appveyor-automation) for how to auto-publish builds on Windows.
|
|
||||||
- See [Travis Automation](#travis-automation) for how to auto-publish builds on OS X and Linux.
|
|
||||||
|
|
||||||
#### 8) You're done!
|
|
||||||
|
|
||||||
Now publish your module to the npm registry. Users will now be able to install your module from a binary.
|
|
||||||
|
|
||||||
What will happen is this:
|
|
||||||
|
|
||||||
1. `npm install <your package>` will pull from the npm registry
|
|
||||||
2. npm will run the `install` script which will call out to `node-pre-gyp`
|
|
||||||
3. `node-pre-gyp` will fetch the binary `.node` module and unpack in the right place
|
|
||||||
4. Assuming that all worked, you are done
|
|
||||||
|
|
||||||
If a binary was not available for a given platform and `--fallback-to-build` was used, then `node-gyp rebuild` will be called to try to source compile the module.
|
|
||||||
|
|
||||||
#### 9) One more option
|
|
||||||
|
|
||||||
It may be that you want to work with two s3 buckets, one for staging and one for production; this
|
|
||||||
arrangement makes it less likely to accidentally overwrite a production binary. It also allows the production
|
|
||||||
environment to have more restrictive permissions than staging while still enabling publishing when
|
|
||||||
developing and testing.
|
|
||||||
|
|
||||||
The binary.host property can be set at execution time. In order to do so all of the following conditions
|
|
||||||
must be true.
|
|
||||||
|
|
||||||
- binary.host is falsey or not present
|
|
||||||
- binary.staging_host is not empty
|
|
||||||
- binary.production_host is not empty
|
|
||||||
|
|
||||||
If any of these checks fail then the operation will not perform execution time determination of the s3 target.
|
|
||||||
|
|
||||||
If the command being executed is either "publish" or "unpublish" then the default is set to `binary.staging_host`. In all other cases
|
|
||||||
the default is `binary.production_host`.
|
|
||||||
|
|
||||||
The command-line options `--s3_host=staging` or `--s3_host=production` override the default. If `s3_host`
|
|
||||||
is present and not `staging` or `production` an exception is thrown.
|
|
||||||
|
|
||||||
This allows installing from staging by specifying `--s3_host=staging`. And it requires specifying
|
|
||||||
`--s3_host=production` in order to publish to, or unpublish from, production, making accidental errors less likely.
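
A rough sketch of the selection rules described above (illustrative pseudocode, not node-pre-gyp's actual implementation):

```js
// Illustrative only: mirrors the rules described above.
function resolveHost(binary, command, s3_host) {
  var canChoose = !binary.host && binary.staging_host && binary.production_host;
  if (!canChoose) return binary.host; // no execution-time determination

  if (s3_host) {
    if (s3_host !== 'staging' && s3_host !== 'production') {
      throw new Error('invalid --s3_host value: ' + s3_host);
    }
    return s3_host === 'staging' ? binary.staging_host : binary.production_host;
  }

  // publish/unpublish default to staging; everything else to production.
  return (command === 'publish' || command === 'unpublish')
    ? binary.staging_host
    : binary.production_host;
}
```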
|
|
||||||
|
|
||||||
## Node-API Considerations
|
|
||||||
|
|
||||||
[Node-API](https://nodejs.org/api/n-api.html#n_api_node_api), which was previously known as N-API, is an ABI-stable alternative to previous technologies such as [nan](https://github.com/nodejs/nan) which are tied to a specific Node runtime engine. Node-API is Node runtime engine agnostic and guarantees modules created today will continue to run, without changes, into the future.
|
|
||||||
|
|
||||||
Using `node-pre-gyp` with Node-API projects requires a handful of additional configuration values and imposes some additional requirements.
|
|
||||||
|
|
||||||
The most significant difference is that a Node-API module can be coded to target multiple Node-API versions. Therefore, a Node-API module must declare in its `package.json` file which Node-API versions the module is designed to run against. In addition, since multiple builds may be required for a single module, path and file names must be specified in a way that avoids naming conflicts.
|
|
||||||
|
|
||||||
### The `napi_versions` array property
|
|
||||||
|
|
||||||
A Node-API module must declare in its `package.json` file the Node-API versions the module is intended to support. This is accomplished by including a `napi_versions` array property in the `binary` object. For example:
|
|
||||||
|
|
||||||
```js
|
|
||||||
"binary": {
|
|
||||||
"module_name": "your_module",
|
|
||||||
"module_path": "your_module_path",
|
|
||||||
"host": "https://your_bucket.s3-us-west-1.amazonaws.com",
|
|
||||||
"napi_versions": [1,3]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
If the `napi_versions` array property is *not* present, `node-pre-gyp` operates as it always has. Including the `napi_versions` array property instructs `node-pre-gyp` that this is a Node-API module build.
|
|
||||||
|
|
||||||
When the `napi_versions` array property is present, `node-pre-gyp` fires off multiple operations, one for each of the Node-API versions in the array. In the example above, two operations are initiated, one for Node-API version 1 and a second for Node-API version 3. How this version number is communicated is described next.
|
|
||||||
|
|
||||||
### The `napi_build_version` value
|
|
||||||
|
|
||||||
For each of the Node-API module operations `node-pre-gyp` initiates, it ensures that the `napi_build_version` is set appropriately.
|
|
||||||
|
|
||||||
This value is of importance in two areas:
|
|
||||||
|
|
||||||
1. The C/C++ code which needs to know against which Node-API version it should compile.
|
|
||||||
2. `node-pre-gyp` itself which must assign appropriate path and file names to avoid collisions.
|
|
||||||
|
|
||||||
### Defining `NAPI_VERSION` for the C/C++ code
|
|
||||||
|
|
||||||
The `napi_build_version` value is communicated to the C/C++ code by adding this code to the `binding.gyp` file:
|
|
||||||
|
|
||||||
```
|
|
||||||
"defines": [
|
|
||||||
"NAPI_VERSION=<(napi_build_version)",
|
|
||||||
]
|
|
||||||
```
|
|
||||||
|
|
||||||
This ensures that `NAPI_VERSION`, an integer value, is declared appropriately to the C/C++ code for each build.
|
|
||||||
|
|
||||||
> Note that earlier versions of this document recommended defining the symbol `NAPI_BUILD_VERSION`. `NAPI_VERSION` is preferred because it is used by the Node-API C/C++ headers to configure the specific Node-API versions being requested.
|
|
||||||
|
|
||||||
### Path and file naming requirements in `package.json`
|
|
||||||
|
|
||||||
Since `node-pre-gyp` fires off multiple operations for each request, it is essential that path and file names be created in such a way as to avoid collisions. This is accomplished by imposing additional path and file naming requirements.
|
|
||||||
|
|
||||||
Specifically, when performing Node-API builds, the `{napi_build_version}` text configuration value *must* be present in the `module_path` property. In addition, the `{napi_build_version}` text configuration value *must* be present in either the `remote_path` or `package_name` property. (No problem if it's in both.)
|
|
||||||
|
|
||||||
Here's an example:
|
|
||||||
|
|
||||||
```js
|
|
||||||
"binary": {
|
|
||||||
"module_name": "your_module",
|
|
||||||
"module_path": "./lib/binding/napi-v{napi_build_version}",
|
|
||||||
"remote_path": "./{module_name}/v{version}/{configuration}/",
|
|
||||||
"package_name": "{platform}-{arch}-napi-v{napi_build_version}.tar.gz",
|
|
||||||
"host": "https://your_bucket.s3-us-west-1.amazonaws.com",
|
|
||||||
"napi_versions": [1,3]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Supporting both Node-API and NAN builds
|
|
||||||
|
|
||||||
You may have a legacy native add-on that you wish to continue supporting for those versions of Node that do not support Node-API, as you add Node-API support for later Node versions. This can be accomplished by specifying the `node_napi_label` configuration value in the package.json `binary.package_name` property.
|
|
||||||
|
|
||||||
Placing the configuration value `node_napi_label` in the package.json `binary.package_name` property instructs `node-pre-gyp` to build all viable Node-API binaries supported by the current Node instance. If the current Node instance does not support Node-API, `node-pre-gyp` will request a traditional, non-Node-API build.
|
|
||||||
|
|
||||||
The configuration value `node_napi_label` is set by `node-pre-gyp` to the type of build created, `napi` or `node`, and the version number. For Node-API builds, the string contains the Node-API version and has values like `napi-v3`. For traditional, non-Node-API builds, the string contains the ABI version with values like `node-v46`.
|
|
||||||
|
|
||||||
Here's how the `binary` configuration above might be changed to support both Node-API and NAN builds:
|
|
||||||
|
|
||||||
```js
|
|
||||||
"binary": {
|
|
||||||
"module_name": "your_module",
|
|
||||||
"module_path": "./lib/binding/{node_napi_label}",
|
|
||||||
"remote_path": "./{module_name}/v{version}/{configuration}/",
|
|
||||||
"package_name": "{platform}-{arch}-{node_napi_label}.tar.gz",
|
|
||||||
"host": "https://your_bucket.s3-us-west-1.amazonaws.com",
|
|
||||||
"napi_versions": [1,3]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
The C/C++ symbol `NAPI_VERSION` can be used to distinguish Node-API and non-Node-API builds. The value of `NAPI_VERSION` is set to the integer Node-API version for Node-API builds and is set to `0` for non-Node-API builds.
|
|
||||||
|
|
||||||
For example:
|
|
||||||
|
|
||||||
```C
|
|
||||||
#if NAPI_VERSION
|
|
||||||
// Node-API code goes here
|
|
||||||
#else
|
|
||||||
// NAN code goes here
|
|
||||||
#endif
|
|
||||||
```
|
|
||||||
|
|
||||||
### Two additional configuration values
|
|
||||||
|
|
||||||
The following two configuration values, which were implemented in previous versions of `node-pre-gyp`, continue to exist, but have been replaced by the `node_napi_label` configuration value described above.
|
|
||||||
|
|
||||||
1. `napi_version` If Node-API is supported by the currently executing Node instance, this value is the Node-API version number supported by Node. If Node-API is not supported, this value is an empty string.
|
|
||||||
|
|
||||||
2. `node_abi_napi` If the value returned for `napi_version` is non empty, this value is `'napi'`. If the value returned for `napi_version` is empty, this value is the value returned for `node_abi`.
|
|
||||||
|
|
||||||
These values are present for use in the `binding.gyp` file and may be used as `{napi_version}` and `{node_abi_napi}` for text substitution in the `binary` properties of the `package.json` file.
|
|
||||||
|
|
||||||
## S3 Hosting
|
|
||||||
|
|
||||||
You can host wherever you choose but S3 is cheap, `node-pre-gyp publish` expects it, and S3 can be integrated well with [Travis.ci](http://travis-ci.org) to automate builds for OS X and Ubuntu, and with [Appveyor](http://appveyor.com) to automate builds for Windows. Here is an approach to do this:
|
|
||||||
|
|
||||||
First, get setup locally and test the workflow:
|
|
||||||
|
|
||||||
#### 1) Create an S3 bucket
|
|
||||||
|
|
||||||
And have your **key** and **secret key** ready for writing to the bucket.
|
|
||||||
|
|
||||||
It is recommended to create an IAM user with a policy that only gives permissions to the specific bucket you plan to publish to. This can be done in the [IAM console](https://console.aws.amazon.com/iam/) by: 1) adding a new user, 2) choosing `Attach User Policy`, 3) using the `Policy Generator`, 4) selecting `Amazon S3` for the service, 5) adding the actions: `DeleteObject`, `GetObject`, `GetObjectAcl`, `ListBucket`, `HeadBucket`, `PutObject`, `PutObjectAcl`, 6) adding an ARN of `arn:aws:s3:::bucket/*` (replacing `bucket` with your bucket name), and finally 7) clicking `Add Statement` and saving the policy. It should generate a policy like:
|
|
||||||
|
|
||||||
```js
|
|
||||||
{
|
|
||||||
"Version": "2012-10-17",
|
|
||||||
"Statement": [
|
|
||||||
{
|
|
||||||
"Sid": "objects",
|
|
||||||
"Effect": "Allow",
|
|
||||||
"Action": [
|
|
||||||
"s3:PutObject",
|
|
||||||
"s3:GetObjectAcl",
|
|
||||||
"s3:GetObject",
|
|
||||||
"s3:DeleteObject",
|
|
||||||
"s3:PutObjectAcl"
|
|
||||||
],
|
|
||||||
"Resource": "arn:aws:s3:::your-bucket-name/*"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Sid": "bucket",
|
|
||||||
"Effect": "Allow",
|
|
||||||
"Action": "s3:ListBucket",
|
|
||||||
"Resource": "arn:aws:s3:::your-bucket-name"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"Sid": "buckets",
|
|
||||||
"Effect": "Allow",
|
|
||||||
"Action": "s3:HeadBucket",
|
|
||||||
"Resource": "*"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 2) Install node-pre-gyp
|
|
||||||
|
|
||||||
Either install it globally:
|
|
||||||
|
|
||||||
npm install node-pre-gyp -g
|
|
||||||
|
|
||||||
Or put the local version on your PATH
|
|
||||||
|
|
||||||
export PATH=`pwd`/node_modules/.bin/:$PATH
|
|
||||||
|
|
||||||
#### 3) Configure AWS credentials
|
|
||||||
|
|
||||||
It is recommended to configure the AWS JS SDK v2 used internally by `node-pre-gyp` by setting these environment variables:
|
|
||||||
|
|
||||||
- AWS_ACCESS_KEY_ID
|
|
||||||
- AWS_SECRET_ACCESS_KEY
|
|
||||||
|
|
||||||
You can also use the `Shared Config File` mentioned [in the AWS JS SDK v2 docs](https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/configuring-the-jssdk.html)
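
As a quick, optional sanity check that the AWS JS SDK v2 can see those credentials (the bucket name and region below are placeholders), something like the following can be run locally:

```js
// Illustrative credential check; assumes `aws-sdk` (v2) is installed and
// AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY are set in the environment.
var AWS = require('aws-sdk');

var s3 = new AWS.S3({ region: 'us-west-1' }); // use your bucket's region

s3.listObjectsV2({ Bucket: 'your_bucket', MaxKeys: 1 }, function (err, data) {
  if (err) return console.error('credentials or permissions problem:', err.message);
  console.log('ok, bucket reachable; keys returned:', data.KeyCount);
});
```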
|
|
||||||
|
|
||||||
#### 4) Package and publish your build
|
|
||||||
|
|
||||||
Install the `aws-sdk`:
|
|
||||||
|
|
||||||
npm install aws-sdk
|
|
||||||
|
|
||||||
Then publish:
|
|
||||||
|
|
||||||
node-pre-gyp package publish
|
|
||||||
|
|
||||||
Note: if you hit an error like `Hostname/IP doesn't match certificate's altnames` it may mean that you need to provide the `region` option in your config.
|
|
||||||
|
|
||||||
## Appveyor Automation
|
|
||||||
|
|
||||||
[Appveyor](http://www.appveyor.com/) can build binaries and publish the results per commit and supports:
|
|
||||||
|
|
||||||
- Windows Visual Studio 2013 and related compilers
|
|
||||||
- Both 64 bit (x64) and 32 bit (x86) build configurations
|
|
||||||
- Multiple Node.js versions
|
|
||||||
|
|
||||||
For an example of doing this see [node-sqlite3's appveyor.yml](https://github.com/mapbox/node-sqlite3/blob/master/appveyor.yml).
|
|
||||||
|
|
||||||
Below is a guide to getting set up:
|
|
||||||
|
|
||||||
#### 1) Create a free Appveyor account
|
|
||||||
|
|
||||||
Go to https://ci.appveyor.com/signup/free and sign in with your GitHub account.
|
|
||||||
|
|
||||||
#### 2) Create a new project
|
|
||||||
|
|
||||||
Go to https://ci.appveyor.com/projects/new and select the GitHub repo for your module
|
|
||||||
|
|
||||||
#### 3) Add appveyor.yml and push it
|
|
||||||
|
|
||||||
Once you have committed an `appveyor.yml` ([appveyor.yml reference](http://www.appveyor.com/docs/appveyor-yml)) to your GitHub repo and pushed it, AppVeyor should automatically start building your project.
|
|
||||||
|
|
||||||
#### 4) Create secure variables
|
|
||||||
|
|
||||||
Encrypt your S3 AWS keys by going to <https://ci.appveyor.com/tools/encrypt> and hitting the `encrypt` button.
|
|
||||||
|
|
||||||
Then paste the result into your `appveyor.yml`
|
|
||||||
|
|
||||||
```yml
|
|
||||||
environment:
|
|
||||||
AWS_ACCESS_KEY_ID:
|
|
||||||
secure: Dn9HKdLNYvDgPdQOzRq/DqZ/MPhjknRHB1o+/lVU8MA=
|
|
||||||
AWS_SECRET_ACCESS_KEY:
|
|
||||||
secure: W1rwNoSnOku1r+28gnoufO8UA8iWADmL1LiiwH9IOkIVhDTNGdGPJqAlLjNqwLnL
|
|
||||||
```
|
|
||||||
|
|
||||||
NOTE: keys are per account but not per repo (this differs from Travis, where keys are per repo but not related to the account used to encrypt them).
|
|
||||||
|
|
||||||
#### 5) Hook up publishing
|
|
||||||
|
|
||||||
Just put `node-pre-gyp package publish` in your `appveyor.yml` after `npm install`.
|
|
||||||
|
|
||||||
#### 6) Publish when you want
|
|
||||||
|
|
||||||
You might wish to publish binaries only on a specific commit. To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`:
|
|
||||||
|
|
||||||
SET CM=%APPVEYOR_REPO_COMMIT_MESSAGE%
|
|
||||||
if not "%CM%" == "%CM:[publish binary]=%" node-pre-gyp --msvs_version=2013 publish
|
|
||||||
|
|
||||||
If your commit message contains special characters (e.g. `&`) this method might fail. An alternative is to use PowerShell, which gives you additional possibilities, like ignoring case by using `ToLower()`:
|
|
||||||
|
|
||||||
ps: if($env:APPVEYOR_REPO_COMMIT_MESSAGE.ToLower().Contains('[publish binary]')) { node-pre-gyp --msvs_version=2013 publish }
|
|
||||||
|
|
||||||
Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package.
|
|
||||||
|
|
||||||
## Travis Automation

[Travis](https://travis-ci.org/) can push to S3 after a successful build and supports both:

- Ubuntu Precise and OS X (64 bit)
- Multiple Node.js versions

For an example of doing this see [node-addon-example's .travis.yml](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/.travis.yml).

Note: if you need 32 bit binaries, this can be done from a 64 bit Travis machine. See [the node-sqlite3 scripts for an example of doing this](https://github.com/mapbox/node-sqlite3/blob/bae122aa6a2b8a45f6b717fab24e207740e32b5d/scripts/build_against_node.sh#L54-L74).

Below is a guide to getting set up:

#### 1) Install the Travis gem

    gem install travis

#### 2) Create secure variables

Make sure you run this command from within the directory of your module.

Use `travis-encrypt` like:

    travis encrypt AWS_ACCESS_KEY_ID=${node_pre_gyp_accessKeyId}
    travis encrypt AWS_SECRET_ACCESS_KEY=${node_pre_gyp_secretAccessKey}

Then put those values in your `.travis.yml` like:

```yaml
env:
  global:
    - secure: F+sEL/v56CzHqmCSSES4pEyC9NeQlkoR0Gs/ZuZxX1ytrj8SKtp3MKqBj7zhIclSdXBz4Ev966Da5ctmcTd410p0b240MV6BVOkLUtkjZJyErMBOkeb8n8yVfSoeMx8RiIhBmIvEn+rlQq+bSFis61/JkE9rxsjkGRZi14hHr4M=
    - secure: o2nkUQIiABD139XS6L8pxq3XO5gch27hvm/gOdV+dzNKc/s2KomVPWcOyXNxtJGhtecAkABzaW8KHDDi5QL1kNEFx6BxFVMLO8rjFPsMVaBG9Ks6JiDQkkmrGNcnVdxI/6EKTLHTH5WLsz8+J7caDBzvKbEfTux5EamEhxIWgrI=
```

More details on Travis encryption at http://about.travis-ci.org/docs/user/encryption-keys/.

#### 3) Hook up publishing

Just put `node-pre-gyp package publish` in your `.travis.yml` after `npm install`.
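For orientation, a minimal Linux-only `.travis.yml` along these lines might look like the following sketch. The Node.js version and the explicit `node_modules/.bin` path are illustrative assumptions; substitute your own encrypted values from step 2.

```yml
# Sketch only: install, test, then package and publish the binary to S3.
language: node_js
node_js:
  - "6"

env:
  global:
    - secure: <encrypted AWS_ACCESS_KEY_ID>
    - secure: <encrypted AWS_SECRET_ACCESS_KEY>

install:
  - npm install

script:
  - npm test
  # stage the compiled binary as a tarball, then upload it
  - ./node_modules/.bin/node-pre-gyp package publish
```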
##### OS X publishing

If you want binaries for OS X in addition to Linux you can enable [multi-os for Travis](http://docs.travis-ci.com/user/multi-os/#Setting-.travis.yml)

Use a configuration like:

```yml
language: cpp

os:
  - linux
  - osx

env:
  matrix:
    - NODE_VERSION="4"
    - NODE_VERSION="6"

before_install:
  - rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
  - source ~/.nvm/nvm.sh
  - nvm install $NODE_VERSION
  - nvm use $NODE_VERSION
```

See [Travis OS X Gotchas](#travis-os-x-gotchas) for why we replace `language: node_js` and `node_js:` sections with `language: cpp` and a custom matrix.

Also create platform-specific sections for any deps that need to be installed. For example, if you need libpng:

```yml
- if [ $(uname -s) == 'Linux' ]; then apt-get install libpng-dev; fi;
- if [ $(uname -s) == 'Darwin' ]; then brew install libpng; fi;
```

For detailed multi-OS examples see [node-mapnik](https://github.com/mapnik/node-mapnik/blob/master/.travis.yml) and [node-sqlite3](https://github.com/mapbox/node-sqlite3/blob/master/.travis.yml).

##### Travis OS X Gotchas

First, unlike the Travis Linux machines, the OS X machines do not put `node-pre-gyp` on PATH by default. To do so you will need to:

```sh
export PATH=$(pwd)/node_modules/.bin:${PATH}
```

Second, the OS X machines do not support using a matrix for installing different Node.js versions. So you need to bootstrap the installation of Node.js in a cross-platform way.

By doing:

```yml
env:
  matrix:
    - NODE_VERSION="4"
    - NODE_VERSION="6"

before_install:
  - rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm
  - source ~/.nvm/nvm.sh
  - nvm install $NODE_VERSION
  - nvm use $NODE_VERSION
```

You can easily recreate the previous behavior of this matrix:

```yml
node_js:
  - "4"
  - "6"
```
#### 4) Publish when you want

You might wish to publish binaries only on a specific commit. To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`:

    COMMIT_MESSAGE=$(git log --format=%B --no-merges -n 1 | tr -d '\n')
    if [[ ${COMMIT_MESSAGE} =~ "[publish binary]" ]]; then node-pre-gyp publish; fi;

Then you can trigger new binaries to be built like:

    git commit -a -m "[publish binary]"

Or, if you don't have any changes to make, simply run:

    git commit --allow-empty -m "[publish binary]"

WARNING: if you are working in a pull request and publishing binaries from there, you will want to avoid double publishing when Travis CI builds both the `push` and the `pr`. You only want to run the publish on the `push` commit. See https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/is_pr_merge.sh (which is called from https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/publish.sh) for an example of how to do this. A minimal sketch of the idea follows below.
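A minimal sketch of that guard, assuming the standard `TRAVIS_PULL_REQUEST` environment variable (this is not the linked node-osrm script, just an illustration of the idea):

```sh
#!/usr/bin/env bash
# Sketch only: publish from push builds, never from pull-request builds.
set -eu

if [[ "${TRAVIS_PULL_REQUEST:-false}" != "false" ]]; then
  echo "pull request build: skipping binary publish"
  exit 0
fi

COMMIT_MESSAGE=$(git log --format=%B --no-merges -n 1 | tr -d '\n')
if [[ ${COMMIT_MESSAGE} =~ "[publish binary]" ]]; then
  ./node_modules/.bin/node-pre-gyp package publish
fi
```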
Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package. To automate the publishing of your entire package to npm on Travis see http://about.travis-ci.org/docs/user/deployment/npm/
# Versioning

The `binary` properties `module_path`, `remote_path`, and `package_name` support variable substitution. The strings are evaluated by `node-pre-gyp` depending on your system and any custom build flags you passed.

- `node_abi`: The node C++ `ABI` number. This value is available in JavaScript as `process.versions.modules` as of [`>= v0.10.4 >= v0.11.7`](https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e) and in C++ as the `NODE_MODULE_VERSION` define much earlier. For versions of Node before this was available we fall back to the V8 major and minor version.
- `platform` matches node's `process.platform` like `linux`, `darwin`, and `win32` unless the user passed the `--target_platform` option to override.
- `arch` matches node's `process.arch` like `x64` or `ia32` unless the user passes the `--target_arch` option to override.
- `libc` matches `require('detect-libc').family` like `glibc` or `musl` unless the user passes the `--target_libc` option to override.
- `configuration` - Either 'Release' or 'Debug' depending on whether `--debug` is passed during the build.
- `module_name` - the `binary.module_name` attribute from `package.json`.
- `version` - the semver `version` value for your module from `package.json` (NOTE: ignores the `semver.build` property).
- `major`, `minor`, `patch`, and `prerelease` match the individual semver values for your module's `version`.
- `build` - the semver `build` value. For example it would be `this.that` if your package.json `version` was `v1.0.0+this.that`.
- `prerelease` - the semver `prerelease` value. For example it would be `alpha.beta` if your package.json `version` was `v1.0.0-alpha.beta`.

The options are visible in the code at <https://github.com/mapbox/node-pre-gyp/blob/612b7bca2604508d881e1187614870ba19a7f0c5/lib/util/versioning.js#L114-L127>. A sketch of how these variables are typically used in a `package.json` `binary` config is shown below.
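A minimal sketch of a `binary` stanza that exercises these substitutions (the package name, paths, and bucket URL are illustrative assumptions, not taken from any particular module):

```json
{
  "name": "my-module",
  "version": "1.0.0",
  "binary": {
    "module_name": "my_module",
    "module_path": "./lib/binding/{configuration}/{node_abi}-{platform}-{arch}",
    "remote_path": "./{module_name}/v{version}/{configuration}/",
    "package_name": "{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz",
    "host": "https://my-module-binaries.s3-us-west-1.amazonaws.com"
  }
}
```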
# Download binary files from a mirror

S3 is blocked in China for well-known reasons, so a mirror can be used instead.

Using the `npm` config argument `--{module_name}_binary_host_mirror`, binary files can be downloaded through a mirror; any `-` in `module_name` is replaced with `_`.

e.g.: Install [v8-profiler](https://www.npmjs.com/package/v8-profiler) from `npm`.

```bash
$ npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/
```

e.g.: Install [canvas-prebuilt](https://www.npmjs.com/package/canvas-prebuilt) from `npm`.

```bash
$ npm install canvas-prebuilt --canvas_prebuilt_binary_host_mirror=https://npm.taobao.org/mirrors/canvas-prebuilt/
```
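As an aside (an assumption based on how npm passes its config to install scripts, not something stated above), the same mirror settings can usually be made persistent in a project-level `.npmrc` instead of being passed on every command line:

```ini
profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/
canvas_prebuilt_binary_host_mirror=https://npm.taobao.org/mirrors/canvas-prebuilt/
```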
4
server/node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp
generated
vendored
|
@ -1,4 +0,0 @@
|
||||||
#!/usr/bin/env node

'use strict';

require('../lib/main');
|
|
2
server/node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp.cmd
generated
vendored
|
@ -1,2 +0,0 @@
|
||||||
@echo off
node "%~dp0\node-pre-gyp" %*
|
|
10
server/node_modules/@mapbox/node-pre-gyp/contributing.md
generated
vendored
|
@ -1,10 +0,0 @@
|
||||||
# Contributing

### Releasing a new version:

- Ensure tests are passing on travis and appveyor
- Run `node scripts/abi_crosswalk.js` and commit any changes
- Update the changelog
- Tag a new release like: `git tag -a v0.6.34 -m "tagging v0.6.34" && git push --tags`
- Run `npm publish`
|
|
51
server/node_modules/@mapbox/node-pre-gyp/lib/build.js
generated
vendored
|
@ -1,51 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = build;
|
|
||||||
|
|
||||||
exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp';
|
|
||||||
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
const compile = require('./util/compile.js');
|
|
||||||
const handle_gyp_opts = require('./util/handle_gyp_opts.js');
|
|
||||||
const configure = require('./configure.js');
|
|
||||||
|
|
||||||
function do_build(gyp, argv, callback) {
|
|
||||||
handle_gyp_opts(gyp, argv, (err, result) => {
|
|
||||||
let final_args = ['build'].concat(result.gyp).concat(result.pre);
|
|
||||||
if (result.unparsed.length > 0) {
|
|
||||||
final_args = final_args.
|
|
||||||
concat(['--']).
|
|
||||||
concat(result.unparsed);
|
|
||||||
}
|
|
||||||
if (!err && result.opts.napi_build_version) {
|
|
||||||
napi.swap_build_dir_in(result.opts.napi_build_version);
|
|
||||||
}
|
|
||||||
compile.run_gyp(final_args, result.opts, (err2) => {
|
|
||||||
if (result.opts.napi_build_version) {
|
|
||||||
napi.swap_build_dir_out(result.opts.napi_build_version);
|
|
||||||
}
|
|
||||||
return callback(err2);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function build(gyp, argv, callback) {
|
|
||||||
|
|
||||||
// Form up commands to pass to node-gyp:
|
|
||||||
// We map `node-pre-gyp build` to `node-gyp configure build` so that we do not
|
|
||||||
// trigger a clean and therefore do not pay the penalty of a full recompile
|
|
||||||
if (argv.length && (argv.indexOf('rebuild') > -1)) {
|
|
||||||
argv.shift(); // remove `rebuild`
|
|
||||||
// here we map `node-pre-gyp rebuild` to `node-gyp rebuild` which internally means
|
|
||||||
// "clean + configure + build" and triggers a full recompile
|
|
||||||
compile.run_gyp(['clean'], {}, (err3) => {
|
|
||||||
if (err3) return callback(err3);
|
|
||||||
configure(gyp, argv, (err4) => {
|
|
||||||
if (err4) return callback(err4);
|
|
||||||
return do_build(gyp, argv, callback);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
return do_build(gyp, argv, callback);
|
|
||||||
}
|
|
||||||
}
|
|
31
server/node_modules/@mapbox/node-pre-gyp/lib/clean.js
generated
vendored
|
@ -1,31 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = clean;
|
|
||||||
|
|
||||||
exports.usage = 'Removes the entire folder containing the compiled .node module';
|
|
||||||
|
|
||||||
const rm = require('rimraf');
|
|
||||||
const exists = require('fs').exists || require('path').exists;
|
|
||||||
const versioning = require('./util/versioning.js');
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
const path = require('path');
|
|
||||||
|
|
||||||
function clean(gyp, argv, callback) {
|
|
||||||
const package_json = gyp.package_json;
|
|
||||||
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
|
||||||
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
|
||||||
const to_delete = opts.module_path;
|
|
||||||
if (!to_delete) {
|
|
||||||
return callback(new Error('module_path is empty, refusing to delete'));
|
|
||||||
} else if (path.normalize(to_delete) === path.normalize(process.cwd())) {
|
|
||||||
return callback(new Error('module_path is the current working directory, refusing to delete'));
|
|
||||||
} else {
|
|
||||||
exists(to_delete, (found) => {
|
|
||||||
if (found) {
|
|
||||||
if (!gyp.opts.silent_clean) console.log('[' + package_json.name + '] Removing "%s"', to_delete);
|
|
||||||
return rm(to_delete, callback);
|
|
||||||
}
|
|
||||||
return callback();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
52
server/node_modules/@mapbox/node-pre-gyp/lib/configure.js
generated
vendored
|
@ -1,52 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = configure;
|
|
||||||
|
|
||||||
exports.usage = 'Attempts to configure node-gyp or nw-gyp build';
|
|
||||||
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
const compile = require('./util/compile.js');
|
|
||||||
const handle_gyp_opts = require('./util/handle_gyp_opts.js');
|
|
||||||
|
|
||||||
function configure(gyp, argv, callback) {
|
|
||||||
handle_gyp_opts(gyp, argv, (err, result) => {
|
|
||||||
let final_args = result.gyp.concat(result.pre);
|
|
||||||
// pull select node-gyp configure options out of the npm environ
|
|
||||||
const known_gyp_args = ['dist-url', 'python', 'nodedir', 'msvs_version'];
|
|
||||||
known_gyp_args.forEach((key) => {
|
|
||||||
const val = gyp.opts[key] || gyp.opts[key.replace('-', '_')];
|
|
||||||
if (val) {
|
|
||||||
final_args.push('--' + key + '=' + val);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
// --ensure=false tells node-gyp to re-install node development headers
|
|
||||||
// but it is only respected by node-gyp install, so we have to call install
|
|
||||||
// as a separate step if the user passes it
|
|
||||||
if (gyp.opts.ensure === false) {
|
|
||||||
const install_args = final_args.concat(['install', '--ensure=false']);
|
|
||||||
compile.run_gyp(install_args, result.opts, (err2) => {
|
|
||||||
if (err2) return callback(err2);
|
|
||||||
if (result.unparsed.length > 0) {
|
|
||||||
final_args = final_args.
|
|
||||||
concat(['--']).
|
|
||||||
concat(result.unparsed);
|
|
||||||
}
|
|
||||||
compile.run_gyp(['configure'].concat(final_args), result.opts, (err3) => {
|
|
||||||
return callback(err3);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
if (result.unparsed.length > 0) {
|
|
||||||
final_args = final_args.
|
|
||||||
concat(['--']).
|
|
||||||
concat(result.unparsed);
|
|
||||||
}
|
|
||||||
compile.run_gyp(['configure'].concat(final_args), result.opts, (err4) => {
|
|
||||||
if (!err4 && result.opts.napi_build_version) {
|
|
||||||
napi.swap_build_dir_out(result.opts.napi_build_version);
|
|
||||||
}
|
|
||||||
return callback(err4);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
38
server/node_modules/@mapbox/node-pre-gyp/lib/info.js
generated
vendored
|
@ -1,38 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = info;
|
|
||||||
|
|
||||||
exports.usage = 'Lists all published binaries (requires aws-sdk)';
|
|
||||||
|
|
||||||
const log = require('npmlog');
|
|
||||||
const versioning = require('./util/versioning.js');
|
|
||||||
const s3_setup = require('./util/s3_setup.js');
|
|
||||||
|
|
||||||
function info(gyp, argv, callback) {
|
|
||||||
const package_json = gyp.package_json;
|
|
||||||
const opts = versioning.evaluate(package_json, gyp.opts);
|
|
||||||
const config = {};
|
|
||||||
s3_setup.detect(opts, config);
|
|
||||||
const s3 = s3_setup.get_s3(config);
|
|
||||||
const s3_opts = {
|
|
||||||
Bucket: config.bucket,
|
|
||||||
Prefix: config.prefix
|
|
||||||
};
|
|
||||||
s3.listObjects(s3_opts, (err, meta) => {
|
|
||||||
if (err && err.code === 'NotFound') {
|
|
||||||
return callback(new Error('[' + package_json.name + '] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix));
|
|
||||||
} else if (err) {
|
|
||||||
return callback(err);
|
|
||||||
} else {
|
|
||||||
log.verbose(JSON.stringify(meta, null, 1));
|
|
||||||
if (meta && meta.Contents) {
|
|
||||||
meta.Contents.forEach((obj) => {
|
|
||||||
console.log(obj.Key);
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
console.error('[' + package_json.name + '] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix);
|
|
||||||
}
|
|
||||||
return callback();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
235
server/node_modules/@mapbox/node-pre-gyp/lib/install.js
generated
vendored
|
@ -1,235 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = install;
|
|
||||||
|
|
||||||
exports.usage = 'Attempts to install pre-built binary for module';
|
|
||||||
|
|
||||||
const fs = require('fs');
|
|
||||||
const path = require('path');
|
|
||||||
const log = require('npmlog');
|
|
||||||
const existsAsync = fs.exists || path.exists;
|
|
||||||
const versioning = require('./util/versioning.js');
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
const makeDir = require('make-dir');
|
|
||||||
// for fetching binaries
|
|
||||||
const fetch = require('node-fetch');
|
|
||||||
const tar = require('tar');
|
|
||||||
|
|
||||||
let npgVersion = 'unknown';
|
|
||||||
try {
|
|
||||||
// Read our own package.json to get the current node-pre-gyp version.
|
|
||||||
const ownPackageJSON = fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8');
|
|
||||||
npgVersion = JSON.parse(ownPackageJSON).version;
|
|
||||||
} catch (e) {
|
|
||||||
// do nothing
|
|
||||||
}
|
|
||||||
|
|
||||||
function place_binary(uri, targetDir, opts, callback) {
|
|
||||||
log.http('GET', uri);
|
|
||||||
|
|
||||||
// Try getting version info from the currently running npm.
|
|
||||||
const envVersionInfo = process.env.npm_config_user_agent ||
|
|
||||||
'node ' + process.version;
|
|
||||||
|
|
||||||
const sanitized = uri.replace('+', '%2B');
|
|
||||||
const requestOpts = {
|
|
||||||
uri: sanitized,
|
|
||||||
headers: {
|
|
||||||
'User-Agent': 'node-pre-gyp (v' + npgVersion + ', ' + envVersionInfo + ')'
|
|
||||||
},
|
|
||||||
follow_max: 10
|
|
||||||
};
|
|
||||||
|
|
||||||
if (opts.cafile) {
|
|
||||||
try {
|
|
||||||
requestOpts.ca = fs.readFileSync(opts.cafile);
|
|
||||||
} catch (e) {
|
|
||||||
return callback(e);
|
|
||||||
}
|
|
||||||
} else if (opts.ca) {
|
|
||||||
requestOpts.ca = opts.ca;
|
|
||||||
}
|
|
||||||
|
|
||||||
const proxyUrl = opts.proxy ||
|
|
||||||
process.env.http_proxy ||
|
|
||||||
process.env.HTTP_PROXY ||
|
|
||||||
process.env.npm_config_proxy;
|
|
||||||
let agent;
|
|
||||||
if (proxyUrl) {
|
|
||||||
const ProxyAgent = require('https-proxy-agent');
|
|
||||||
agent = new ProxyAgent(proxyUrl);
|
|
||||||
log.http('download', 'proxy agent configured using: "%s"', proxyUrl);
|
|
||||||
}
|
|
||||||
|
|
||||||
fetch(sanitized, { agent })
|
|
||||||
.then((res) => {
|
|
||||||
if (!res.ok) {
|
|
||||||
throw new Error(`response status ${res.status} ${res.statusText} on ${sanitized}`);
|
|
||||||
}
|
|
||||||
const dataStream = res.body;
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
let extractions = 0;
|
|
||||||
const countExtractions = (entry) => {
|
|
||||||
extractions += 1;
|
|
||||||
log.info('install', 'unpacking %s', entry.path);
|
|
||||||
};
|
|
||||||
|
|
||||||
dataStream.pipe(extract(targetDir, countExtractions))
|
|
||||||
.on('error', (e) => {
|
|
||||||
reject(e);
|
|
||||||
});
|
|
||||||
dataStream.on('end', () => {
|
|
||||||
resolve(`extracted file count: ${extractions}`);
|
|
||||||
});
|
|
||||||
dataStream.on('error', (e) => {
|
|
||||||
reject(e);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.then((text) => {
|
|
||||||
log.info(text);
|
|
||||||
callback();
|
|
||||||
})
|
|
||||||
.catch((e) => {
|
|
||||||
log.error(`install ${e.message}`);
|
|
||||||
callback(e);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function extract(to, onentry) {
|
|
||||||
return tar.extract({
|
|
||||||
cwd: to,
|
|
||||||
strip: 1,
|
|
||||||
onentry
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function extract_from_local(from, targetDir, callback) {
|
|
||||||
if (!fs.existsSync(from)) {
|
|
||||||
return callback(new Error('Cannot find file ' + from));
|
|
||||||
}
|
|
||||||
log.info('Found local file to extract from ' + from);
|
|
||||||
|
|
||||||
// extract helpers
|
|
||||||
let extractCount = 0;
|
|
||||||
function countExtractions(entry) {
|
|
||||||
extractCount += 1;
|
|
||||||
log.info('install', 'unpacking ' + entry.path);
|
|
||||||
}
|
|
||||||
function afterExtract(err) {
|
|
||||||
if (err) return callback(err);
|
|
||||||
if (extractCount === 0) {
|
|
||||||
return callback(new Error('There was a fatal problem while extracting the tarball'));
|
|
||||||
}
|
|
||||||
log.info('tarball', 'done parsing tarball');
|
|
||||||
callback();
|
|
||||||
}
|
|
||||||
|
|
||||||
fs.createReadStream(from).pipe(extract(targetDir, countExtractions))
|
|
||||||
.on('close', afterExtract)
|
|
||||||
.on('error', afterExtract);
|
|
||||||
}
|
|
||||||
|
|
||||||
function do_build(gyp, argv, callback) {
|
|
||||||
const args = ['rebuild'].concat(argv);
|
|
||||||
gyp.todo.push({ name: 'build', args: args });
|
|
||||||
process.nextTick(callback);
|
|
||||||
}
|
|
||||||
|
|
||||||
function print_fallback_error(err, opts, package_json) {
|
|
||||||
const fallback_message = ' (falling back to source compile with node-gyp)';
|
|
||||||
let full_message = '';
|
|
||||||
if (err.statusCode !== undefined) {
|
|
||||||
// If we got a network response but failed to download,
|
|
||||||
// it means remote binaries are not available, so let's try to help
|
|
||||||
// the user/developer with the info to debug why
|
|
||||||
full_message = 'Pre-built binaries not found for ' + package_json.name + '@' + package_json.version;
|
|
||||||
full_message += ' and ' + opts.runtime + '@' + (opts.target || process.versions.node) + ' (' + opts.node_abi + ' ABI, ' + opts.libc + ')';
|
|
||||||
full_message += fallback_message;
|
|
||||||
log.warn('Tried to download(' + err.statusCode + '): ' + opts.hosted_tarball);
|
|
||||||
log.warn(full_message);
|
|
||||||
log.http(err.message);
|
|
||||||
} else {
|
|
||||||
// If we do not have a statusCode that means an unexpected error
|
|
||||||
// happened and prevented an http response, so we output the exact error
|
|
||||||
full_message = 'Pre-built binaries not installable for ' + package_json.name + '@' + package_json.version;
|
|
||||||
full_message += ' and ' + opts.runtime + '@' + (opts.target || process.versions.node) + ' (' + opts.node_abi + ' ABI, ' + opts.libc + ')';
|
|
||||||
full_message += fallback_message;
|
|
||||||
log.warn(full_message);
|
|
||||||
log.warn('Hit error ' + err.message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//
|
|
||||||
// install
|
|
||||||
//
|
|
||||||
function install(gyp, argv, callback) {
|
|
||||||
const package_json = gyp.package_json;
|
|
||||||
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
|
||||||
const source_build = gyp.opts['build-from-source'] || gyp.opts.build_from_source;
|
|
||||||
const update_binary = gyp.opts['update-binary'] || gyp.opts.update_binary;
|
|
||||||
const should_do_source_build = source_build === package_json.name || (source_build === true || source_build === 'true');
|
|
||||||
if (should_do_source_build) {
|
|
||||||
log.info('build', 'requesting source compile');
|
|
||||||
return do_build(gyp, argv, callback);
|
|
||||||
} else {
|
|
||||||
const fallback_to_build = gyp.opts['fallback-to-build'] || gyp.opts.fallback_to_build;
|
|
||||||
let should_do_fallback_build = fallback_to_build === package_json.name || (fallback_to_build === true || fallback_to_build === 'true');
|
|
||||||
// but allow override from npm
|
|
||||||
if (process.env.npm_config_argv) {
|
|
||||||
const cooked = JSON.parse(process.env.npm_config_argv).cooked;
|
|
||||||
const match = cooked.indexOf('--fallback-to-build');
|
|
||||||
if (match > -1 && cooked.length > match && cooked[match + 1] === 'false') {
|
|
||||||
should_do_fallback_build = false;
|
|
||||||
log.info('install', 'Build fallback disabled via npm flag: --fallback-to-build=false');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let opts;
|
|
||||||
try {
|
|
||||||
opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
|
||||||
} catch (err) {
|
|
||||||
return callback(err);
|
|
||||||
}
|
|
||||||
|
|
||||||
opts.ca = gyp.opts.ca;
|
|
||||||
opts.cafile = gyp.opts.cafile;
|
|
||||||
|
|
||||||
const from = opts.hosted_tarball;
|
|
||||||
const to = opts.module_path;
|
|
||||||
const binary_module = path.join(to, opts.module_name + '.node');
|
|
||||||
existsAsync(binary_module, (found) => {
|
|
||||||
if (!update_binary) {
|
|
||||||
if (found) {
|
|
||||||
console.log('[' + package_json.name + '] Success: "' + binary_module + '" already installed');
|
|
||||||
console.log('Pass --update-binary to reinstall or --build-from-source to recompile');
|
|
||||||
return callback();
|
|
||||||
}
|
|
||||||
log.info('check', 'checked for "' + binary_module + '" (not found)');
|
|
||||||
}
|
|
||||||
|
|
||||||
makeDir(to).then(() => {
|
|
||||||
const fileName = from.startsWith('file://') && from.slice('file://'.length);
|
|
||||||
if (fileName) {
|
|
||||||
extract_from_local(fileName, to, after_place);
|
|
||||||
} else {
|
|
||||||
place_binary(from, to, opts, after_place);
|
|
||||||
}
|
|
||||||
}).catch((err) => {
|
|
||||||
after_place(err);
|
|
||||||
});
|
|
||||||
|
|
||||||
function after_place(err) {
|
|
||||||
if (err && should_do_fallback_build) {
|
|
||||||
print_fallback_error(err, opts, package_json);
|
|
||||||
return do_build(gyp, argv, callback);
|
|
||||||
} else if (err) {
|
|
||||||
return callback(err);
|
|
||||||
} else {
|
|
||||||
console.log('[' + package_json.name + '] Success: "' + binary_module + '" is installed via remote');
|
|
||||||
return callback();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
125
server/node_modules/@mapbox/node-pre-gyp/lib/main.js
generated
vendored
|
@ -1,125 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set the title.
|
|
||||||
*/
|
|
||||||
|
|
||||||
process.title = 'node-pre-gyp';
|
|
||||||
|
|
||||||
const node_pre_gyp = require('../');
|
|
||||||
const log = require('npmlog');
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Process and execute the selected commands.
|
|
||||||
*/
|
|
||||||
|
|
||||||
const prog = new node_pre_gyp.Run({ argv: process.argv });
|
|
||||||
let completed = false;
|
|
||||||
|
|
||||||
if (prog.todo.length === 0) {
|
|
||||||
if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) {
|
|
||||||
console.log('v%s', prog.version);
|
|
||||||
process.exit(0);
|
|
||||||
} else if (~process.argv.indexOf('-h') || ~process.argv.indexOf('--help')) {
|
|
||||||
console.log('%s', prog.usage());
|
|
||||||
process.exit(0);
|
|
||||||
}
|
|
||||||
console.log('%s', prog.usage());
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
// if --no-color is passed
|
|
||||||
if (prog.opts && Object.hasOwnProperty.call(prog, 'color') && !prog.opts.color) {
|
|
||||||
log.disableColor();
|
|
||||||
}
|
|
||||||
|
|
||||||
log.info('it worked if it ends with', 'ok');
|
|
||||||
log.verbose('cli', process.argv);
|
|
||||||
log.info('using', process.title + '@%s', prog.version);
|
|
||||||
log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch);
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Change dir if -C/--directory was passed.
|
|
||||||
*/
|
|
||||||
|
|
||||||
const dir = prog.opts.directory;
|
|
||||||
if (dir) {
|
|
||||||
const fs = require('fs');
|
|
||||||
try {
|
|
||||||
const stat = fs.statSync(dir);
|
|
||||||
if (stat.isDirectory()) {
|
|
||||||
log.info('chdir', dir);
|
|
||||||
process.chdir(dir);
|
|
||||||
} else {
|
|
||||||
log.warn('chdir', dir + ' is not a directory');
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
if (e.code === 'ENOENT') {
|
|
||||||
log.warn('chdir', dir + ' is not a directory');
|
|
||||||
} else {
|
|
||||||
log.warn('chdir', 'error during chdir() "%s"', e.message);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function run() {
|
|
||||||
const command = prog.todo.shift();
|
|
||||||
if (!command) {
|
|
||||||
// done!
|
|
||||||
completed = true;
|
|
||||||
log.info('ok');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// set binary.host when appropriate. host determines the s3 target bucket.
|
|
||||||
const target = prog.setBinaryHostProperty(command.name);
|
|
||||||
if (target && ['install', 'publish', 'unpublish', 'info'].indexOf(command.name) >= 0) {
|
|
||||||
log.info('using binary.host: ' + prog.package_json.binary.host);
|
|
||||||
}
|
|
||||||
|
|
||||||
prog.commands[command.name](command.args, function(err) {
|
|
||||||
if (err) {
|
|
||||||
log.error(command.name + ' error');
|
|
||||||
log.error('stack', err.stack);
|
|
||||||
errorMessage();
|
|
||||||
log.error('not ok');
|
|
||||||
console.log(err.message);
|
|
||||||
return process.exit(1);
|
|
||||||
}
|
|
||||||
const args_array = [].slice.call(arguments, 1);
|
|
||||||
if (args_array.length) {
|
|
||||||
console.log.apply(console, args_array);
|
|
||||||
}
|
|
||||||
// now run the next command in the queue
|
|
||||||
process.nextTick(run);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
process.on('exit', (code) => {
|
|
||||||
if (!completed && !code) {
|
|
||||||
log.error('Completion callback never invoked!');
|
|
||||||
errorMessage();
|
|
||||||
process.exit(6);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
process.on('uncaughtException', (err) => {
|
|
||||||
log.error('UNCAUGHT EXCEPTION');
|
|
||||||
log.error('stack', err.stack);
|
|
||||||
errorMessage();
|
|
||||||
process.exit(7);
|
|
||||||
});
|
|
||||||
|
|
||||||
function errorMessage() {
|
|
||||||
// copied from npm's lib/util/error-handler.js
|
|
||||||
const os = require('os');
|
|
||||||
log.error('System', os.type() + ' ' + os.release());
|
|
||||||
log.error('command', process.argv.map(JSON.stringify).join(' '));
|
|
||||||
log.error('cwd', process.cwd());
|
|
||||||
log.error('node -v', process.version);
|
|
||||||
log.error(process.title + ' -v', 'v' + prog.package.version);
|
|
||||||
}
|
|
||||||
|
|
||||||
// start running the given commands!
|
|
||||||
run();
|
|
309
server/node_modules/@mapbox/node-pre-gyp/lib/node-pre-gyp.js
generated
vendored
|
@ -1,309 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Module exports.
|
|
||||||
*/
|
|
||||||
|
|
||||||
module.exports = exports;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Module dependencies.
|
|
||||||
*/
|
|
||||||
|
|
||||||
// load mocking control function for accessing s3 via https. the function is a noop always returning
|
|
||||||
// false if not mocking.
|
|
||||||
exports.mockS3Http = require('./util/s3_setup').get_mockS3Http();
|
|
||||||
exports.mockS3Http('on');
|
|
||||||
const mocking = exports.mockS3Http('get');
|
|
||||||
|
|
||||||
|
|
||||||
const fs = require('fs');
|
|
||||||
const path = require('path');
|
|
||||||
const nopt = require('nopt');
|
|
||||||
const log = require('npmlog');
|
|
||||||
log.disableProgress();
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
|
|
||||||
const EE = require('events').EventEmitter;
|
|
||||||
const inherits = require('util').inherits;
|
|
||||||
const cli_commands = [
|
|
||||||
'clean',
|
|
||||||
'install',
|
|
||||||
'reinstall',
|
|
||||||
'build',
|
|
||||||
'rebuild',
|
|
||||||
'package',
|
|
||||||
'testpackage',
|
|
||||||
'publish',
|
|
||||||
'unpublish',
|
|
||||||
'info',
|
|
||||||
'testbinary',
|
|
||||||
'reveal',
|
|
||||||
'configure'
|
|
||||||
];
|
|
||||||
const aliases = {};
|
|
||||||
|
|
||||||
// differentiate node-pre-gyp's logs from npm's
|
|
||||||
log.heading = 'node-pre-gyp';
|
|
||||||
|
|
||||||
if (mocking) {
|
|
||||||
log.warn(`mocking s3 to ${process.env.node_pre_gyp_mock_s3}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// this is a getter to avoid circular reference warnings with node v14.
|
|
||||||
Object.defineProperty(exports, 'find', {
|
|
||||||
get: function() {
|
|
||||||
return require('./pre-binding').find;
|
|
||||||
},
|
|
||||||
enumerable: true
|
|
||||||
});
|
|
||||||
|
|
||||||
// in the following, "my_module" is using node-pre-gyp to
|
|
||||||
// prebuild and install pre-built binaries. "main_module"
|
|
||||||
// is using "my_module".
|
|
||||||
//
|
|
||||||
// "bin/node-pre-gyp" invokes Run() without a path. the
|
|
||||||
// expectation is that the working directory is the package
|
|
||||||
// root "my_module". this is true because in all cases npm is
|
|
||||||
// executing a script in the context of "my_module".
|
|
||||||
//
|
|
||||||
// "pre-binding.find()" is executed by "my_module" but in the
|
|
||||||
// context of "main_module". this is because "main_module" is
|
|
||||||
// executing and requires "my_module" which is then executing
|
|
||||||
// "pre-binding.find()" via "node-pre-gyp.find()", so the working
|
|
||||||
// directory is that of "main_module".
|
|
||||||
//
|
|
||||||
// that's why "find()" must pass the path to package.json.
|
|
||||||
//
|
|
||||||
function Run({ package_json_path = './package.json', argv }) {
|
|
||||||
this.package_json_path = package_json_path;
|
|
||||||
this.commands = {};
|
|
||||||
|
|
||||||
const self = this;
|
|
||||||
cli_commands.forEach((command) => {
|
|
||||||
self.commands[command] = function(argvx, callback) {
|
|
||||||
log.verbose('command', command, argvx);
|
|
||||||
return require('./' + command)(self, argvx, callback);
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
this.parseArgv(argv);
|
|
||||||
|
|
||||||
// this is set to true after the binary.host property was set to
|
|
||||||
// either staging_host or production_host.
|
|
||||||
this.binaryHostSet = false;
|
|
||||||
}
|
|
||||||
inherits(Run, EE);
|
|
||||||
exports.Run = Run;
|
|
||||||
const proto = Run.prototype;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Export the contents of the package.json.
|
|
||||||
*/
|
|
||||||
|
|
||||||
proto.package = require('../package.json');
|
|
||||||
|
|
||||||
/**
|
|
||||||
* nopt configuration definitions
|
|
||||||
*/
|
|
||||||
|
|
||||||
proto.configDefs = {
|
|
||||||
help: Boolean, // everywhere
|
|
||||||
arch: String, // 'configure'
|
|
||||||
debug: Boolean, // 'build'
|
|
||||||
directory: String, // bin
|
|
||||||
proxy: String, // 'install'
|
|
||||||
loglevel: String // everywhere
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* nopt shorthands
|
|
||||||
*/
|
|
||||||
|
|
||||||
proto.shorthands = {
|
|
||||||
release: '--no-debug',
|
|
||||||
C: '--directory',
|
|
||||||
debug: '--debug',
|
|
||||||
j: '--jobs',
|
|
||||||
silent: '--loglevel=silent',
|
|
||||||
silly: '--loglevel=silly',
|
|
||||||
verbose: '--loglevel=verbose'
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* expose the command aliases for the bin file to use.
|
|
||||||
*/
|
|
||||||
|
|
||||||
proto.aliases = aliases;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parses the given argv array and sets the 'opts', 'argv',
|
|
||||||
* 'command', and 'package_json' properties.
|
|
||||||
*/
|
|
||||||
|
|
||||||
proto.parseArgv = function parseOpts(argv) {
|
|
||||||
this.opts = nopt(this.configDefs, this.shorthands, argv);
|
|
||||||
this.argv = this.opts.argv.remain.slice();
|
|
||||||
const commands = this.todo = [];
|
|
||||||
|
|
||||||
// create a copy of the argv array with aliases mapped
|
|
||||||
argv = this.argv.map((arg) => {
|
|
||||||
// is this an alias?
|
|
||||||
if (arg in this.aliases) {
|
|
||||||
arg = this.aliases[arg];
|
|
||||||
}
|
|
||||||
return arg;
|
|
||||||
});
|
|
||||||
|
|
||||||
// process the mapped args into "command" objects ("name" and "args" props)
|
|
||||||
argv.slice().forEach((arg) => {
|
|
||||||
if (arg in this.commands) {
|
|
||||||
const args = argv.splice(0, argv.indexOf(arg));
|
|
||||||
argv.shift();
|
|
||||||
if (commands.length > 0) {
|
|
||||||
commands[commands.length - 1].args = args;
|
|
||||||
}
|
|
||||||
commands.push({ name: arg, args: [] });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
if (commands.length > 0) {
|
|
||||||
commands[commands.length - 1].args = argv.splice(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
// if a directory was specified package.json is assumed to be relative
|
|
||||||
// to it.
|
|
||||||
let package_json_path = this.package_json_path;
|
|
||||||
if (this.opts.directory) {
|
|
||||||
package_json_path = path.join(this.opts.directory, package_json_path);
|
|
||||||
}
|
|
||||||
|
|
||||||
this.package_json = JSON.parse(fs.readFileSync(package_json_path));
|
|
||||||
|
|
||||||
// expand commands entries for multiple napi builds
|
|
||||||
this.todo = napi.expand_commands(this.package_json, this.opts, commands);
|
|
||||||
|
|
||||||
// support for inheriting config env variables from npm
|
|
||||||
const npm_config_prefix = 'npm_config_';
|
|
||||||
Object.keys(process.env).forEach((name) => {
|
|
||||||
if (name.indexOf(npm_config_prefix) !== 0) return;
|
|
||||||
const val = process.env[name];
|
|
||||||
if (name === npm_config_prefix + 'loglevel') {
|
|
||||||
log.level = val;
|
|
||||||
} else {
|
|
||||||
// add the user-defined options to the config
|
|
||||||
name = name.substring(npm_config_prefix.length);
|
|
||||||
// avoid npm argv clobbering already-present args,
|
|
||||||
// which avoids the problem of 'npm test' calling a
|
|
||||||
// script that runs unique npm install commands
|
|
||||||
if (name === 'argv') {
|
|
||||||
if (this.opts.argv &&
|
|
||||||
this.opts.argv.remain &&
|
|
||||||
this.opts.argv.remain.length) {
|
|
||||||
// do nothing
|
|
||||||
} else {
|
|
||||||
this.opts[name] = val;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
this.opts[name] = val;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (this.opts.loglevel) {
|
|
||||||
log.level = this.opts.loglevel;
|
|
||||||
}
|
|
||||||
log.resume();
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* allow the binary.host property to be set at execution time.
|
|
||||||
*
|
|
||||||
* for this to take effect requires all the following to be true.
|
|
||||||
* - binary is a property in package.json
|
|
||||||
* - binary.host is falsey
|
|
||||||
* - binary.staging_host is not empty
|
|
||||||
* - binary.production_host is not empty
|
|
||||||
*
|
|
||||||
* if any of the previous checks fail then the function returns an empty string
|
|
||||||
* and makes no changes to package.json's binary property.
|
|
||||||
*
|
|
||||||
*
|
|
||||||
* if command is "publish" then the default is set to "binary.staging_host"
|
|
||||||
* if command is not "publish" then the default is set to "binary.production_host"
|
|
||||||
*
|
|
||||||
* if the command-line option '--s3_host' is set to "staging" or "production" then
|
|
||||||
* "binary.host" is set to the specified "staging_host" or "production_host". if
|
|
||||||
* '--s3_host' is any other value an exception is thrown.
|
|
||||||
*
|
|
||||||
* if '--s3_host' is not present then "binary.host" is set to the default as above.
|
|
||||||
*
|
|
||||||
* this strategy was chosen so that any command other than "publish" or "unpublish" uses "production"
|
|
||||||
* as the default without requiring any command-line options but that "publish" and "unpublish" require
|
|
||||||
* '--s3_host production_host' to be specified in order to *really* publish (or unpublish). publishing
|
|
||||||
* to staging can be done freely without worrying about disturbing any production releases.
|
|
||||||
*/
|
|
||||||
proto.setBinaryHostProperty = function(command) {
|
|
||||||
if (this.binaryHostSet) {
|
|
||||||
return this.package_json.binary.host;
|
|
||||||
}
|
|
||||||
const p = this.package_json;
|
|
||||||
// don't set anything if host is present. it must be left blank to trigger this.
|
|
||||||
if (!p || !p.binary || p.binary.host) {
|
|
||||||
return '';
|
|
||||||
}
|
|
||||||
// and both staging and production must be present. errors will be reported later.
|
|
||||||
if (!p.binary.staging_host || !p.binary.production_host) {
|
|
||||||
return '';
|
|
||||||
}
|
|
||||||
let target = 'production_host';
|
|
||||||
if (command === 'publish' || command === 'unpublish') {
|
|
||||||
target = 'staging_host';
|
|
||||||
}
|
|
||||||
// the environment variable has priority over the default or the command line. if
|
|
||||||
// either the env var or the command line option are invalid throw an error.
|
|
||||||
const npg_s3_host = process.env.node_pre_gyp_s3_host;
|
|
||||||
if (npg_s3_host === 'staging' || npg_s3_host === 'production') {
|
|
||||||
target = `${npg_s3_host}_host`;
|
|
||||||
} else if (this.opts['s3_host'] === 'staging' || this.opts['s3_host'] === 'production') {
|
|
||||||
target = `${this.opts['s3_host']}_host`;
|
|
||||||
} else if (this.opts['s3_host'] || npg_s3_host) {
|
|
||||||
throw new Error(`invalid s3_host ${this.opts['s3_host'] || npg_s3_host}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
p.binary.host = p.binary[target];
|
|
||||||
this.binaryHostSet = true;
|
|
||||||
|
|
||||||
return p.binary.host;
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Returns the usage instructions for node-pre-gyp.
|
|
||||||
*/
|
|
||||||
|
|
||||||
proto.usage = function usage() {
|
|
||||||
const str = [
|
|
||||||
'',
|
|
||||||
' Usage: node-pre-gyp <command> [options]',
|
|
||||||
'',
|
|
||||||
' where <command> is one of:',
|
|
||||||
cli_commands.map((c) => {
|
|
||||||
return ' - ' + c + ' - ' + require('./' + c).usage;
|
|
||||||
}).join('\n'),
|
|
||||||
'',
|
|
||||||
'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'),
|
|
||||||
'node@' + process.versions.node
|
|
||||||
].join('\n');
|
|
||||||
return str;
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Version number getter.
|
|
||||||
*/
|
|
||||||
|
|
||||||
Object.defineProperty(proto, 'version', {
|
|
||||||
get: function() {
|
|
||||||
return this.package.version;
|
|
||||||
},
|
|
||||||
enumerable: true
|
|
||||||
});
|
|
73
server/node_modules/@mapbox/node-pre-gyp/lib/package.js
generated
vendored
|
@ -1,73 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = _package;
|
|
||||||
|
|
||||||
exports.usage = 'Packs binary (and enclosing directory) into locally staged tarball';
|
|
||||||
|
|
||||||
const fs = require('fs');
|
|
||||||
const path = require('path');
|
|
||||||
const log = require('npmlog');
|
|
||||||
const versioning = require('./util/versioning.js');
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
const existsAsync = fs.exists || path.exists;
|
|
||||||
const makeDir = require('make-dir');
|
|
||||||
const tar = require('tar');
|
|
||||||
|
|
||||||
function readdirSync(dir) {
|
|
||||||
let list = [];
|
|
||||||
const files = fs.readdirSync(dir);
|
|
||||||
|
|
||||||
files.forEach((file) => {
|
|
||||||
const stats = fs.lstatSync(path.join(dir, file));
|
|
||||||
if (stats.isDirectory()) {
|
|
||||||
list = list.concat(readdirSync(path.join(dir, file)));
|
|
||||||
} else {
|
|
||||||
list.push(path.join(dir, file));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
return list;
|
|
||||||
}
|
|
||||||
|
|
||||||
function _package(gyp, argv, callback) {
|
|
||||||
const package_json = gyp.package_json;
|
|
||||||
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
|
||||||
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
|
||||||
const from = opts.module_path;
|
|
||||||
const binary_module = path.join(from, opts.module_name + '.node');
|
|
||||||
existsAsync(binary_module, (found) => {
|
|
||||||
if (!found) {
|
|
||||||
return callback(new Error('Cannot package because ' + binary_module + ' missing: run `node-pre-gyp rebuild` first'));
|
|
||||||
}
|
|
||||||
const tarball = opts.staged_tarball;
|
|
||||||
const filter_func = function(entry) {
|
|
||||||
const basename = path.basename(entry);
|
|
||||||
if (basename.length && basename[0] !== '.') {
|
|
||||||
console.log('packing ' + entry);
|
|
||||||
return true;
|
|
||||||
} else {
|
|
||||||
console.log('skipping ' + entry);
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
makeDir(path.dirname(tarball)).then(() => {
|
|
||||||
let files = readdirSync(from);
|
|
||||||
const base = path.basename(from);
|
|
||||||
files = files.map((file) => {
|
|
||||||
return path.join(base, path.relative(from, file));
|
|
||||||
});
|
|
||||||
tar.create({
|
|
||||||
portable: false,
|
|
||||||
gzip: true,
|
|
||||||
filter: filter_func,
|
|
||||||
file: tarball,
|
|
||||||
cwd: path.dirname(from)
|
|
||||||
}, files, (err2) => {
|
|
||||||
if (err2) console.error('[' + package_json.name + '] ' + err2.message);
|
|
||||||
else log.info('package', 'Binary staged at "' + tarball + '"');
|
|
||||||
return callback(err2);
|
|
||||||
});
|
|
||||||
}).catch((err) => {
|
|
||||||
return callback(err);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
34
server/node_modules/@mapbox/node-pre-gyp/lib/pre-binding.js
generated
vendored
|
@ -1,34 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
const npg = require('..');
|
|
||||||
const versioning = require('../lib/util/versioning.js');
|
|
||||||
const napi = require('../lib/util/napi.js');
|
|
||||||
const existsSync = require('fs').existsSync || require('path').existsSync;
|
|
||||||
const path = require('path');
|
|
||||||
|
|
||||||
module.exports = exports;
|
|
||||||
|
|
||||||
exports.usage = 'Finds the require path for the node-pre-gyp installed module';
|
|
||||||
|
|
||||||
exports.validate = function(package_json, opts) {
|
|
||||||
versioning.validate_config(package_json, opts);
|
|
||||||
};
|
|
||||||
|
|
||||||
exports.find = function(package_json_path, opts) {
|
|
||||||
if (!existsSync(package_json_path)) {
|
|
||||||
throw new Error(package_json_path + ' does not exist');
|
|
||||||
}
|
|
||||||
const prog = new npg.Run({ package_json_path, argv: process.argv });
|
|
||||||
prog.setBinaryHostProperty();
|
|
||||||
const package_json = prog.package_json;
|
|
||||||
|
|
||||||
versioning.validate_config(package_json, opts);
|
|
||||||
let napi_build_version;
|
|
||||||
if (napi.get_napi_build_versions(package_json, opts)) {
|
|
||||||
napi_build_version = napi.get_best_napi_build_version(package_json, opts);
|
|
||||||
}
|
|
||||||
opts = opts || {};
|
|
||||||
if (!opts.module_root) opts.module_root = path.dirname(package_json_path);
|
|
||||||
const meta = versioning.evaluate(package_json, opts, napi_build_version);
|
|
||||||
return meta.module;
|
|
||||||
};
|
|
81
server/node_modules/@mapbox/node-pre-gyp/lib/publish.js
generated
vendored
|
@ -1,81 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = publish;
|
|
||||||
|
|
||||||
exports.usage = 'Publishes pre-built binary (requires aws-sdk)';
|
|
||||||
|
|
||||||
const fs = require('fs');
|
|
||||||
const path = require('path');
|
|
||||||
const log = require('npmlog');
|
|
||||||
const versioning = require('./util/versioning.js');
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
const s3_setup = require('./util/s3_setup.js');
|
|
||||||
const existsAsync = fs.exists || path.exists;
|
|
||||||
const url = require('url');
|
|
||||||
|
|
||||||
function publish(gyp, argv, callback) {
|
|
||||||
const package_json = gyp.package_json;
|
|
||||||
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
|
||||||
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
|
||||||
const tarball = opts.staged_tarball;
|
|
||||||
existsAsync(tarball, (found) => {
|
|
||||||
if (!found) {
|
|
||||||
return callback(new Error('Cannot publish because ' + tarball + ' missing: run `node-pre-gyp package` first'));
|
|
||||||
}
|
|
||||||
|
|
||||||
log.info('publish', 'Detecting s3 credentials');
|
|
||||||
const config = {};
|
|
||||||
s3_setup.detect(opts, config);
|
|
||||||
const s3 = s3_setup.get_s3(config);
|
|
||||||
|
|
||||||
const key_name = url.resolve(config.prefix, opts.package_name);
|
|
||||||
const s3_opts = {
|
|
||||||
Bucket: config.bucket,
|
|
||||||
Key: key_name
|
|
||||||
};
|
|
||||||
log.info('publish', 'Authenticating with s3');
|
|
||||||
log.info('publish', config);
|
|
||||||
|
|
||||||
log.info('publish', 'Checking for existing binary at ' + opts.hosted_path);
|
|
||||||
s3.headObject(s3_opts, (err, meta) => {
|
|
||||||
if (meta) log.info('publish', JSON.stringify(meta));
|
|
||||||
if (err && err.code === 'NotFound') {
|
|
||||||
// we are safe to publish because
|
|
||||||
// the object does not already exist
|
|
||||||
log.info('publish', 'Preparing to put object');
|
|
||||||
const s3_put_opts = {
|
|
||||||
ACL: 'public-read',
|
|
||||||
Body: fs.createReadStream(tarball),
|
|
||||||
Key: key_name,
|
|
||||||
Bucket: config.bucket
|
|
||||||
};
|
|
||||||
log.info('publish', 'Putting object', s3_put_opts.ACL, s3_put_opts.Bucket, s3_put_opts.Key);
|
|
||||||
try {
|
|
||||||
s3.putObject(s3_put_opts, (err2, resp) => {
|
|
||||||
log.info('publish', 'returned from putting object');
|
|
||||||
if (err2) {
|
|
||||||
log.info('publish', 's3 putObject error: "' + err2 + '"');
|
|
||||||
return callback(err2);
|
|
||||||
}
|
|
||||||
if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"');
|
|
||||||
log.info('publish', 'successfully put object');
|
|
||||||
console.log('[' + package_json.name + '] published to ' + opts.hosted_path);
|
|
||||||
return callback();
|
|
||||||
});
|
|
||||||
} catch (err3) {
|
|
||||||
log.info('publish', 's3 putObject error: "' + err3 + '"');
|
|
||||||
return callback(err3);
|
|
||||||
}
|
|
||||||
} else if (err) {
|
|
||||||
log.info('publish', 's3 headObject error: "' + err + '"');
|
|
||||||
return callback(err);
|
|
||||||
} else {
|
|
||||||
log.error('publish', 'Cannot publish over existing version');
|
|
||||||
log.error('publish', "Update the 'version' field in package.json and try again");
|
|
||||||
log.error('publish', 'If the previous version was published in error see:');
|
|
||||||
log.error('publish', '\t node-pre-gyp unpublish');
|
|
||||||
return callback(new Error('Failed publishing to ' + opts.hosted_path));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
20
server/node_modules/@mapbox/node-pre-gyp/lib/rebuild.js
generated
vendored
|
@ -1,20 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = rebuild;
|
|
||||||
|
|
||||||
exports.usage = 'Runs "clean" and "build" at once';
|
|
||||||
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
|
|
||||||
function rebuild(gyp, argv, callback) {
|
|
||||||
const package_json = gyp.package_json;
|
|
||||||
let commands = [
|
|
||||||
{ name: 'clean', args: [] },
|
|
||||||
{ name: 'build', args: ['rebuild'] }
|
|
||||||
];
|
|
||||||
commands = napi.expand_commands(package_json, gyp.opts, commands);
|
|
||||||
for (let i = commands.length; i !== 0; i--) {
|
|
||||||
gyp.todo.unshift(commands[i - 1]);
|
|
||||||
}
|
|
||||||
process.nextTick(callback);
|
|
||||||
}
|
|
19
server/node_modules/@mapbox/node-pre-gyp/lib/reinstall.js
generated
vendored
|
@ -1,19 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = rebuild;
|
|
||||||
|
|
||||||
exports.usage = 'Runs "clean" and "install" at once';
|
|
||||||
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
|
|
||||||
function rebuild(gyp, argv, callback) {
|
|
||||||
const package_json = gyp.package_json;
|
|
||||||
let installArgs = [];
|
|
||||||
const napi_build_version = napi.get_best_napi_build_version(package_json, gyp.opts);
|
|
||||||
if (napi_build_version != null) installArgs = [napi.get_command_arg(napi_build_version)];
|
|
||||||
gyp.todo.unshift(
|
|
||||||
{ name: 'clean', args: [] },
|
|
||||||
{ name: 'install', args: installArgs }
|
|
||||||
);
|
|
||||||
process.nextTick(callback);
|
|
||||||
}
|
|
32
server/node_modules/@mapbox/node-pre-gyp/lib/reveal.js
generated
vendored
|
@ -1,32 +0,0 @@
|
||||||
'use strict';
|
|
||||||
|
|
||||||
module.exports = exports = reveal;
|
|
||||||
|
|
||||||
exports.usage = 'Reveals data on the versioned binary';
|
|
||||||
|
|
||||||
const versioning = require('./util/versioning.js');
|
|
||||||
const napi = require('./util/napi.js');
|
|
||||||
|
|
||||||
function unix_paths(key, val) {
|
|
||||||
return val && val.replace ? val.replace(/\\/g, '/') : val;
|
|
||||||
}
|
|
||||||
|
|
||||||
function reveal(gyp, argv, callback) {
|
|
||||||
const package_json = gyp.package_json;
|
|
||||||
const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
|
|
||||||
const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
|
|
||||||
let hit = false;
|
|
||||||
// if a second arg is passed look to see
|
|
||||||
// if it is a known option
|
|
||||||
// console.log(JSON.stringify(gyp.opts,null,1))
|
|
||||||
const remain = gyp.opts.argv.remain[gyp.opts.argv.remain.length - 1];
|
|
||||||
if (remain && Object.hasOwnProperty.call(opts, remain)) {
|
|
||||||
console.log(opts[remain].replace(/\\/g, '/'));
|
|
||||||
hit = true;
|
|
||||||
}
|
|
||||||
// otherwise return all options as json
|
|
||||||
if (!hit) {
|
|
||||||
console.log(JSON.stringify(opts, unix_paths, 2));
|
|
||||||
}
|
|
||||||
return callback();
|
|
||||||
}
|
|
79
server/node_modules/@mapbox/node-pre-gyp/lib/testbinary.js
generated
vendored
@@ -1,79 +0,0 @@
'use strict';

module.exports = exports = testbinary;

exports.usage = 'Tests that the binary.node can be required';

const path = require('path');
const log = require('npmlog');
const cp = require('child_process');
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');

function testbinary(gyp, argv, callback) {
  const args = [];
  const options = {};
  let shell_cmd = process.execPath;
  const package_json = gyp.package_json;
  const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  // skip validation for runtimes we don't explicitly support (like electron)
  if (opts.runtime &&
      opts.runtime !== 'node-webkit' &&
      opts.runtime !== 'node') {
    return callback();
  }
  const nw = (opts.runtime && opts.runtime === 'node-webkit');
  // ensure on windows that / are used for require path
  const binary_module = opts.module.replace(/\\/g, '/');
  if ((process.arch !== opts.target_arch) ||
      (process.platform !== opts.target_platform)) {
    let msg = 'skipping validation since host platform/arch (';
    msg += process.platform + '/' + process.arch + ')';
    msg += ' does not match target (';
    msg += opts.target_platform + '/' + opts.target_arch + ')';
    log.info('validate', msg);
    return callback();
  }
  if (nw) {
    options.timeout = 5000;
    if (process.platform === 'darwin') {
      shell_cmd = 'node-webkit';
    } else if (process.platform === 'win32') {
      shell_cmd = 'nw.exe';
    } else {
      shell_cmd = 'nw';
    }
    const modulePath = path.resolve(binary_module);
    const appDir = path.join(__dirname, 'util', 'nw-pre-gyp');
    args.push(appDir);
    args.push(modulePath);
    log.info('validate', "Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'");
    cp.execFile(shell_cmd, args, options, (err, stdout, stderr) => {
      // check for normal timeout for node-webkit
      if (err) {
        if (err.killed === true && err.signal && err.signal.indexOf('SIG') > -1) {
          return callback();
        }
        const stderrLog = stderr.toString();
        log.info('stderr', stderrLog);
        if (/^\s*Xlib:\s*extension\s*"RANDR"\s*missing\s*on\s*display\s*":\d+\.\d+"\.\s*$/.test(stderrLog)) {
          log.info('RANDR', 'stderr contains only RANDR error, ignored');
          return callback();
        }
        return callback(err);
      }
      return callback();
    });
    return;
  }
  args.push('--eval');
  args.push("require('" + binary_module.replace(/'/g, '\'') + "')");
  log.info('validate', "Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'");
  cp.execFile(shell_cmd, args, options, (err, stdout, stderr) => {
    if (err) {
      return callback(err, { stdout: stdout, stderr: stderr });
    }
    return callback();
  });
}
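Illustrative only: for the plain Node runtime the test above amounts to spawning node --eval "require('<binary>')". A standalone sketch with a made-up module path:

// Sketch of the non-node-webkit branch above; the path is a placeholder.
const cp = require('child_process');
const binary_module = './lib/binding/napi-v3/my_addon.node'; // hypothetical
const args = ['--eval', "require('" + binary_module + "')"];
cp.execFile(process.execPath, args, (err, stdout, stderr) => {
  if (err) {
    console.error('binary failed to load:', stderr.toString());
  } else {
    console.log('binary can be required');
  }
});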
53
server/node_modules/@mapbox/node-pre-gyp/lib/testpackage.js
generated
vendored
@@ -1,53 +0,0 @@
'use strict';

module.exports = exports = testpackage;

exports.usage = 'Tests that the staged package is valid';

const fs = require('fs');
const path = require('path');
const log = require('npmlog');
const existsAsync = fs.exists || path.exists;
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const testbinary = require('./testbinary.js');
const tar = require('tar');
const makeDir = require('make-dir');

function testpackage(gyp, argv, callback) {
  const package_json = gyp.package_json;
  const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  const tarball = opts.staged_tarball;
  existsAsync(tarball, (found) => {
    if (!found) {
      return callback(new Error('Cannot test package because ' + tarball + ' missing: run `node-pre-gyp package` first'));
    }
    const to = opts.module_path;
    function filter_func(entry) {
      log.info('install', 'unpacking [' + entry.path + ']');
    }

    makeDir(to).then(() => {
      tar.extract({
        file: tarball,
        cwd: to,
        strip: 1,
        onentry: filter_func
      }).then(after_extract, callback);
    }).catch((err) => {
      return callback(err);
    });

    function after_extract() {
      testbinary(gyp, argv, (err) => {
        if (err) {
          return callback(err);
        } else {
          console.log('[' + package_json.name + '] Package appears valid');
          return callback();
        }
      });
    }
  });
}
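A self-contained sketch of the unpack step, assuming the same tar and make-dir packages the deleted file depends on; both paths below are placeholders, not values from this repository.

// Sketch: extract a staged tarball into the module directory, as testpackage() does.
const tar = require('tar');
const makeDir = require('make-dir');
const tarball = './build/stage/my_addon-v1.0.0-napi-v3-linux-x64.tar.gz'; // hypothetical
const to = './lib/binding/napi-v3';                                       // hypothetical
makeDir(to)
  .then(() => tar.extract({
    file: tarball,
    cwd: to,
    strip: 1,                                    // drop the tarball's top-level folder
    onentry: (entry) => console.log('unpacking [' + entry.path + ']')
  }))
  .then(() => console.log('package extracted'))
  .catch((err) => console.error(err.message));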
41
server/node_modules/@mapbox/node-pre-gyp/lib/unpublish.js
generated
vendored
@@ -1,41 +0,0 @@
'use strict';

module.exports = exports = unpublish;

exports.usage = 'Unpublishes pre-built binary (requires aws-sdk)';

const log = require('npmlog');
const versioning = require('./util/versioning.js');
const napi = require('./util/napi.js');
const s3_setup = require('./util/s3_setup.js');
const url = require('url');

function unpublish(gyp, argv, callback) {
  const package_json = gyp.package_json;
  const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version);
  const config = {};
  s3_setup.detect(opts, config);
  const s3 = s3_setup.get_s3(config);
  const key_name = url.resolve(config.prefix, opts.package_name);
  const s3_opts = {
    Bucket: config.bucket,
    Key: key_name
  };
  s3.headObject(s3_opts, (err, meta) => {
    if (err && err.code === 'NotFound') {
      console.log('[' + package_json.name + '] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
      return callback();
    } else if (err) {
      return callback(err);
    } else {
      log.info('unpublish', JSON.stringify(meta));
      s3.deleteObject(s3_opts, (err2, resp) => {
        if (err2) return callback(err2);
        log.info(JSON.stringify(resp));
        console.log('[' + package_json.name + '] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key);
        return callback();
      });
    }
  });
}
2602
server/node_modules/@mapbox/node-pre-gyp/lib/util/abi_crosswalk.json
generated
vendored
File diff suppressed because it is too large
93
server/node_modules/@mapbox/node-pre-gyp/lib/util/compile.js
generated
vendored
@@ -1,93 +0,0 @@
'use strict';

module.exports = exports;

const fs = require('fs');
const path = require('path');
const win = process.platform === 'win32';
const existsSync = fs.existsSync || path.existsSync;
const cp = require('child_process');

// try to build up the complete path to node-gyp
/* priority:
  - node-gyp on ENV:npm_config_node_gyp (https://github.com/npm/npm/pull/4887)
  - node-gyp on NODE_PATH
  - node-gyp inside npm on NODE_PATH (ignore on iojs)
  - node-gyp inside npm beside node exe
*/
function which_node_gyp() {
  let node_gyp_bin;
  if (process.env.npm_config_node_gyp) {
    try {
      node_gyp_bin = process.env.npm_config_node_gyp;
      if (existsSync(node_gyp_bin)) {
        return node_gyp_bin;
      }
    } catch (err) {
      // do nothing
    }
  }
  try {
    const node_gyp_main = require.resolve('node-gyp'); // eslint-disable-line node/no-missing-require
    node_gyp_bin = path.join(path.dirname(
      path.dirname(node_gyp_main)),
    'bin/node-gyp.js');
    if (existsSync(node_gyp_bin)) {
      return node_gyp_bin;
    }
  } catch (err) {
    // do nothing
  }
  if (process.execPath.indexOf('iojs') === -1) {
    try {
      const npm_main = require.resolve('npm'); // eslint-disable-line node/no-missing-require
      node_gyp_bin = path.join(path.dirname(
        path.dirname(npm_main)),
      'node_modules/node-gyp/bin/node-gyp.js');
      if (existsSync(node_gyp_bin)) {
        return node_gyp_bin;
      }
    } catch (err) {
      // do nothing
    }
  }
  const npm_base = path.join(path.dirname(
    path.dirname(process.execPath)),
  'lib/node_modules/npm/');
  node_gyp_bin = path.join(npm_base, 'node_modules/node-gyp/bin/node-gyp.js');
  if (existsSync(node_gyp_bin)) {
    return node_gyp_bin;
  }
}

module.exports.run_gyp = function(args, opts, callback) {
  let shell_cmd = '';
  const cmd_args = [];
  if (opts.runtime && opts.runtime === 'node-webkit') {
    shell_cmd = 'nw-gyp';
    if (win) shell_cmd += '.cmd';
  } else {
    const node_gyp_path = which_node_gyp();
    if (node_gyp_path) {
      shell_cmd = process.execPath;
      cmd_args.push(node_gyp_path);
    } else {
      shell_cmd = 'node-gyp';
      if (win) shell_cmd += '.cmd';
    }
  }
  const final_args = cmd_args.concat(args);
  const cmd = cp.spawn(shell_cmd, final_args, { cwd: undefined, env: process.env, stdio: [0, 1, 2] });
  cmd.on('error', (err) => {
    if (err) {
      return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + err + ')'));
    }
    callback(null, opts);
  });
  cmd.on('close', (code) => {
    if (code && code !== 0) {
      return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + code + ')'));
    }
    callback(null, opts);
  });
};
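Illustrative of how run_gyp() is typically driven; the command list and the relative require path are assumptions, not taken from this repository.

// Sketch: invoking run_gyp() with a typical rebuild sequence.
const compile = require('./compile.js'); // assumes this util/ directory
compile.run_gyp(['clean', 'configure', 'build', '--debug=false'], { runtime: 'node' }, (err, opts) => {
  if (err) {
    console.error(err.message);
  } else {
    console.log('node-gyp finished', opts);
  }
});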
102
server/node_modules/@mapbox/node-pre-gyp/lib/util/handle_gyp_opts.js
generated
vendored
@@ -1,102 +0,0 @@
'use strict';

module.exports = exports = handle_gyp_opts;

const versioning = require('./versioning.js');
const napi = require('./napi.js');

/*

Here we gather node-pre-gyp generated options (from versioning) and pass them along to node-gyp.

We massage the args and options slightly to account for differences in what commands mean between
node-pre-gyp and node-gyp (e.g. see the difference between "build" and "rebuild" below)

Keep in mind: the values inside `argv` and `gyp.opts` below are different depending on whether
node-pre-gyp is called directly, or if it is called in a `run-script` phase of npm.

We also try to preserve any command line options that might have been passed to npm or node-pre-gyp.
But this is fairly difficult without passing way too much through. For example `gyp.opts` contains all
the process.env and npm pushes a lot of variables into process.env which node-pre-gyp inherits. So we have
to be very selective about what we pass through.

For example:

`npm install --build-from-source` will give:

argv == [ 'rebuild' ]
gyp.opts.argv == { remain: [ 'install' ],
  cooked: [ 'install', '--fallback-to-build' ],
  original: [ 'install', '--fallback-to-build' ] }

`./bin/node-pre-gyp build` will give:

argv == []
gyp.opts.argv == { remain: [ 'build' ],
  cooked: [ 'build' ],
  original: [ '-C', 'test/app1', 'build' ] }

*/

// select set of node-pre-gyp versioning info
// to share with node-gyp
const share_with_node_gyp = [
  'module',
  'module_name',
  'module_path',
  'napi_version',
  'node_abi_napi',
  'napi_build_version',
  'node_napi_label'
];

function handle_gyp_opts(gyp, argv, callback) {

  // Collect node-pre-gyp specific variables to pass to node-gyp
  const node_pre_gyp_options = [];
  // generate custom node-pre-gyp versioning info
  const napi_build_version = napi.get_napi_build_version_from_command_args(argv);
  const opts = versioning.evaluate(gyp.package_json, gyp.opts, napi_build_version);
  share_with_node_gyp.forEach((key) => {
    const val = opts[key];
    if (val) {
      node_pre_gyp_options.push('--' + key + '=' + val);
    } else if (key === 'napi_build_version') {
      node_pre_gyp_options.push('--' + key + '=0');
    } else {
      if (key !== 'napi_version' && key !== 'node_abi_napi')
        return callback(new Error('Option ' + key + ' required but not found by node-pre-gyp'));
    }
  });

  // Collect options that follow the special -- which disables nopt parsing
  const unparsed_options = [];
  let double_hyphen_found = false;
  gyp.opts.argv.original.forEach((opt) => {
    if (double_hyphen_found) {
      unparsed_options.push(opt);
    }
    if (opt === '--') {
      double_hyphen_found = true;
    }
  });

  // We try to respect and pass through remaining command
  // line options (like --foo=bar) to node-gyp
  const cooked = gyp.opts.argv.cooked;
  const node_gyp_options = [];
  cooked.forEach((value) => {
    if (value.length > 2 && value.slice(0, 2) === '--') {
      const key = value.slice(2);
      const val = cooked[cooked.indexOf(value) + 1];
      if (val && val.indexOf('--') === -1) { // handle '--foo=bar' or ['--foo','bar']
        node_gyp_options.push('--' + key + '=' + val);
      } else { // pass through --foo
        node_gyp_options.push(value);
      }
    }
  });

  const result = { 'opts': opts, 'gyp': node_gyp_options, 'pre': node_pre_gyp_options, 'unparsed': unparsed_options };
  return callback(null, result);
}
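For orientation, a sketch of the result object handle_gyp_opts() hands to its callback; every value below is invented, only the field names come from the code above.

// Invented example of the callback result shape.
const result = {
  opts: { module_name: 'my_addon', module_path: './lib/binding/napi-v3' }, // versioning.evaluate() output
  gyp: ['--debug=false'],                      // pass-through --foo=bar flags for node-gyp
  pre: [                                       // node-pre-gyp versioning info shared with node-gyp
    '--module_name=my_addon',
    '--module_path=./lib/binding/napi-v3',
    '--napi_build_version=0'
  ],
  unparsed: ['--custom-flag']                  // anything after a bare `--`
};
console.log(result.pre.concat(result.gyp, result.unparsed).join(' '));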
205
server/node_modules/@mapbox/node-pre-gyp/lib/util/napi.js
generated
vendored
@@ -1,205 +0,0 @@
'use strict';

const fs = require('fs');

module.exports = exports;

const versionArray = process.version
  .substr(1)
  .replace(/-.*$/, '')
  .split('.')
  .map((item) => {
    return +item;
  });

const napi_multiple_commands = [
  'build',
  'clean',
  'configure',
  'package',
  'publish',
  'reveal',
  'testbinary',
  'testpackage',
  'unpublish'
];

const napi_build_version_tag = 'napi_build_version=';

module.exports.get_napi_version = function() {
  // returns the non-zero numeric napi version or undefined if napi is not supported.
  // correctly supporting target requires an updated cross-walk
  let version = process.versions.napi; // can be undefined
  if (!version) { // this code should never need to be updated
    if (versionArray[0] === 9 && versionArray[1] >= 3) version = 2; // 9.3.0+
    else if (versionArray[0] === 8) version = 1; // 8.0.0+
  }
  return version;
};

module.exports.get_napi_version_as_string = function(target) {
  // returns the napi version as a string or an empty string if napi is not supported.
  const version = module.exports.get_napi_version(target);
  return version ? '' + version : '';
};

module.exports.validate_package_json = function(package_json, opts) { // throws Error

  const binary = package_json.binary;
  const module_path_ok = pathOK(binary.module_path);
  const remote_path_ok = pathOK(binary.remote_path);
  const package_name_ok = pathOK(binary.package_name);
  const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts, true);
  const napi_build_versions_raw = module.exports.get_napi_build_versions_raw(package_json);

  if (napi_build_versions) {
    napi_build_versions.forEach((napi_build_version)=> {
      if (!(parseInt(napi_build_version, 10) === napi_build_version && napi_build_version > 0)) {
        throw new Error('All values specified in napi_versions must be positive integers.');
      }
    });
  }

  if (napi_build_versions && (!module_path_ok || (!remote_path_ok && !package_name_ok))) {
    throw new Error('When napi_versions is specified; module_path and either remote_path or ' +
      "package_name must contain the substitution string '{napi_build_version}`.");
  }

  if ((module_path_ok || remote_path_ok || package_name_ok) && !napi_build_versions_raw) {
    throw new Error("When the substitution string '{napi_build_version}` is specified in " +
      'module_path, remote_path, or package_name; napi_versions must also be specified.');
  }

  if (napi_build_versions && !module.exports.get_best_napi_build_version(package_json, opts) &&
    module.exports.build_napi_only(package_json)) {
    throw new Error(
      'The Node-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' +
      'This module supports Node-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' +
      'This Node instance cannot run this module.');
  }

  if (napi_build_versions_raw && !napi_build_versions && module.exports.build_napi_only(package_json)) {
    throw new Error(
      'The Node-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' +
      'This module supports Node-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' +
      'This Node instance cannot run this module.');
  }

};

function pathOK(path) {
  return path && (path.indexOf('{napi_build_version}') !== -1 || path.indexOf('{node_napi_label}') !== -1);
}

module.exports.expand_commands = function(package_json, opts, commands) {
  const expanded_commands = [];
  const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts);
  commands.forEach((command)=> {
    if (napi_build_versions && command.name === 'install') {
      const napi_build_version = module.exports.get_best_napi_build_version(package_json, opts);
      const args = napi_build_version ? [napi_build_version_tag + napi_build_version] : [];
      expanded_commands.push({ name: command.name, args: args });
    } else if (napi_build_versions && napi_multiple_commands.indexOf(command.name) !== -1) {
      napi_build_versions.forEach((napi_build_version)=> {
        const args = command.args.slice();
        args.push(napi_build_version_tag + napi_build_version);
        expanded_commands.push({ name: command.name, args: args });
      });
    } else {
      expanded_commands.push(command);
    }
  });
  return expanded_commands;
};

module.exports.get_napi_build_versions = function(package_json, opts, warnings) { // opts may be undefined
  const log = require('npmlog');
  let napi_build_versions = [];
  const supported_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined);
  // remove duplicates, verify each napi version can actually be built
  if (package_json.binary && package_json.binary.napi_versions) {
    package_json.binary.napi_versions.forEach((napi_version) => {
      const duplicated = napi_build_versions.indexOf(napi_version) !== -1;
      if (!duplicated && supported_napi_version && napi_version <= supported_napi_version) {
        napi_build_versions.push(napi_version);
      } else if (warnings && !duplicated && supported_napi_version) {
        log.info('This Node instance does not support builds for Node-API version', napi_version);
      }
    });
  }
  if (opts && opts['build-latest-napi-version-only']) {
    let latest_version = 0;
    napi_build_versions.forEach((napi_version) => {
      if (napi_version > latest_version) latest_version = napi_version;
    });
    napi_build_versions = latest_version ? [latest_version] : [];
  }
  return napi_build_versions.length ? napi_build_versions : undefined;
};

module.exports.get_napi_build_versions_raw = function(package_json) {
  const napi_build_versions = [];
  // remove duplicates
  if (package_json.binary && package_json.binary.napi_versions) {
    package_json.binary.napi_versions.forEach((napi_version) => {
      if (napi_build_versions.indexOf(napi_version) === -1) {
        napi_build_versions.push(napi_version);
      }
    });
  }
  return napi_build_versions.length ? napi_build_versions : undefined;
};

module.exports.get_command_arg = function(napi_build_version) {
  return napi_build_version_tag + napi_build_version;
};

module.exports.get_napi_build_version_from_command_args = function(command_args) {
  for (let i = 0; i < command_args.length; i++) {
    const arg = command_args[i];
    if (arg.indexOf(napi_build_version_tag) === 0) {
      return parseInt(arg.substr(napi_build_version_tag.length), 10);
    }
  }
  return undefined;
};

module.exports.swap_build_dir_out = function(napi_build_version) {
  if (napi_build_version) {
    const rm = require('rimraf');
    rm.sync(module.exports.get_build_dir(napi_build_version));
    fs.renameSync('build', module.exports.get_build_dir(napi_build_version));
  }
};

module.exports.swap_build_dir_in = function(napi_build_version) {
  if (napi_build_version) {
    const rm = require('rimraf');
    rm.sync('build');
    fs.renameSync(module.exports.get_build_dir(napi_build_version), 'build');
  }
};

module.exports.get_build_dir = function(napi_build_version) {
  return 'build-tmp-napi-v' + napi_build_version;
};

module.exports.get_best_napi_build_version = function(package_json, opts) {
  let best_napi_build_version = 0;
  const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts);
  if (napi_build_versions) {
    const our_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined);
    napi_build_versions.forEach((napi_build_version)=> {
      if (napi_build_version > best_napi_build_version &&
        napi_build_version <= our_napi_version) {
        best_napi_build_version = napi_build_version;
      }
    });
  }
  return best_napi_build_version === 0 ? undefined : best_napi_build_version;
};

module.exports.build_napi_only = function(package_json) {
  return package_json.binary && package_json.binary.package_name &&
    package_json.binary.package_name.indexOf('{node_napi_label}') === -1;
};
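A hedged example of expand_commands() fanning one command out per buildable Node-API version; the package.json fragment and the require path are assumptions, and the exact output depends on the Node version running the sketch.

// Sketch: expand_commands() multiplies commands per Node-API version it can build.
const napi = require('./napi.js'); // assumes this util/ directory
const package_json = {
  binary: {
    module_path: './lib/binding/napi-v{napi_build_version}',    // hypothetical
    package_name: 'my_addon-napi-v{napi_build_version}.tar.gz', // hypothetical
    napi_versions: [3, 6]
  }
};
const commands = [{ name: 'build', args: [] }];
console.log(napi.expand_commands(package_json, {}, commands));
// On a Node release with Node-API >= 6, roughly:
// [ { name: 'build', args: [ 'napi_build_version=3' ] },
//   { name: 'build', args: [ 'napi_build_version=6' ] } ]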
26
server/node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/index.html
generated
vendored
@@ -1,26 +0,0 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Node-webkit-based module test</title>
<script>
function nwModuleTest(){
  var util = require('util');
  var moduleFolder = require('nw.gui').App.argv[0];
  try {
    require(moduleFolder);
  } catch(e) {
    if( process.platform !== 'win32' ){
      util.log('nw-pre-gyp error:');
      util.log(e.stack);
    }
    process.exit(1);
  }
  process.exit(0);
}
</script>
</head>
<body onload="nwModuleTest()">
<h1>Node-webkit-based module test</h1>
</body>
</html>
9
server/node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/package.json
generated
vendored
@@ -1,9 +0,0 @@
{
  "main": "index.html",
  "name": "nw-pre-gyp-module-test",
  "description": "Node-webkit-based module test.",
  "version": "0.0.1",
  "window": {
    "show": false
  }
}
Some files were not shown because too many files have changed in this diff