diff --git a/README.md b/README.md index 6a6a1380..790432ad 100644 --- a/README.md +++ b/README.md @@ -45,12 +45,17 @@ docker compose up --build - `POST /admin/jobs/:id/retry` (admin) - `POST /admin/impersonate/:userId` (admin) - `GET /admin/tenants/:id/export` (admin) +- `GET /admin/tenants/:id/export?scope=users|accounts|jobs|rules&format=csv|zip` (admin, zip bundle) +- `GET /admin/exports/:id` (admin) +- `GET /admin/exports/:id/download` (admin) +- `GET /jobs/exports/:id/stream` (auth, SSE) - `DELETE /admin/tenants/:id` (admin) OAuth: - `POST /oauth/gmail/url` (auth) - `GET /oauth/gmail/callback` (Google redirect) - `GET /oauth/gmail/status/:accountId` (auth) +- `GET /oauth/gmail/ping/:accountId` (auth) UI: - Admin panel supports password reset, job cancel/retry, tenant export/delete, and impersonation. diff --git a/backend/node_modules/.bin/crc32 b/backend/node_modules/.bin/crc32 new file mode 120000 index 00000000..9f8b79c5 --- /dev/null +++ b/backend/node_modules/.bin/crc32 @@ -0,0 +1 @@ +../crc-32/bin/crc32.njs \ No newline at end of file diff --git a/backend/node_modules/.package-lock.json b/backend/node_modules/.package-lock.json index be20e82c..c802df60 100644 --- a/backend/node_modules/.package-lock.json +++ b/backend/node_modules/.package-lock.json @@ -589,6 +589,101 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/archiver": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-6.0.2.tgz", + "integrity": "sha512-UQ/2nW7NMl1G+1UnrLypQw1VdT9XZg/ECcKPq7l+STzStrSivFIXIp34D8M5zeNGW5NoOupdYCHv6VySCPNNlw==", + "license": "MIT", + "dependencies": { + "archiver-utils": "^4.0.1", + "async": "^3.2.4", + "buffer-crc32": "^0.2.1", + "readable-stream": "^3.6.0", + "readdir-glob": "^1.1.2", + "tar-stream": "^3.0.0", + "zip-stream": "^5.0.1" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/archiver-utils": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-4.0.1.tgz", + "integrity": "sha512-Q4Q99idbvzmgCTEAAhi32BkOyq8iVI5EwdO0PmBDSGIzzjYNdcFn7Q7k3OzbLy4kLUPXfJtG6fO2RjftXbobBg==", + "license": "MIT", + "dependencies": { + "glob": "^8.0.0", + "graceful-fs": "^4.2.0", + "lazystream": "^1.0.0", + "lodash": "^4.17.15", + "normalize-path": "^3.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/archiver-utils/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/archiver-utils/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/archiver-utils/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": 
"sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/archiver/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/arg": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", @@ -623,6 +718,12 @@ "safer-buffer": "^2.1.0" } }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==", + "license": "MIT" + }, "node_modules/atomic-sleep": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", @@ -648,12 +749,40 @@ "integrity": "sha512-wWSvph+29GR783IhmvdwWnN4bUxTD01Vm5Xad4i7i1VuAOItLvbPAb69sb0IQ2N57yprvhNIwAP5B6xfKTmjmQ==", "license": "MIT" }, + "node_modules/b4a": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.7.3.tgz", + "integrity": "sha512-5Q2mfq2WfGuFp3uS//0s6baOJLMoVduPYVeNmDYxu5OUA1/cBfvr2RIS7vi62LdNj/urk1hfmj867I3qt6uZ7Q==", + "license": "Apache-2.0", + "peerDependencies": { + "react-native-b4a": "*" + }, + "peerDependenciesMeta": { + "react-native-b4a": { + "optional": true + } + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "license": "MIT" }, + "node_modules/bare-events": { + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.8.2.tgz", + "integrity": "sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ==", + "license": "Apache-2.0", + "peerDependencies": { + "bare-abort-controller": "*" + }, + "peerDependenciesMeta": { + "bare-abort-controller": { + "optional": true + } + } + }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -722,6 +851,15 @@ "ieee754": "^1.2.1" } }, + "node_modules/buffer-crc32": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", + "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", + "license": "MIT", + "engines": { + "node": "*" + } + }, "node_modules/buffer-equal-constant-time": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", @@ -829,6 +967,35 @@ "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", "license": "MIT" }, + "node_modules/compress-commons": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-5.0.3.tgz", + "integrity": 
"sha512-/UIcLWvwAQyVibgpQDPtfNM3SvqN7G9elAPAV7GM0L53EbNWwWiCsWtK8Fwed/APEbptPHXs5PuW+y8Bq8lFTA==", + "license": "MIT", + "dependencies": { + "crc-32": "^1.2.0", + "crc32-stream": "^5.0.0", + "normalize-path": "^3.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/compress-commons/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -850,6 +1017,51 @@ "node": ">= 0.6" } }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "license": "MIT" + }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "license": "Apache-2.0", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/crc32-stream": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-5.0.1.tgz", + "integrity": "sha512-lO1dFui+CEUh/ztYIpgpKItKW9Bb4NWakCRJrnqAbFIYD+OZAwb2VfD5T5eXMw2FNcsDHkQcNl/Wh3iVXYwU6g==", + "license": "MIT", + "dependencies": { + "crc-32": "^1.2.0", + "readable-stream": "^3.4.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/crc32-stream/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/create-require": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", @@ -1139,6 +1351,15 @@ "node": ">=0.8.x" } }, + "node_modules/events-universal": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/events-universal/-/events-universal-1.0.1.tgz", + "integrity": "sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==", + "license": "Apache-2.0", + "dependencies": { + "bare-events": "^2.7.0" + } + }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -1169,6 +1390,12 @@ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", "license": "MIT" }, + "node_modules/fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==", + "license": "MIT" + }, "node_modules/fast-json-stringify": { "version": "5.16.1", 
"resolved": "https://registry.npmjs.org/fast-json-stringify/-/fast-json-stringify-5.16.1.tgz", @@ -1407,6 +1634,12 @@ "node": ">= 0.6" } }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -1570,6 +1803,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "license": "ISC" + }, "node_modules/gtoken": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-8.0.0.tgz", @@ -1795,6 +2034,17 @@ ], "license": "MIT" }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -1852,6 +2102,12 @@ "node": ">=8" } }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" + }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -1960,6 +2216,48 @@ "safe-buffer": "^5.0.1" } }, + "node_modules/lazystream": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", + "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", + "license": "MIT", + "dependencies": { + "readable-stream": "^2.0.5" + }, + "engines": { + "node": ">= 0.6.3" + } + }, + "node_modules/lazystream/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/lazystream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "license": "MIT" + }, + "node_modules/lazystream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + 
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, "node_modules/leac": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/leac/-/leac-0.6.0.tgz", @@ -2025,6 +2323,12 @@ "uc.micro": "^2.0.0" } }, + "node_modules/lodash": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "license": "MIT" + }, "node_modules/lodash.defaults": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", @@ -2296,6 +2600,15 @@ "node": ">=6.0.0" } }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/object-inspect": { "version": "1.13.4", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", @@ -2517,6 +2830,12 @@ "node": ">= 0.6.0" } }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "license": "MIT" + }, "node_modules/process-warning": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-3.0.0.tgz", @@ -2592,6 +2911,27 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, + "node_modules/readdir-glob": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", + "license": "Apache-2.0", + "dependencies": { + "minimatch": "^5.1.0" + } + }, + "node_modules/readdir-glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/real-require": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", @@ -2932,6 +3272,17 @@ "reusify": "^1.0.0" } }, + "node_modules/streamx": { + "version": "2.23.0", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.23.0.tgz", + "integrity": "sha512-kn+e44esVfn2Fa/O0CPFcex27fjIL6MkVae0Mm6q+E6f0hWv578YCERbv+4m02cjxvDsPKLnmxral/rR6lBMAg==", + "license": "MIT", + "dependencies": { + "events-universal": "^1.0.0", + "fast-fifo": "^1.3.2", + "text-decoder": "^1.1.0" + } + }, "node_modules/string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", @@ -3049,6 +3400,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/tar-stream": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "license": "MIT", + 
"dependencies": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, + "node_modules/text-decoder": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz", + "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==", + "license": "Apache-2.0", + "dependencies": { + "b4a": "^1.6.4" + } + }, "node_modules/thread-stream": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz", @@ -3168,6 +3539,12 @@ "integrity": "sha512-XdVKMF4SJ0nP/O7XIPB0JwAEuT9lDIYnNsK8yGVe43y0AWoKeJNdv3ZNWh7ksJ6KqQFjOO6ox/VEitLnaVNufw==", "license": "BSD" }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, "node_modules/uuid": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", @@ -3343,6 +3720,34 @@ "node": ">=6" } }, + "node_modules/zip-stream": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-5.0.2.tgz", + "integrity": "sha512-LfOdrUvPB8ZoXtvOBz6DlNClfvi//b5d56mSWyJi7XbH/HfhOHfUhOqxhT/rUiR7yiktlunqRo+jY6y/cWC/5g==", + "license": "MIT", + "dependencies": { + "archiver-utils": "^4.0.1", + "compress-commons": "^5.0.1", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, + "node_modules/zip-stream/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/zod": { "version": "3.25.76", "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", diff --git a/backend/node_modules/.prisma/client/edge.js b/backend/node_modules/.prisma/client/edge.js index 5e90165e..20733a55 100644 --- a/backend/node_modules/.prisma/client/edge.js +++ b/backend/node_modules/.prisma/client/edge.js @@ -98,6 +98,18 @@ exports.Prisma.TenantScalarFieldEnum = { updatedAt: 'updatedAt' }; +exports.Prisma.ExportJobScalarFieldEnum = { + id: 'id', + tenantId: 'tenantId', + status: 'status', + format: 'format', + scope: 'scope', + filePath: 'filePath', + error: 'error', + createdAt: 'createdAt', + updatedAt: 'updatedAt' +}; + exports.Prisma.UserScalarFieldEnum = { id: 'id', tenantId: 'tenantId', @@ -223,6 +235,13 @@ exports.Prisma.NullsOrder = { first: 'first', last: 'last' }; +exports.ExportStatus = exports.$Enums.ExportStatus = { + QUEUED: 'QUEUED', + RUNNING: 'RUNNING', + DONE: 'DONE', + FAILED: 'FAILED' +}; + exports.UserRole = exports.$Enums.UserRole = { USER: 'USER', ADMIN: 'ADMIN' @@ -259,6 +278,7 @@ exports.JobStatus = exports.$Enums.JobStatus = { exports.Prisma.ModelName = { Tenant: 'Tenant', + ExportJob: 'ExportJob', User: 'User', MailboxAccount: 'MailboxAccount', MailboxFolder: 'MailboxFolder', @@ -315,13 +335,13 @@ const config = { } } }, - "inlineSchema": "generator client {\n provider = \"prisma-client-js\"\n}\n\ndatasource db {\n provider = \"postgresql\"\n url = env(\"DATABASE_URL\")\n}\n\nenum MailProvider {\n GMAIL\n GMX\n WEBDE\n}\n\nenum UserRole {\n 
USER\n ADMIN\n}\n\nenum JobStatus {\n QUEUED\n RUNNING\n SUCCEEDED\n FAILED\n CANCELED\n}\n\nenum RuleActionType {\n MOVE\n DELETE\n ARCHIVE\n LABEL\n}\n\nenum RuleConditionType {\n HEADER\n SUBJECT\n FROM\n LIST_UNSUBSCRIBE\n LIST_ID\n}\n\nmodel Tenant {\n id String @id @default(cuid())\n name String\n isActive Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n users User[]\n mailboxAccounts MailboxAccount[]\n rules Rule[]\n jobs CleanupJob[]\n}\n\nmodel User {\n id String @id @default(cuid())\n tenantId String\n email String @unique\n password String\n role UserRole @default(USER)\n isActive Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n}\n\nmodel MailboxAccount {\n id String @id @default(cuid())\n tenantId String\n email String\n provider MailProvider\n isActive Boolean @default(true)\n imapHost String\n imapPort Int\n imapTLS Boolean\n smtpHost String?\n smtpPort Int?\n smtpTLS Boolean?\n oauthToken String?\n oauthRefreshToken String?\n oauthAccessToken String?\n oauthExpiresAt DateTime?\n providerUserId String?\n appPassword String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n folders MailboxFolder[]\n jobs CleanupJob[]\n\n @@index([tenantId])\n}\n\nmodel MailboxFolder {\n id String @id @default(cuid())\n mailboxAccountId String\n name String\n remoteId String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n mailboxAccount MailboxAccount @relation(fields: [mailboxAccountId], references: [id])\n mailItems MailItem[]\n\n @@index([mailboxAccountId])\n}\n\nmodel MailItem {\n id String @id @default(cuid())\n folderId String\n messageId String\n subject String?\n from String?\n receivedAt DateTime?\n listId String?\n listUnsubscribe String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n folder MailboxFolder @relation(fields: [folderId], references: [id])\n\n @@index([folderId])\n @@index([messageId])\n}\n\nmodel Rule {\n id String @id @default(cuid())\n tenantId String\n name String\n enabled Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n conditions RuleCondition[]\n actions RuleAction[]\n\n @@index([tenantId])\n}\n\nmodel RuleCondition {\n id String @id @default(cuid())\n ruleId String\n type RuleConditionType\n value String\n\n rule Rule @relation(fields: [ruleId], references: [id])\n\n @@index([ruleId])\n}\n\nmodel RuleAction {\n id String @id @default(cuid())\n ruleId String\n type RuleActionType\n target String?\n\n rule Rule @relation(fields: [ruleId], references: [id])\n\n @@index([ruleId])\n}\n\nmodel CleanupJob {\n id String @id @default(cuid())\n tenantId String\n mailboxAccountId String\n status JobStatus @default(QUEUED)\n dryRun Boolean @default(true)\n unsubscribeEnabled Boolean @default(true)\n routingEnabled Boolean @default(true)\n startedAt DateTime?\n finishedAt DateTime?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n mailboxAccount MailboxAccount @relation(fields: [mailboxAccountId], references: [id])\n unsubscribeAttempts UnsubscribeAttempt[]\n events CleanupJobEvent[]\n\n @@index([tenantId])\n @@index([mailboxAccountId])\n}\n\nmodel UnsubscribeAttempt {\n id String @id 
@default(cuid())\n jobId String\n mailItemId String?\n method String\n target String\n status String\n createdAt DateTime @default(now())\n\n job CleanupJob @relation(fields: [jobId], references: [id])\n\n @@index([jobId])\n}\n\nmodel CleanupJobEvent {\n id String @id @default(cuid())\n jobId String\n level String\n message String\n progress Int?\n createdAt DateTime @default(now())\n\n job CleanupJob @relation(fields: [jobId], references: [id])\n\n @@index([jobId])\n}\n", - "inlineSchemaHash": "dd24c92a3ea02046b50b7bbe3851abe6159b6331a83d625584e807eef6fb4685", + "inlineSchema": "generator client {\n provider = \"prisma-client-js\"\n}\n\ndatasource db {\n provider = \"postgresql\"\n url = env(\"DATABASE_URL\")\n}\n\nenum MailProvider {\n GMAIL\n GMX\n WEBDE\n}\n\nenum UserRole {\n USER\n ADMIN\n}\n\nenum JobStatus {\n QUEUED\n RUNNING\n SUCCEEDED\n FAILED\n CANCELED\n}\n\nenum RuleActionType {\n MOVE\n DELETE\n ARCHIVE\n LABEL\n}\n\nenum RuleConditionType {\n HEADER\n SUBJECT\n FROM\n LIST_UNSUBSCRIBE\n LIST_ID\n}\n\nmodel Tenant {\n id String @id @default(cuid())\n name String\n isActive Boolean @default(true)\n exportJobs ExportJob[]\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n users User[]\n mailboxAccounts MailboxAccount[]\n rules Rule[]\n jobs CleanupJob[]\n}\n\nenum ExportStatus {\n QUEUED\n RUNNING\n DONE\n FAILED\n}\n\nmodel ExportJob {\n id String @id @default(cuid())\n tenantId String\n status ExportStatus @default(QUEUED)\n format String\n scope String\n filePath String?\n error String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n\n @@index([tenantId])\n}\n\nmodel User {\n id String @id @default(cuid())\n tenantId String\n email String @unique\n password String\n role UserRole @default(USER)\n isActive Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n}\n\nmodel MailboxAccount {\n id String @id @default(cuid())\n tenantId String\n email String\n provider MailProvider\n isActive Boolean @default(true)\n imapHost String\n imapPort Int\n imapTLS Boolean\n smtpHost String?\n smtpPort Int?\n smtpTLS Boolean?\n oauthToken String?\n oauthRefreshToken String?\n oauthAccessToken String?\n oauthExpiresAt DateTime?\n providerUserId String?\n appPassword String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n folders MailboxFolder[]\n jobs CleanupJob[]\n\n @@index([tenantId])\n}\n\nmodel MailboxFolder {\n id String @id @default(cuid())\n mailboxAccountId String\n name String\n remoteId String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n mailboxAccount MailboxAccount @relation(fields: [mailboxAccountId], references: [id])\n mailItems MailItem[]\n\n @@index([mailboxAccountId])\n}\n\nmodel MailItem {\n id String @id @default(cuid())\n folderId String\n messageId String\n subject String?\n from String?\n receivedAt DateTime?\n listId String?\n listUnsubscribe String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n folder MailboxFolder @relation(fields: [folderId], references: [id])\n\n @@index([folderId])\n @@index([messageId])\n}\n\nmodel Rule {\n id String @id @default(cuid())\n tenantId String\n name String\n enabled Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant 
@relation(fields: [tenantId], references: [id])\n conditions RuleCondition[]\n actions RuleAction[]\n\n @@index([tenantId])\n}\n\nmodel RuleCondition {\n id String @id @default(cuid())\n ruleId String\n type RuleConditionType\n value String\n\n rule Rule @relation(fields: [ruleId], references: [id])\n\n @@index([ruleId])\n}\n\nmodel RuleAction {\n id String @id @default(cuid())\n ruleId String\n type RuleActionType\n target String?\n\n rule Rule @relation(fields: [ruleId], references: [id])\n\n @@index([ruleId])\n}\n\nmodel CleanupJob {\n id String @id @default(cuid())\n tenantId String\n mailboxAccountId String\n status JobStatus @default(QUEUED)\n dryRun Boolean @default(true)\n unsubscribeEnabled Boolean @default(true)\n routingEnabled Boolean @default(true)\n startedAt DateTime?\n finishedAt DateTime?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n mailboxAccount MailboxAccount @relation(fields: [mailboxAccountId], references: [id])\n unsubscribeAttempts UnsubscribeAttempt[]\n events CleanupJobEvent[]\n\n @@index([tenantId])\n @@index([mailboxAccountId])\n}\n\nmodel UnsubscribeAttempt {\n id String @id @default(cuid())\n jobId String\n mailItemId String?\n method String\n target String\n status String\n createdAt DateTime @default(now())\n\n job CleanupJob @relation(fields: [jobId], references: [id])\n\n @@index([jobId])\n}\n\nmodel CleanupJobEvent {\n id String @id @default(cuid())\n jobId String\n level String\n message String\n progress Int?\n createdAt DateTime @default(now())\n\n job CleanupJob @relation(fields: [jobId], references: [id])\n\n @@index([jobId])\n}\n", + "inlineSchemaHash": "a7e2fb3971910c04a1b754e69e6718268efb7632d1b7e03a6ab8b0b6de7647ea", "copyEngine": true } config.dirname = '/' -config.runtimeDataModel = 
JSON.parse("{\"models\":{\"Tenant\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"users\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"User\",\"relationName\":\"TenantToUser\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccounts\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"MailboxAccountToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rules\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"User\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"email\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":true,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"password\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Str
ing\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"role\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"UserRole\",\"default\":\"USER\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"TenantToUser\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailboxAccount\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"email\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"provider\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailProvider\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapHost\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapPort\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapTLS\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Boolean\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpHost\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false
,\"isUpdatedAt\":false},{\"name\":\"smtpPort\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpTLS\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Boolean\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthRefreshToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthAccessToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthExpiresAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"providerUserId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"appPassword\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"MailboxAccountToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"folders\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxFolder\",\"relationName\":\"MailboxAccountToMailboxFolder\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToMailboxAccount\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailboxFolder\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRe
quired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccountId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"remoteId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"mailboxAccount\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"MailboxAccountToMailboxFolder\",\"relationFromFields\":[\"mailboxAccountId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailItems\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailItem\",\"relationName\":\"MailItemToMailboxFolder\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailItem\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"folderId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"messageId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"subject\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"from\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"receivedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGe
nerated\":false,\"isUpdatedAt\":false},{\"name\":\"listId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"listUnsubscribe\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"folder\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxFolder\",\"relationName\":\"MailItemToMailboxFolder\",\"relationFromFields\":[\"folderId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"Rule\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"enabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"RuleToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"conditions\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleCondition\",\"relationName\":\"RuleToRuleCondition\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt
\":false},{\"name\":\"actions\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleAction\",\"relationName\":\"RuleToRuleAction\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"RuleCondition\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"ruleId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"type\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleConditionType\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"value\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rule\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToRuleCondition\",\"relationFromFields\":[\"ruleId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"RuleAction\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"ruleId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"type\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleActionType\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"target\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rule\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToRuleAction\",\"relationFromFields\":[\"ruleId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"CleanupJob\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isLi
st\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccountId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"JobStatus\",\"default\":\"QUEUED\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"dryRun\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"unsubscribeEnabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"routingEnabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"startedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"finishedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"CleanupJobToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccount\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"CleanupJobToMailboxAccount\",\"relationFromFields\":[\"mailboxAccountId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"unsubscribeAttempts\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"UnsubscribeAttempt\",\"relationName\":\"CleanupJobToUnsubscribeAttempt\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"events\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\
":false,\"type\":\"CleanupJobEvent\",\"relationName\":\"CleanupJobToCleanupJobEvent\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"UnsubscribeAttempt\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailItemId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"method\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"target\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"job\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToUnsubscribeAttempt\",\"relationFromFields\":[\"jobId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"CleanupJobEvent\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"level\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"message\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"progress\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"
Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"job\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToCleanupJobEvent\",\"relationFromFields\":[\"jobId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false}},\"enums\":{\"MailProvider\":{\"values\":[{\"name\":\"GMAIL\",\"dbName\":null},{\"name\":\"GMX\",\"dbName\":null},{\"name\":\"WEBDE\",\"dbName\":null}],\"dbName\":null},\"UserRole\":{\"values\":[{\"name\":\"USER\",\"dbName\":null},{\"name\":\"ADMIN\",\"dbName\":null}],\"dbName\":null},\"JobStatus\":{\"values\":[{\"name\":\"QUEUED\",\"dbName\":null},{\"name\":\"RUNNING\",\"dbName\":null},{\"name\":\"SUCCEEDED\",\"dbName\":null},{\"name\":\"FAILED\",\"dbName\":null},{\"name\":\"CANCELED\",\"dbName\":null}],\"dbName\":null},\"RuleActionType\":{\"values\":[{\"name\":\"MOVE\",\"dbName\":null},{\"name\":\"DELETE\",\"dbName\":null},{\"name\":\"ARCHIVE\",\"dbName\":null},{\"name\":\"LABEL\",\"dbName\":null}],\"dbName\":null},\"RuleConditionType\":{\"values\":[{\"name\":\"HEADER\",\"dbName\":null},{\"name\":\"SUBJECT\",\"dbName\":null},{\"name\":\"FROM\",\"dbName\":null},{\"name\":\"LIST_UNSUBSCRIBE\",\"dbName\":null},{\"name\":\"LIST_ID\",\"dbName\":null}],\"dbName\":null}},\"types\":{}}") +config.runtimeDataModel = JSON.parse("{\"models\":{\"Tenant\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"exportJobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"ExportJob\",\"relationName\":\"ExportJobToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"users\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"User\",
\"relationName\":\"TenantToUser\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccounts\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"MailboxAccountToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rules\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"ExportJob\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"ExportStatus\",\"default\":\"QUEUED\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"format\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"scope\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"filePath\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"error\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"ExportJobToTe
nant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"User\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"email\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":true,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"password\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"role\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"UserRole\",\"default\":\"USER\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"TenantToUser\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailboxAccount\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"email\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"provider\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailProvider\",\"isGenerated\":false,\"isUpdatedAt\":f
alse},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapHost\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapPort\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapTLS\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Boolean\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpHost\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpPort\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpTLS\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Boolean\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthRefreshToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthAccessToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthExpiresAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"providerUserId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"appPassword\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":fals
e,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"MailboxAccountToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"folders\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxFolder\",\"relationName\":\"MailboxAccountToMailboxFolder\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToMailboxAccount\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailboxFolder\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccountId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"remoteId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"mailboxAccount\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"MailboxAccountToMailboxFolder\",\"relationFromFields\":[\"mailboxAccountId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailItems\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailItem\",\"relationName\":\"MailItemToMailboxFolder\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailItem\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerat
ed\":false,\"isUpdatedAt\":false},{\"name\":\"folderId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"messageId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"subject\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"from\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"receivedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"listId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"listUnsubscribe\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"folder\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxFolder\",\"relationName\":\"MailItemToMailboxFolder\",\"relationFromFields\":[\"folderId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"Rule\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"enabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"c
reatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"RuleToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"conditions\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleCondition\",\"relationName\":\"RuleToRuleCondition\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"actions\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleAction\",\"relationName\":\"RuleToRuleAction\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"RuleCondition\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"ruleId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"type\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleConditionType\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"value\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rule\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToRuleCondition\",\"relationFromFields\":[\"ruleId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"RuleAction\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"ruleId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"type\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":fal
se,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleActionType\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"target\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rule\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToRuleAction\",\"relationFromFields\":[\"ruleId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"CleanupJob\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccountId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"JobStatus\",\"default\":\"QUEUED\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"dryRun\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"unsubscribeEnabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"routingEnabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"startedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"finishedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequire
d\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"CleanupJobToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccount\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"CleanupJobToMailboxAccount\",\"relationFromFields\":[\"mailboxAccountId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"unsubscribeAttempts\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"UnsubscribeAttempt\",\"relationName\":\"CleanupJobToUnsubscribeAttempt\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"events\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJobEvent\",\"relationName\":\"CleanupJobToCleanupJobEvent\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"UnsubscribeAttempt\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailItemId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"method\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"target\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"job\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToUnsubscribeAttempt\",\"relationFromFields\":[\"jobId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"CleanupJobEvent\":{\"dbNa
me\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"level\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"message\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"progress\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"job\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToCleanupJobEvent\",\"relationFromFields\":[\"jobId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false}},\"enums\":{\"MailProvider\":{\"values\":[{\"name\":\"GMAIL\",\"dbName\":null},{\"name\":\"GMX\",\"dbName\":null},{\"name\":\"WEBDE\",\"dbName\":null}],\"dbName\":null},\"UserRole\":{\"values\":[{\"name\":\"USER\",\"dbName\":null},{\"name\":\"ADMIN\",\"dbName\":null}],\"dbName\":null},\"JobStatus\":{\"values\":[{\"name\":\"QUEUED\",\"dbName\":null},{\"name\":\"RUNNING\",\"dbName\":null},{\"name\":\"SUCCEEDED\",\"dbName\":null},{\"name\":\"FAILED\",\"dbName\":null},{\"name\":\"CANCELED\",\"dbName\":null}],\"dbName\":null},\"RuleActionType\":{\"values\":[{\"name\":\"MOVE\",\"dbName\":null},{\"name\":\"DELETE\",\"dbName\":null},{\"name\":\"ARCHIVE\",\"dbName\":null},{\"name\":\"LABEL\",\"dbName\":null}],\"dbName\":null},\"RuleConditionType\":{\"values\":[{\"name\":\"HEADER\",\"dbName\":null},{\"name\":\"SUBJECT\",\"dbName\":null},{\"name\":\"FROM\",\"dbName\":null},{\"name\":\"LIST_UNSUBSCRIBE\",\"dbName\":null},{\"name\":\"LIST_ID\",\"dbName\":null}],\"dbName\":null},\"ExportStatus\":{\"values\":[{\"name\":\"QUEUED\",\"dbName\":null},{\"name\":\"RUNNING\",\"dbName\":null},{\"name\":\"DONE\",\"dbName\":null},{\"name\":\"FAILED\",\"dbName\":null}],\"dbName\":null}},\"types\":{}}") defineDmmfProperty(exports.Prisma, config.runtimeDataModel) config.engineWasm = undefined diff --git a/backend/node_modules/.prisma/client/index-browser.js b/backend/node_modules/.prisma/client/index-browser.js index b89dcbb5..b6faae28 100644 --- a/backend/node_modules/.prisma/client/index-browser.js +++ b/backend/node_modules/.prisma/client/index-browser.js @@ -130,6 +130,18 @@ exports.Prisma.TenantScalarFieldEnum = { updatedAt: 'updatedAt' }; 
+exports.Prisma.ExportJobScalarFieldEnum = { + id: 'id', + tenantId: 'tenantId', + status: 'status', + format: 'format', + scope: 'scope', + filePath: 'filePath', + error: 'error', + createdAt: 'createdAt', + updatedAt: 'updatedAt' +}; + exports.Prisma.UserScalarFieldEnum = { id: 'id', tenantId: 'tenantId', @@ -255,6 +267,13 @@ exports.Prisma.NullsOrder = { first: 'first', last: 'last' }; +exports.ExportStatus = exports.$Enums.ExportStatus = { + QUEUED: 'QUEUED', + RUNNING: 'RUNNING', + DONE: 'DONE', + FAILED: 'FAILED' +}; + exports.UserRole = exports.$Enums.UserRole = { USER: 'USER', ADMIN: 'ADMIN' @@ -291,6 +310,7 @@ exports.JobStatus = exports.$Enums.JobStatus = { exports.Prisma.ModelName = { Tenant: 'Tenant', + ExportJob: 'ExportJob', User: 'User', MailboxAccount: 'MailboxAccount', MailboxFolder: 'MailboxFolder', diff --git a/backend/node_modules/.prisma/client/index.d.ts b/backend/node_modules/.prisma/client/index.d.ts index 79042949..26f6da32 100644 --- a/backend/node_modules/.prisma/client/index.d.ts +++ b/backend/node_modules/.prisma/client/index.d.ts @@ -18,6 +18,11 @@ export type PrismaPromise = $Public.PrismaPromise * */ export type Tenant = $Result.DefaultSelection +/** + * Model ExportJob + * + */ +export type ExportJob = $Result.DefaultSelection /** * Model User * @@ -73,7 +78,17 @@ export type CleanupJobEvent = $Result.DefaultSelection; + /** + * `prisma.exportJob`: Exposes CRUD operations for the **ExportJob** model. + * Example usage: + * ```ts + * // Fetch zero or more ExportJobs + * const exportJobs = await prisma.exportJob.findMany() + * ``` + */ + get exportJob(): Prisma.ExportJobDelegate; + /** * `prisma.user`: Exposes CRUD operations for the **User** model. * Example usage: @@ -817,6 +846,7 @@ export namespace Prisma { export const ModelName: { Tenant: 'Tenant', + ExportJob: 'ExportJob', User: 'User', MailboxAccount: 'MailboxAccount', MailboxFolder: 'MailboxFolder', @@ -842,7 +872,7 @@ export namespace Prisma { export type TypeMap = { meta: { - modelProps: "tenant" | "user" | "mailboxAccount" | "mailboxFolder" | "mailItem" | "rule" | "ruleCondition" | "ruleAction" | "cleanupJob" | "unsubscribeAttempt" | "cleanupJobEvent" + modelProps: "tenant" | "exportJob" | "user" | "mailboxAccount" | "mailboxFolder" | "mailItem" | "rule" | "ruleCondition" | "ruleAction" | "cleanupJob" | "unsubscribeAttempt" | "cleanupJobEvent" txIsolationLevel: Prisma.TransactionIsolationLevel } model: { @@ -916,6 +946,76 @@ export namespace Prisma { } } } + ExportJob: { + payload: Prisma.$ExportJobPayload + fields: Prisma.ExportJobFieldRefs + operations: { + findUnique: { + args: Prisma.ExportJobFindUniqueArgs + result: $Utils.PayloadToResult | null + } + findUniqueOrThrow: { + args: Prisma.ExportJobFindUniqueOrThrowArgs + result: $Utils.PayloadToResult + } + findFirst: { + args: Prisma.ExportJobFindFirstArgs + result: $Utils.PayloadToResult | null + } + findFirstOrThrow: { + args: Prisma.ExportJobFindFirstOrThrowArgs + result: $Utils.PayloadToResult + } + findMany: { + args: Prisma.ExportJobFindManyArgs + result: $Utils.PayloadToResult[] + } + create: { + args: Prisma.ExportJobCreateArgs + result: $Utils.PayloadToResult + } + createMany: { + args: Prisma.ExportJobCreateManyArgs + result: BatchPayload + } + createManyAndReturn: { + args: Prisma.ExportJobCreateManyAndReturnArgs + result: $Utils.PayloadToResult[] + } + delete: { + args: Prisma.ExportJobDeleteArgs + result: $Utils.PayloadToResult + } + update: { + args: Prisma.ExportJobUpdateArgs + result: $Utils.PayloadToResult + } + deleteMany: { 
+ args: Prisma.ExportJobDeleteManyArgs + result: BatchPayload + } + updateMany: { + args: Prisma.ExportJobUpdateManyArgs + result: BatchPayload + } + upsert: { + args: Prisma.ExportJobUpsertArgs + result: $Utils.PayloadToResult + } + aggregate: { + args: Prisma.ExportJobAggregateArgs + result: $Utils.Optional + } + groupBy: { + args: Prisma.ExportJobGroupByArgs + result: $Utils.Optional[] + } + count: { + args: Prisma.ExportJobCountArgs + result: $Utils.Optional | number + } + } + } User: { payload: Prisma.$UserPayload fields: Prisma.UserFieldRefs @@ -1777,6 +1877,7 @@ export namespace Prisma { */ export type TenantCountOutputType = { + exportJobs: number users: number mailboxAccounts: number rules: number @@ -1784,6 +1885,7 @@ export namespace Prisma { } export type TenantCountOutputTypeSelect = { + exportJobs?: boolean | TenantCountOutputTypeCountExportJobsArgs users?: boolean | TenantCountOutputTypeCountUsersArgs mailboxAccounts?: boolean | TenantCountOutputTypeCountMailboxAccountsArgs rules?: boolean | TenantCountOutputTypeCountRulesArgs @@ -1801,6 +1903,13 @@ export namespace Prisma { select?: TenantCountOutputTypeSelect | null } + /** + * TenantCountOutputType without action + */ + export type TenantCountOutputTypeCountExportJobsArgs = { + where?: ExportJobWhereInput + } + /** * TenantCountOutputType without action */ @@ -2149,6 +2258,7 @@ export namespace Prisma { isActive?: boolean createdAt?: boolean updatedAt?: boolean + exportJobs?: boolean | Tenant$exportJobsArgs users?: boolean | Tenant$usersArgs mailboxAccounts?: boolean | Tenant$mailboxAccountsArgs rules?: boolean | Tenant$rulesArgs @@ -2173,6 +2283,7 @@ export namespace Prisma { } export type TenantInclude = { + exportJobs?: boolean | Tenant$exportJobsArgs users?: boolean | Tenant$usersArgs mailboxAccounts?: boolean | Tenant$mailboxAccountsArgs rules?: boolean | Tenant$rulesArgs @@ -2184,6 +2295,7 @@ export namespace Prisma { export type $TenantPayload = { name: "Tenant" objects: { + exportJobs: Prisma.$ExportJobPayload[] users: Prisma.$UserPayload[] mailboxAccounts: Prisma.$MailboxAccountPayload[] rules: Prisma.$RulePayload[] @@ -2559,6 +2671,7 @@ export namespace Prisma { */ export interface Prisma__TenantClient extends Prisma.PrismaPromise { readonly [Symbol.toStringTag]: "PrismaPromise" + exportJobs = {}>(args?: Subset>): Prisma.PrismaPromise<$Result.GetResult, T, "findMany"> | Null> users = {}>(args?: Subset>): Prisma.PrismaPromise<$Result.GetResult, T, "findMany"> | Null> mailboxAccounts = {}>(args?: Subset>): Prisma.PrismaPromise<$Result.GetResult, T, "findMany"> | Null> rules = {}>(args?: Subset>): Prisma.PrismaPromise<$Result.GetResult, T, "findMany"> | Null> @@ -2910,6 +3023,26 @@ export namespace Prisma { where?: TenantWhereInput } + /** + * Tenant.exportJobs + */ + export type Tenant$exportJobsArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + where?: ExportJobWhereInput + orderBy?: ExportJobOrderByWithRelationInput | ExportJobOrderByWithRelationInput[] + cursor?: ExportJobWhereUniqueInput + take?: number + skip?: number + distinct?: ExportJobScalarFieldEnum | ExportJobScalarFieldEnum[] + } + /** * Tenant.users */ @@ -3005,6 +3138,987 @@ export namespace Prisma { } + /** + * Model ExportJob + */ + + export type AggregateExportJob = { + _count: ExportJobCountAggregateOutputType | null + _min: ExportJobMinAggregateOutputType | null + _max: 
ExportJobMaxAggregateOutputType | null + } + + export type ExportJobMinAggregateOutputType = { + id: string | null + tenantId: string | null + status: $Enums.ExportStatus | null + format: string | null + scope: string | null + filePath: string | null + error: string | null + createdAt: Date | null + updatedAt: Date | null + } + + export type ExportJobMaxAggregateOutputType = { + id: string | null + tenantId: string | null + status: $Enums.ExportStatus | null + format: string | null + scope: string | null + filePath: string | null + error: string | null + createdAt: Date | null + updatedAt: Date | null + } + + export type ExportJobCountAggregateOutputType = { + id: number + tenantId: number + status: number + format: number + scope: number + filePath: number + error: number + createdAt: number + updatedAt: number + _all: number + } + + + export type ExportJobMinAggregateInputType = { + id?: true + tenantId?: true + status?: true + format?: true + scope?: true + filePath?: true + error?: true + createdAt?: true + updatedAt?: true + } + + export type ExportJobMaxAggregateInputType = { + id?: true + tenantId?: true + status?: true + format?: true + scope?: true + filePath?: true + error?: true + createdAt?: true + updatedAt?: true + } + + export type ExportJobCountAggregateInputType = { + id?: true + tenantId?: true + status?: true + format?: true + scope?: true + filePath?: true + error?: true + createdAt?: true + updatedAt?: true + _all?: true + } + + export type ExportJobAggregateArgs = { + /** + * Filter which ExportJob to aggregate. + */ + where?: ExportJobWhereInput + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs} + * + * Determine the order of ExportJobs to fetch. + */ + orderBy?: ExportJobOrderByWithRelationInput | ExportJobOrderByWithRelationInput[] + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs} + * + * Sets the start position + */ + cursor?: ExportJobWhereUniqueInput + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} + * + * Take `±n` ExportJobs from the position of the cursor. + */ + take?: number + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} + * + * Skip the first `n` ExportJobs. + */ + skip?: number + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs} + * + * Count returned ExportJobs + **/ + _count?: true | ExportJobCountAggregateInputType + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs} + * + * Select which fields to find the minimum value + **/ + _min?: ExportJobMinAggregateInputType + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs} + * + * Select which fields to find the maximum value + **/ + _max?: ExportJobMaxAggregateInputType + } + + export type GetExportJobAggregateType = { + [P in keyof T & keyof AggregateExportJob]: P extends '_count' | 'count' + ? T[P] extends true + ? 
number + : GetScalarType + : GetScalarType + } + + + + + export type ExportJobGroupByArgs = { + where?: ExportJobWhereInput + orderBy?: ExportJobOrderByWithAggregationInput | ExportJobOrderByWithAggregationInput[] + by: ExportJobScalarFieldEnum[] | ExportJobScalarFieldEnum + having?: ExportJobScalarWhereWithAggregatesInput + take?: number + skip?: number + _count?: ExportJobCountAggregateInputType | true + _min?: ExportJobMinAggregateInputType + _max?: ExportJobMaxAggregateInputType + } + + export type ExportJobGroupByOutputType = { + id: string + tenantId: string + status: $Enums.ExportStatus + format: string + scope: string + filePath: string | null + error: string | null + createdAt: Date + updatedAt: Date + _count: ExportJobCountAggregateOutputType | null + _min: ExportJobMinAggregateOutputType | null + _max: ExportJobMaxAggregateOutputType | null + } + + type GetExportJobGroupByPayload = Prisma.PrismaPromise< + Array< + PickEnumerable & + { + [P in ((keyof T) & (keyof ExportJobGroupByOutputType))]: P extends '_count' + ? T[P] extends boolean + ? number + : GetScalarType + : GetScalarType + } + > + > + + + export type ExportJobSelect = $Extensions.GetSelect<{ + id?: boolean + tenantId?: boolean + status?: boolean + format?: boolean + scope?: boolean + filePath?: boolean + error?: boolean + createdAt?: boolean + updatedAt?: boolean + tenant?: boolean | TenantDefaultArgs + }, ExtArgs["result"]["exportJob"]> + + export type ExportJobSelectCreateManyAndReturn = $Extensions.GetSelect<{ + id?: boolean + tenantId?: boolean + status?: boolean + format?: boolean + scope?: boolean + filePath?: boolean + error?: boolean + createdAt?: boolean + updatedAt?: boolean + tenant?: boolean | TenantDefaultArgs + }, ExtArgs["result"]["exportJob"]> + + export type ExportJobSelectScalar = { + id?: boolean + tenantId?: boolean + status?: boolean + format?: boolean + scope?: boolean + filePath?: boolean + error?: boolean + createdAt?: boolean + updatedAt?: boolean + } + + export type ExportJobInclude = { + tenant?: boolean | TenantDefaultArgs + } + export type ExportJobIncludeCreateManyAndReturn = { + tenant?: boolean | TenantDefaultArgs + } + + export type $ExportJobPayload = { + name: "ExportJob" + objects: { + tenant: Prisma.$TenantPayload + } + scalars: $Extensions.GetPayloadResult<{ + id: string + tenantId: string + status: $Enums.ExportStatus + format: string + scope: string + filePath: string | null + error: string | null + createdAt: Date + updatedAt: Date + }, ExtArgs["result"]["exportJob"]> + composites: {} + } + + type ExportJobGetPayload = $Result.GetResult + + type ExportJobCountArgs = + Omit & { + select?: ExportJobCountAggregateInputType | true + } + + export interface ExportJobDelegate { + [K: symbol]: { types: Prisma.TypeMap['model']['ExportJob'], meta: { name: 'ExportJob' } } + /** + * Find zero or one ExportJob that matches the filter. + * @param {ExportJobFindUniqueArgs} args - Arguments to find a ExportJob + * @example + * // Get one ExportJob + * const exportJob = await prisma.exportJob.findUnique({ + * where: { + * // ... provide filter here + * } + * }) + */ + findUnique(args: SelectSubset>): Prisma__ExportJobClient<$Result.GetResult, T, "findUnique"> | null, null, ExtArgs> + + /** + * Find one ExportJob that matches the filter or throw an error with `error.code='P2025'` + * if no matches were found. 
+ * @param {ExportJobFindUniqueOrThrowArgs} args - Arguments to find a ExportJob + * @example + * // Get one ExportJob + * const exportJob = await prisma.exportJob.findUniqueOrThrow({ + * where: { + * // ... provide filter here + * } + * }) + */ + findUniqueOrThrow(args: SelectSubset>): Prisma__ExportJobClient<$Result.GetResult, T, "findUniqueOrThrow">, never, ExtArgs> + + /** + * Find the first ExportJob that matches the filter. + * Note, that providing `undefined` is treated as the value not being there. + * Read more here: https://pris.ly/d/null-undefined + * @param {ExportJobFindFirstArgs} args - Arguments to find a ExportJob + * @example + * // Get one ExportJob + * const exportJob = await prisma.exportJob.findFirst({ + * where: { + * // ... provide filter here + * } + * }) + */ + findFirst(args?: SelectSubset>): Prisma__ExportJobClient<$Result.GetResult, T, "findFirst"> | null, null, ExtArgs> + + /** + * Find the first ExportJob that matches the filter or + * throw `PrismaKnownClientError` with `P2025` code if no matches were found. + * Note, that providing `undefined` is treated as the value not being there. + * Read more here: https://pris.ly/d/null-undefined + * @param {ExportJobFindFirstOrThrowArgs} args - Arguments to find a ExportJob + * @example + * // Get one ExportJob + * const exportJob = await prisma.exportJob.findFirstOrThrow({ + * where: { + * // ... provide filter here + * } + * }) + */ + findFirstOrThrow(args?: SelectSubset>): Prisma__ExportJobClient<$Result.GetResult, T, "findFirstOrThrow">, never, ExtArgs> + + /** + * Find zero or more ExportJobs that matches the filter. + * Note, that providing `undefined` is treated as the value not being there. + * Read more here: https://pris.ly/d/null-undefined + * @param {ExportJobFindManyArgs} args - Arguments to filter and select certain fields only. + * @example + * // Get all ExportJobs + * const exportJobs = await prisma.exportJob.findMany() + * + * // Get first 10 ExportJobs + * const exportJobs = await prisma.exportJob.findMany({ take: 10 }) + * + * // Only select the `id` + * const exportJobWithIdOnly = await prisma.exportJob.findMany({ select: { id: true } }) + * + */ + findMany(args?: SelectSubset>): Prisma.PrismaPromise<$Result.GetResult, T, "findMany">> + + /** + * Create a ExportJob. + * @param {ExportJobCreateArgs} args - Arguments to create a ExportJob. + * @example + * // Create one ExportJob + * const ExportJob = await prisma.exportJob.create({ + * data: { + * // ... data to create a ExportJob + * } + * }) + * + */ + create(args: SelectSubset>): Prisma__ExportJobClient<$Result.GetResult, T, "create">, never, ExtArgs> + + /** + * Create many ExportJobs. + * @param {ExportJobCreateManyArgs} args - Arguments to create many ExportJobs. + * @example + * // Create many ExportJobs + * const exportJob = await prisma.exportJob.createMany({ + * data: [ + * // ... provide data here + * ] + * }) + * + */ + createMany(args?: SelectSubset>): Prisma.PrismaPromise + + /** + * Create many ExportJobs and returns the data saved in the database. + * @param {ExportJobCreateManyAndReturnArgs} args - Arguments to create many ExportJobs. + * @example + * // Create many ExportJobs + * const exportJob = await prisma.exportJob.createManyAndReturn({ + * data: [ + * // ... provide data here + * ] + * }) + * + * // Create many ExportJobs and only return the `id` + * const exportJobWithIdOnly = await prisma.exportJob.createManyAndReturn({ + * select: { id: true }, + * data: [ + * // ... 
provide data here + * ] + * }) + * Note, that providing `undefined` is treated as the value not being there. + * Read more here: https://pris.ly/d/null-undefined + * + */ + createManyAndReturn(args?: SelectSubset>): Prisma.PrismaPromise<$Result.GetResult, T, "createManyAndReturn">> + + /** + * Delete a ExportJob. + * @param {ExportJobDeleteArgs} args - Arguments to delete one ExportJob. + * @example + * // Delete one ExportJob + * const ExportJob = await prisma.exportJob.delete({ + * where: { + * // ... filter to delete one ExportJob + * } + * }) + * + */ + delete(args: SelectSubset>): Prisma__ExportJobClient<$Result.GetResult, T, "delete">, never, ExtArgs> + + /** + * Update one ExportJob. + * @param {ExportJobUpdateArgs} args - Arguments to update one ExportJob. + * @example + * // Update one ExportJob + * const exportJob = await prisma.exportJob.update({ + * where: { + * // ... provide filter here + * }, + * data: { + * // ... provide data here + * } + * }) + * + */ + update(args: SelectSubset>): Prisma__ExportJobClient<$Result.GetResult, T, "update">, never, ExtArgs> + + /** + * Delete zero or more ExportJobs. + * @param {ExportJobDeleteManyArgs} args - Arguments to filter ExportJobs to delete. + * @example + * // Delete a few ExportJobs + * const { count } = await prisma.exportJob.deleteMany({ + * where: { + * // ... provide filter here + * } + * }) + * + */ + deleteMany(args?: SelectSubset>): Prisma.PrismaPromise + + /** + * Update zero or more ExportJobs. + * Note, that providing `undefined` is treated as the value not being there. + * Read more here: https://pris.ly/d/null-undefined + * @param {ExportJobUpdateManyArgs} args - Arguments to update one or more rows. + * @example + * // Update many ExportJobs + * const exportJob = await prisma.exportJob.updateMany({ + * where: { + * // ... provide filter here + * }, + * data: { + * // ... provide data here + * } + * }) + * + */ + updateMany(args: SelectSubset>): Prisma.PrismaPromise + + /** + * Create or update one ExportJob. + * @param {ExportJobUpsertArgs} args - Arguments to update or create a ExportJob. + * @example + * // Update or create a ExportJob + * const exportJob = await prisma.exportJob.upsert({ + * create: { + * // ... data to create a ExportJob + * }, + * update: { + * // ... in case it already exists, update + * }, + * where: { + * // ... the filter for the ExportJob we want to update + * } + * }) + */ + upsert(args: SelectSubset>): Prisma__ExportJobClient<$Result.GetResult, T, "upsert">, never, ExtArgs> + + + /** + * Count the number of ExportJobs. + * Note, that providing `undefined` is treated as the value not being there. + * Read more here: https://pris.ly/d/null-undefined + * @param {ExportJobCountArgs} args - Arguments to filter ExportJobs to count. + * @example + * // Count the number of ExportJobs + * const count = await prisma.exportJob.count({ + * where: { + * // ... the filter for the ExportJobs we want to count + * } + * }) + **/ + count( + args?: Subset, + ): Prisma.PrismaPromise< + T extends $Utils.Record<'select', any> + ? T['select'] extends true + ? number + : GetScalarType + : number + > + + /** + * Allows you to perform aggregations operations on a ExportJob. + * Note, that providing `undefined` is treated as the value not being there. + * Read more here: https://pris.ly/d/null-undefined + * @param {ExportJobAggregateArgs} args - Select which aggregations you would like to apply and on what fields. 
+ * @example + * // Ordered by age ascending + * // Where email contains prisma.io + * // Limited to the 10 users + * const aggregations = await prisma.user.aggregate({ + * _avg: { + * age: true, + * }, + * where: { + * email: { + * contains: "prisma.io", + * }, + * }, + * orderBy: { + * age: "asc", + * }, + * take: 10, + * }) + **/ + aggregate(args: Subset): Prisma.PrismaPromise> + + /** + * Group by ExportJob. + * Note, that providing `undefined` is treated as the value not being there. + * Read more here: https://pris.ly/d/null-undefined + * @param {ExportJobGroupByArgs} args - Group by arguments. + * @example + * // Group by city, order by createdAt, get count + * const result = await prisma.user.groupBy({ + * by: ['city', 'createdAt'], + * orderBy: { + * createdAt: true + * }, + * _count: { + * _all: true + * }, + * }) + * + **/ + groupBy< + T extends ExportJobGroupByArgs, + HasSelectOrTake extends Or< + Extends<'skip', Keys>, + Extends<'take', Keys> + >, + OrderByArg extends True extends HasSelectOrTake + ? { orderBy: ExportJobGroupByArgs['orderBy'] } + : { orderBy?: ExportJobGroupByArgs['orderBy'] }, + OrderFields extends ExcludeUnderscoreKeys>>, + ByFields extends MaybeTupleToUnion, + ByValid extends Has, + HavingFields extends GetHavingFields, + HavingValid extends Has, + ByEmpty extends T['by'] extends never[] ? True : False, + InputErrors extends ByEmpty extends True + ? `Error: "by" must not be empty.` + : HavingValid extends False + ? { + [P in HavingFields]: P extends ByFields + ? never + : P extends string + ? `Error: Field "${P}" used in "having" needs to be provided in "by".` + : [ + Error, + 'Field ', + P, + ` in "having" needs to be provided in "by"`, + ] + }[HavingFields] + : 'take' extends Keys + ? 'orderBy' extends Keys + ? ByValid extends True + ? {} + : { + [P in OrderFields]: P extends ByFields + ? never + : `Error: Field "${P}" in "orderBy" needs to be provided in "by"` + }[OrderFields] + : 'Error: If you provide "take", you also need to provide "orderBy"' + : 'skip' extends Keys + ? 'orderBy' extends Keys + ? ByValid extends True + ? {} + : { + [P in OrderFields]: P extends ByFields + ? never + : `Error: Field "${P}" in "orderBy" needs to be provided in "by"` + }[OrderFields] + : 'Error: If you provide "skip", you also need to provide "orderBy"' + : ByValid extends True + ? {} + : { + [P in OrderFields]: P extends ByFields + ? never + : `Error: Field "${P}" in "orderBy" needs to be provided in "by"` + }[OrderFields] + >(args: SubsetIntersection & InputErrors): {} extends InputErrors ? GetExportJobGroupByPayload : Prisma.PrismaPromise + /** + * Fields of the ExportJob model + */ + readonly fields: ExportJobFieldRefs; + } + + /** + * The delegate class that acts as a "Promise-like" for ExportJob. + * Why is this prefixed with `Prisma__`? + * Because we want to prevent naming conflicts as mentioned in + * https://github.com/prisma/prisma-client-js/issues/707 + */ + export interface Prisma__ExportJobClient extends Prisma.PrismaPromise { + readonly [Symbol.toStringTag]: "PrismaPromise" + tenant = {}>(args?: Subset>): Prisma__TenantClient<$Result.GetResult, T, "findUniqueOrThrow"> | Null, Null, ExtArgs> + /** + * Attaches callbacks for the resolution and/or rejection of the Promise. + * @param onfulfilled The callback to execute when the Promise is resolved. + * @param onrejected The callback to execute when the Promise is rejected. + * @returns A Promise for the completion of which ever callback is executed. 
+ */ + then(onfulfilled?: ((value: T) => TResult1 | PromiseLike) | undefined | null, onrejected?: ((reason: any) => TResult2 | PromiseLike) | undefined | null): $Utils.JsPromise + /** + * Attaches a callback for only the rejection of the Promise. + * @param onrejected The callback to execute when the Promise is rejected. + * @returns A Promise for the completion of the callback. + */ + catch(onrejected?: ((reason: any) => TResult | PromiseLike) | undefined | null): $Utils.JsPromise + /** + * Attaches a callback that is invoked when the Promise is settled (fulfilled or rejected). The + * resolved value cannot be modified from the callback. + * @param onfinally The callback to execute when the Promise is settled (fulfilled or rejected). + * @returns A Promise for the completion of the callback. + */ + finally(onfinally?: (() => void) | undefined | null): $Utils.JsPromise + } + + + + + /** + * Fields of the ExportJob model + */ + interface ExportJobFieldRefs { + readonly id: FieldRef<"ExportJob", 'String'> + readonly tenantId: FieldRef<"ExportJob", 'String'> + readonly status: FieldRef<"ExportJob", 'ExportStatus'> + readonly format: FieldRef<"ExportJob", 'String'> + readonly scope: FieldRef<"ExportJob", 'String'> + readonly filePath: FieldRef<"ExportJob", 'String'> + readonly error: FieldRef<"ExportJob", 'String'> + readonly createdAt: FieldRef<"ExportJob", 'DateTime'> + readonly updatedAt: FieldRef<"ExportJob", 'DateTime'> + } + + + // Custom InputTypes + /** + * ExportJob findUnique + */ + export type ExportJobFindUniqueArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + /** + * Filter, which ExportJob to fetch. + */ + where: ExportJobWhereUniqueInput + } + + /** + * ExportJob findUniqueOrThrow + */ + export type ExportJobFindUniqueOrThrowArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + /** + * Filter, which ExportJob to fetch. + */ + where: ExportJobWhereUniqueInput + } + + /** + * ExportJob findFirst + */ + export type ExportJobFindFirstArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + /** + * Filter, which ExportJob to fetch. + */ + where?: ExportJobWhereInput + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs} + * + * Determine the order of ExportJobs to fetch. + */ + orderBy?: ExportJobOrderByWithRelationInput | ExportJobOrderByWithRelationInput[] + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs} + * + * Sets the position for searching for ExportJobs. + */ + cursor?: ExportJobWhereUniqueInput + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} + * + * Take `±n` ExportJobs from the position of the cursor. + */ + take?: number + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} + * + * Skip the first `n` ExportJobs. + */ + skip?: number + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/distinct Distinct Docs} + * + * Filter by unique combinations of ExportJobs. 
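A small sketch of a point lookup using the `ExportJobFindUniqueArgs` shape above, e.g. for polling a single job's status; the function name is illustrative.

```ts
import { PrismaClient } from '@prisma/client'

const prisma = new PrismaClient()

// Fetch one ExportJob by primary key, pulling in its tenant relation.
async function getExportJob(id: string) {
  return prisma.exportJob.findUnique({
    where: { id },
    include: { tenant: true },
  })
}
```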
+ */ + distinct?: ExportJobScalarFieldEnum | ExportJobScalarFieldEnum[] + } + + /** + * ExportJob findFirstOrThrow + */ + export type ExportJobFindFirstOrThrowArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + /** + * Filter, which ExportJob to fetch. + */ + where?: ExportJobWhereInput + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs} + * + * Determine the order of ExportJobs to fetch. + */ + orderBy?: ExportJobOrderByWithRelationInput | ExportJobOrderByWithRelationInput[] + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs} + * + * Sets the position for searching for ExportJobs. + */ + cursor?: ExportJobWhereUniqueInput + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} + * + * Take `±n` ExportJobs from the position of the cursor. + */ + take?: number + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} + * + * Skip the first `n` ExportJobs. + */ + skip?: number + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/distinct Distinct Docs} + * + * Filter by unique combinations of ExportJobs. + */ + distinct?: ExportJobScalarFieldEnum | ExportJobScalarFieldEnum[] + } + + /** + * ExportJob findMany + */ + export type ExportJobFindManyArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + /** + * Filter, which ExportJobs to fetch. + */ + where?: ExportJobWhereInput + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs} + * + * Determine the order of ExportJobs to fetch. + */ + orderBy?: ExportJobOrderByWithRelationInput | ExportJobOrderByWithRelationInput[] + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs} + * + * Sets the position for listing ExportJobs. + */ + cursor?: ExportJobWhereUniqueInput + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} + * + * Take `±n` ExportJobs from the position of the cursor. + */ + take?: number + /** + * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} + * + * Skip the first `n` ExportJobs. + */ + skip?: number + distinct?: ExportJobScalarFieldEnum | ExportJobScalarFieldEnum[] + } + + /** + * ExportJob create + */ + export type ExportJobCreateArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + /** + * The data needed to create a ExportJob. + */ + data: XOR + } + + /** + * ExportJob createMany + */ + export type ExportJobCreateManyArgs = { + /** + * The data used to create many ExportJobs. 
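Putting the `create`/`findMany` argument types above to work: a sketch of queueing a job and paging through a tenant's jobs. The `'csv'`/`'users'` values and the page size are illustrative, not defaults from the schema.

```ts
import { PrismaClient } from '@prisma/client'

const prisma = new PrismaClient()

// Queue a new export; status falls back to QUEUED via the schema default.
async function queueExport(tenantId: string) {
  return prisma.exportJob.create({
    data: { tenantId, format: 'csv', scope: 'users' },
  })
}

// Page through a tenant's export jobs, newest first.
async function listExports(tenantId: string, take = 20, skip = 0) {
  return prisma.exportJob.findMany({
    where: { tenantId },
    orderBy: { createdAt: 'desc' },
    take,
    skip,
  })
}
```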
+ */ + data: ExportJobCreateManyInput | ExportJobCreateManyInput[] + skipDuplicates?: boolean + } + + /** + * ExportJob createManyAndReturn + */ + export type ExportJobCreateManyAndReturnArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelectCreateManyAndReturn | null + /** + * The data used to create many ExportJobs. + */ + data: ExportJobCreateManyInput | ExportJobCreateManyInput[] + skipDuplicates?: boolean + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobIncludeCreateManyAndReturn | null + } + + /** + * ExportJob update + */ + export type ExportJobUpdateArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + /** + * The data needed to update a ExportJob. + */ + data: XOR + /** + * Choose, which ExportJob to update. + */ + where: ExportJobWhereUniqueInput + } + + /** + * ExportJob updateMany + */ + export type ExportJobUpdateManyArgs = { + /** + * The data used to update ExportJobs. + */ + data: XOR + /** + * Filter which ExportJobs to update + */ + where?: ExportJobWhereInput + } + + /** + * ExportJob upsert + */ + export type ExportJobUpsertArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + /** + * The filter to search for the ExportJob to update in case it exists. + */ + where: ExportJobWhereUniqueInput + /** + * In case the ExportJob found by the `where` argument doesn't exist, create a new ExportJob with this data. + */ + create: XOR + /** + * In case the ExportJob was found with the provided `where` argument, update it with this data. + */ + update: XOR + } + + /** + * ExportJob delete + */ + export type ExportJobDeleteArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + /** + * Filter which ExportJob to delete. 
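Finally, a maintenance sketch using the bulk `deleteMany` delegate declared earlier; the 30-day retention window is an assumption, nothing in the schema enforces it.

```ts
import { PrismaClient } from '@prisma/client'

const prisma = new PrismaClient()

// Prune finished or failed export jobs older than roughly 30 days.
async function pruneOldExports(): Promise<number> {
  const cutoff = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000)
  const { count } = await prisma.exportJob.deleteMany({
    where: {
      status: { in: ['DONE', 'FAILED'] },
      updatedAt: { lt: cutoff },
    },
  })
  return count
}
```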
+ */ + where: ExportJobWhereUniqueInput + } + + /** + * ExportJob deleteMany + */ + export type ExportJobDeleteManyArgs = { + /** + * Filter which ExportJobs to delete + */ + where?: ExportJobWhereInput + } + + /** + * ExportJob without action + */ + export type ExportJobDefaultArgs = { + /** + * Select specific fields to fetch from the ExportJob + */ + select?: ExportJobSelect | null + /** + * Choose, which related nodes to fetch as well + */ + include?: ExportJobInclude | null + } + + /** * Model User */ @@ -12986,6 +14100,21 @@ export namespace Prisma { export type TenantScalarFieldEnum = (typeof TenantScalarFieldEnum)[keyof typeof TenantScalarFieldEnum] + export const ExportJobScalarFieldEnum: { + id: 'id', + tenantId: 'tenantId', + status: 'status', + format: 'format', + scope: 'scope', + filePath: 'filePath', + error: 'error', + createdAt: 'createdAt', + updatedAt: 'updatedAt' + }; + + export type ExportJobScalarFieldEnum = (typeof ExportJobScalarFieldEnum)[keyof typeof ExportJobScalarFieldEnum] + + export const UserScalarFieldEnum: { id: 'id', tenantId: 'tenantId', @@ -13191,6 +14320,20 @@ export namespace Prisma { + /** + * Reference to a field of type 'ExportStatus' + */ + export type EnumExportStatusFieldRefInput<$PrismaModel> = FieldRefInputType<$PrismaModel, 'ExportStatus'> + + + + /** + * Reference to a field of type 'ExportStatus[]' + */ + export type ListEnumExportStatusFieldRefInput<$PrismaModel> = FieldRefInputType<$PrismaModel, 'ExportStatus[]'> + + + /** * Reference to a field of type 'UserRole' */ @@ -13301,6 +14444,7 @@ export namespace Prisma { isActive?: BoolFilter<"Tenant"> | boolean createdAt?: DateTimeFilter<"Tenant"> | Date | string updatedAt?: DateTimeFilter<"Tenant"> | Date | string + exportJobs?: ExportJobListRelationFilter users?: UserListRelationFilter mailboxAccounts?: MailboxAccountListRelationFilter rules?: RuleListRelationFilter @@ -13313,6 +14457,7 @@ export namespace Prisma { isActive?: SortOrder createdAt?: SortOrder updatedAt?: SortOrder + exportJobs?: ExportJobOrderByRelationAggregateInput users?: UserOrderByRelationAggregateInput mailboxAccounts?: MailboxAccountOrderByRelationAggregateInput rules?: RuleOrderByRelationAggregateInput @@ -13328,6 +14473,7 @@ export namespace Prisma { isActive?: BoolFilter<"Tenant"> | boolean createdAt?: DateTimeFilter<"Tenant"> | Date | string updatedAt?: DateTimeFilter<"Tenant"> | Date | string + exportJobs?: ExportJobListRelationFilter users?: UserListRelationFilter mailboxAccounts?: MailboxAccountListRelationFilter rules?: RuleListRelationFilter @@ -13356,6 +14502,81 @@ export namespace Prisma { updatedAt?: DateTimeWithAggregatesFilter<"Tenant"> | Date | string } + export type ExportJobWhereInput = { + AND?: ExportJobWhereInput | ExportJobWhereInput[] + OR?: ExportJobWhereInput[] + NOT?: ExportJobWhereInput | ExportJobWhereInput[] + id?: StringFilter<"ExportJob"> | string + tenantId?: StringFilter<"ExportJob"> | string + status?: EnumExportStatusFilter<"ExportJob"> | $Enums.ExportStatus + format?: StringFilter<"ExportJob"> | string + scope?: StringFilter<"ExportJob"> | string + filePath?: StringNullableFilter<"ExportJob"> | string | null + error?: StringNullableFilter<"ExportJob"> | string | null + createdAt?: DateTimeFilter<"ExportJob"> | Date | string + updatedAt?: DateTimeFilter<"ExportJob"> | Date | string + tenant?: XOR + } + + export type ExportJobOrderByWithRelationInput = { + id?: SortOrder + tenantId?: SortOrder + status?: SortOrder + format?: SortOrder + scope?: SortOrder + filePath?: SortOrderInput | 
SortOrder + error?: SortOrderInput | SortOrder + createdAt?: SortOrder + updatedAt?: SortOrder + tenant?: TenantOrderByWithRelationInput + } + + export type ExportJobWhereUniqueInput = Prisma.AtLeast<{ + id?: string + AND?: ExportJobWhereInput | ExportJobWhereInput[] + OR?: ExportJobWhereInput[] + NOT?: ExportJobWhereInput | ExportJobWhereInput[] + tenantId?: StringFilter<"ExportJob"> | string + status?: EnumExportStatusFilter<"ExportJob"> | $Enums.ExportStatus + format?: StringFilter<"ExportJob"> | string + scope?: StringFilter<"ExportJob"> | string + filePath?: StringNullableFilter<"ExportJob"> | string | null + error?: StringNullableFilter<"ExportJob"> | string | null + createdAt?: DateTimeFilter<"ExportJob"> | Date | string + updatedAt?: DateTimeFilter<"ExportJob"> | Date | string + tenant?: XOR + }, "id"> + + export type ExportJobOrderByWithAggregationInput = { + id?: SortOrder + tenantId?: SortOrder + status?: SortOrder + format?: SortOrder + scope?: SortOrder + filePath?: SortOrderInput | SortOrder + error?: SortOrderInput | SortOrder + createdAt?: SortOrder + updatedAt?: SortOrder + _count?: ExportJobCountOrderByAggregateInput + _max?: ExportJobMaxOrderByAggregateInput + _min?: ExportJobMinOrderByAggregateInput + } + + export type ExportJobScalarWhereWithAggregatesInput = { + AND?: ExportJobScalarWhereWithAggregatesInput | ExportJobScalarWhereWithAggregatesInput[] + OR?: ExportJobScalarWhereWithAggregatesInput[] + NOT?: ExportJobScalarWhereWithAggregatesInput | ExportJobScalarWhereWithAggregatesInput[] + id?: StringWithAggregatesFilter<"ExportJob"> | string + tenantId?: StringWithAggregatesFilter<"ExportJob"> | string + status?: EnumExportStatusWithAggregatesFilter<"ExportJob"> | $Enums.ExportStatus + format?: StringWithAggregatesFilter<"ExportJob"> | string + scope?: StringWithAggregatesFilter<"ExportJob"> | string + filePath?: StringNullableWithAggregatesFilter<"ExportJob"> | string | null + error?: StringNullableWithAggregatesFilter<"ExportJob"> | string | null + createdAt?: DateTimeWithAggregatesFilter<"ExportJob"> | Date | string + updatedAt?: DateTimeWithAggregatesFilter<"ExportJob"> | Date | string + } + export type UserWhereInput = { AND?: UserWhereInput | UserWhereInput[] OR?: UserWhereInput[] @@ -14095,6 +15316,7 @@ export namespace Prisma { isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobCreateNestedManyWithoutTenantInput users?: UserCreateNestedManyWithoutTenantInput mailboxAccounts?: MailboxAccountCreateNestedManyWithoutTenantInput rules?: RuleCreateNestedManyWithoutTenantInput @@ -14107,6 +15329,7 @@ export namespace Prisma { isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobUncheckedCreateNestedManyWithoutTenantInput users?: UserUncheckedCreateNestedManyWithoutTenantInput mailboxAccounts?: MailboxAccountUncheckedCreateNestedManyWithoutTenantInput rules?: RuleUncheckedCreateNestedManyWithoutTenantInput @@ -14119,6 +15342,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean createdAt?: DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUpdateManyWithoutTenantNestedInput users?: UserUpdateManyWithoutTenantNestedInput mailboxAccounts?: MailboxAccountUpdateManyWithoutTenantNestedInput rules?: RuleUpdateManyWithoutTenantNestedInput @@ -14131,6 +15355,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean createdAt?: 
DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUncheckedUpdateManyWithoutTenantNestedInput users?: UserUncheckedUpdateManyWithoutTenantNestedInput mailboxAccounts?: MailboxAccountUncheckedUpdateManyWithoutTenantNestedInput rules?: RuleUncheckedUpdateManyWithoutTenantNestedInput @@ -14161,6 +15386,89 @@ export namespace Prisma { updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string } + export type ExportJobCreateInput = { + id?: string + status?: $Enums.ExportStatus + format: string + scope: string + filePath?: string | null + error?: string | null + createdAt?: Date | string + updatedAt?: Date | string + tenant: TenantCreateNestedOneWithoutExportJobsInput + } + + export type ExportJobUncheckedCreateInput = { + id?: string + tenantId: string + status?: $Enums.ExportStatus + format: string + scope: string + filePath?: string | null + error?: string | null + createdAt?: Date | string + updatedAt?: Date | string + } + + export type ExportJobUpdateInput = { + id?: StringFieldUpdateOperationsInput | string + status?: EnumExportStatusFieldUpdateOperationsInput | $Enums.ExportStatus + format?: StringFieldUpdateOperationsInput | string + scope?: StringFieldUpdateOperationsInput | string + filePath?: NullableStringFieldUpdateOperationsInput | string | null + error?: NullableStringFieldUpdateOperationsInput | string | null + createdAt?: DateTimeFieldUpdateOperationsInput | Date | string + updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + tenant?: TenantUpdateOneRequiredWithoutExportJobsNestedInput + } + + export type ExportJobUncheckedUpdateInput = { + id?: StringFieldUpdateOperationsInput | string + tenantId?: StringFieldUpdateOperationsInput | string + status?: EnumExportStatusFieldUpdateOperationsInput | $Enums.ExportStatus + format?: StringFieldUpdateOperationsInput | string + scope?: StringFieldUpdateOperationsInput | string + filePath?: NullableStringFieldUpdateOperationsInput | string | null + error?: NullableStringFieldUpdateOperationsInput | string | null + createdAt?: DateTimeFieldUpdateOperationsInput | Date | string + updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + } + + export type ExportJobCreateManyInput = { + id?: string + tenantId: string + status?: $Enums.ExportStatus + format: string + scope: string + filePath?: string | null + error?: string | null + createdAt?: Date | string + updatedAt?: Date | string + } + + export type ExportJobUpdateManyMutationInput = { + id?: StringFieldUpdateOperationsInput | string + status?: EnumExportStatusFieldUpdateOperationsInput | $Enums.ExportStatus + format?: StringFieldUpdateOperationsInput | string + scope?: StringFieldUpdateOperationsInput | string + filePath?: NullableStringFieldUpdateOperationsInput | string | null + error?: NullableStringFieldUpdateOperationsInput | string | null + createdAt?: DateTimeFieldUpdateOperationsInput | Date | string + updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + } + + export type ExportJobUncheckedUpdateManyInput = { + id?: StringFieldUpdateOperationsInput | string + tenantId?: StringFieldUpdateOperationsInput | string + status?: EnumExportStatusFieldUpdateOperationsInput | $Enums.ExportStatus + format?: StringFieldUpdateOperationsInput | string + scope?: StringFieldUpdateOperationsInput | string + filePath?: NullableStringFieldUpdateOperationsInput | string | null + error?: NullableStringFieldUpdateOperationsInput | string | null + createdAt?: 
DateTimeFieldUpdateOperationsInput | Date | string + updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + } + export type UserCreateInput = { id?: string email: string @@ -14986,6 +16294,12 @@ export namespace Prisma { not?: NestedDateTimeFilter<$PrismaModel> | Date | string } + export type ExportJobListRelationFilter = { + every?: ExportJobWhereInput + some?: ExportJobWhereInput + none?: ExportJobWhereInput + } + export type UserListRelationFilter = { every?: UserWhereInput some?: UserWhereInput @@ -15010,6 +16324,10 @@ export namespace Prisma { none?: CleanupJobWhereInput } + export type ExportJobOrderByRelationAggregateInput = { + _count?: SortOrder + } + export type UserOrderByRelationAggregateInput = { _count?: SortOrder } @@ -15090,11 +16408,26 @@ export namespace Prisma { _max?: NestedDateTimeFilter<$PrismaModel> } - export type EnumUserRoleFilter<$PrismaModel = never> = { - equals?: $Enums.UserRole | EnumUserRoleFieldRefInput<$PrismaModel> - in?: $Enums.UserRole[] | ListEnumUserRoleFieldRefInput<$PrismaModel> - notIn?: $Enums.UserRole[] | ListEnumUserRoleFieldRefInput<$PrismaModel> - not?: NestedEnumUserRoleFilter<$PrismaModel> | $Enums.UserRole + export type EnumExportStatusFilter<$PrismaModel = never> = { + equals?: $Enums.ExportStatus | EnumExportStatusFieldRefInput<$PrismaModel> + in?: $Enums.ExportStatus[] | ListEnumExportStatusFieldRefInput<$PrismaModel> + notIn?: $Enums.ExportStatus[] | ListEnumExportStatusFieldRefInput<$PrismaModel> + not?: NestedEnumExportStatusFilter<$PrismaModel> | $Enums.ExportStatus + } + + export type StringNullableFilter<$PrismaModel = never> = { + equals?: string | StringFieldRefInput<$PrismaModel> | null + in?: string[] | ListStringFieldRefInput<$PrismaModel> | null + notIn?: string[] | ListStringFieldRefInput<$PrismaModel> | null + lt?: string | StringFieldRefInput<$PrismaModel> + lte?: string | StringFieldRefInput<$PrismaModel> + gt?: string | StringFieldRefInput<$PrismaModel> + gte?: string | StringFieldRefInput<$PrismaModel> + contains?: string | StringFieldRefInput<$PrismaModel> + startsWith?: string | StringFieldRefInput<$PrismaModel> + endsWith?: string | StringFieldRefInput<$PrismaModel> + mode?: QueryMode + not?: NestedStringNullableFilter<$PrismaModel> | string | null } export type TenantRelationFilter = { @@ -15102,6 +16435,82 @@ export namespace Prisma { isNot?: TenantWhereInput } + export type SortOrderInput = { + sort: SortOrder + nulls?: NullsOrder + } + + export type ExportJobCountOrderByAggregateInput = { + id?: SortOrder + tenantId?: SortOrder + status?: SortOrder + format?: SortOrder + scope?: SortOrder + filePath?: SortOrder + error?: SortOrder + createdAt?: SortOrder + updatedAt?: SortOrder + } + + export type ExportJobMaxOrderByAggregateInput = { + id?: SortOrder + tenantId?: SortOrder + status?: SortOrder + format?: SortOrder + scope?: SortOrder + filePath?: SortOrder + error?: SortOrder + createdAt?: SortOrder + updatedAt?: SortOrder + } + + export type ExportJobMinOrderByAggregateInput = { + id?: SortOrder + tenantId?: SortOrder + status?: SortOrder + format?: SortOrder + scope?: SortOrder + filePath?: SortOrder + error?: SortOrder + createdAt?: SortOrder + updatedAt?: SortOrder + } + + export type EnumExportStatusWithAggregatesFilter<$PrismaModel = never> = { + equals?: $Enums.ExportStatus | EnumExportStatusFieldRefInput<$PrismaModel> + in?: $Enums.ExportStatus[] | ListEnumExportStatusFieldRefInput<$PrismaModel> + notIn?: $Enums.ExportStatus[] | ListEnumExportStatusFieldRefInput<$PrismaModel> + not?: 
NestedEnumExportStatusWithAggregatesFilter<$PrismaModel> | $Enums.ExportStatus + _count?: NestedIntFilter<$PrismaModel> + _min?: NestedEnumExportStatusFilter<$PrismaModel> + _max?: NestedEnumExportStatusFilter<$PrismaModel> + } + + export type StringNullableWithAggregatesFilter<$PrismaModel = never> = { + equals?: string | StringFieldRefInput<$PrismaModel> | null + in?: string[] | ListStringFieldRefInput<$PrismaModel> | null + notIn?: string[] | ListStringFieldRefInput<$PrismaModel> | null + lt?: string | StringFieldRefInput<$PrismaModel> + lte?: string | StringFieldRefInput<$PrismaModel> + gt?: string | StringFieldRefInput<$PrismaModel> + gte?: string | StringFieldRefInput<$PrismaModel> + contains?: string | StringFieldRefInput<$PrismaModel> + startsWith?: string | StringFieldRefInput<$PrismaModel> + endsWith?: string | StringFieldRefInput<$PrismaModel> + mode?: QueryMode + not?: NestedStringNullableWithAggregatesFilter<$PrismaModel> | string | null + _count?: NestedIntNullableFilter<$PrismaModel> + _min?: NestedStringNullableFilter<$PrismaModel> + _max?: NestedStringNullableFilter<$PrismaModel> + } + + export type EnumUserRoleFilter<$PrismaModel = never> = { + equals?: $Enums.UserRole | EnumUserRoleFieldRefInput<$PrismaModel> + in?: $Enums.UserRole[] | ListEnumUserRoleFieldRefInput<$PrismaModel> + notIn?: $Enums.UserRole[] | ListEnumUserRoleFieldRefInput<$PrismaModel> + not?: NestedEnumUserRoleFilter<$PrismaModel> | $Enums.UserRole + } + export type UserCountOrderByAggregateInput = { id?: SortOrder tenantId?: SortOrder @@ -15163,21 +16572,6 @@ export namespace Prisma { not?: NestedIntFilter<$PrismaModel> | number } - export type StringNullableFilter<$PrismaModel = never> = { - equals?: string | StringFieldRefInput<$PrismaModel> | null - in?: string[] | ListStringFieldRefInput<$PrismaModel> | null - notIn?: string[] | ListStringFieldRefInput<$PrismaModel> | null - lt?: string | StringFieldRefInput<$PrismaModel> - lte?: string | StringFieldRefInput<$PrismaModel> - gt?: string | StringFieldRefInput<$PrismaModel> - gte?: string | StringFieldRefInput<$PrismaModel> - contains?: string | StringFieldRefInput<$PrismaModel> - startsWith?: string | StringFieldRefInput<$PrismaModel> - endsWith?: string | StringFieldRefInput<$PrismaModel> - mode?: QueryMode - not?: NestedStringNullableFilter<$PrismaModel> | string | null - } - export type IntNullableFilter<$PrismaModel = never> = { equals?: number | IntFieldRefInput<$PrismaModel> | null in?: number[] | ListIntFieldRefInput<$PrismaModel> | null @@ -15211,11 +16605,6 @@ export namespace Prisma { none?: MailboxFolderWhereInput } - export type SortOrderInput = { - sort: SortOrder - nulls?: NullsOrder - } - export type MailboxFolderOrderByRelationAggregateInput = { _count?: SortOrder } @@ -15322,24 +16711,6 @@ export namespace Prisma { _max?: NestedIntFilter<$PrismaModel> } - export type StringNullableWithAggregatesFilter<$PrismaModel = never> = { - equals?: string | StringFieldRefInput<$PrismaModel> | null - in?: string[] | ListStringFieldRefInput<$PrismaModel> | null - notIn?: string[] | ListStringFieldRefInput<$PrismaModel> | null - lt?: string | StringFieldRefInput<$PrismaModel> - lte?: string | StringFieldRefInput<$PrismaModel> - gt?: string | StringFieldRefInput<$PrismaModel> - gte?: string | StringFieldRefInput<$PrismaModel> - contains?: string | StringFieldRefInput<$PrismaModel> - startsWith?: string | StringFieldRefInput<$PrismaModel> - endsWith?: string | StringFieldRefInput<$PrismaModel> - mode?: QueryMode - not?: 
NestedStringNullableWithAggregatesFilter<$PrismaModel> | string | null - _count?: NestedIntNullableFilter<$PrismaModel> - _min?: NestedStringNullableFilter<$PrismaModel> - _max?: NestedStringNullableFilter<$PrismaModel> - } - export type IntNullableWithAggregatesFilter<$PrismaModel = never> = { equals?: number | IntFieldRefInput<$PrismaModel> | null in?: number[] | ListIntFieldRefInput<$PrismaModel> | null @@ -15741,6 +17112,13 @@ export namespace Prisma { progress?: SortOrder } + export type ExportJobCreateNestedManyWithoutTenantInput = { + create?: XOR | ExportJobCreateWithoutTenantInput[] | ExportJobUncheckedCreateWithoutTenantInput[] + connectOrCreate?: ExportJobCreateOrConnectWithoutTenantInput | ExportJobCreateOrConnectWithoutTenantInput[] + createMany?: ExportJobCreateManyTenantInputEnvelope + connect?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + } + export type UserCreateNestedManyWithoutTenantInput = { create?: XOR | UserCreateWithoutTenantInput[] | UserUncheckedCreateWithoutTenantInput[] connectOrCreate?: UserCreateOrConnectWithoutTenantInput | UserCreateOrConnectWithoutTenantInput[] @@ -15769,6 +17147,13 @@ export namespace Prisma { connect?: CleanupJobWhereUniqueInput | CleanupJobWhereUniqueInput[] } + export type ExportJobUncheckedCreateNestedManyWithoutTenantInput = { + create?: XOR | ExportJobCreateWithoutTenantInput[] | ExportJobUncheckedCreateWithoutTenantInput[] + connectOrCreate?: ExportJobCreateOrConnectWithoutTenantInput | ExportJobCreateOrConnectWithoutTenantInput[] + createMany?: ExportJobCreateManyTenantInputEnvelope + connect?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + } + export type UserUncheckedCreateNestedManyWithoutTenantInput = { create?: XOR | UserCreateWithoutTenantInput[] | UserUncheckedCreateWithoutTenantInput[] connectOrCreate?: UserCreateOrConnectWithoutTenantInput | UserCreateOrConnectWithoutTenantInput[] @@ -15809,6 +17194,20 @@ export namespace Prisma { set?: Date | string } + export type ExportJobUpdateManyWithoutTenantNestedInput = { + create?: XOR | ExportJobCreateWithoutTenantInput[] | ExportJobUncheckedCreateWithoutTenantInput[] + connectOrCreate?: ExportJobCreateOrConnectWithoutTenantInput | ExportJobCreateOrConnectWithoutTenantInput[] + upsert?: ExportJobUpsertWithWhereUniqueWithoutTenantInput | ExportJobUpsertWithWhereUniqueWithoutTenantInput[] + createMany?: ExportJobCreateManyTenantInputEnvelope + set?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + disconnect?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + delete?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + connect?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + update?: ExportJobUpdateWithWhereUniqueWithoutTenantInput | ExportJobUpdateWithWhereUniqueWithoutTenantInput[] + updateMany?: ExportJobUpdateManyWithWhereWithoutTenantInput | ExportJobUpdateManyWithWhereWithoutTenantInput[] + deleteMany?: ExportJobScalarWhereInput | ExportJobScalarWhereInput[] + } + export type UserUpdateManyWithoutTenantNestedInput = { create?: XOR | UserCreateWithoutTenantInput[] | UserUncheckedCreateWithoutTenantInput[] connectOrCreate?: UserCreateOrConnectWithoutTenantInput | UserCreateOrConnectWithoutTenantInput[] @@ -15865,6 +17264,20 @@ export namespace Prisma { deleteMany?: CleanupJobScalarWhereInput | CleanupJobScalarWhereInput[] } + export type ExportJobUncheckedUpdateManyWithoutTenantNestedInput = { + create?: XOR | ExportJobCreateWithoutTenantInput[] | ExportJobUncheckedCreateWithoutTenantInput[] + connectOrCreate?: 
ExportJobCreateOrConnectWithoutTenantInput | ExportJobCreateOrConnectWithoutTenantInput[] + upsert?: ExportJobUpsertWithWhereUniqueWithoutTenantInput | ExportJobUpsertWithWhereUniqueWithoutTenantInput[] + createMany?: ExportJobCreateManyTenantInputEnvelope + set?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + disconnect?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + delete?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + connect?: ExportJobWhereUniqueInput | ExportJobWhereUniqueInput[] + update?: ExportJobUpdateWithWhereUniqueWithoutTenantInput | ExportJobUpdateWithWhereUniqueWithoutTenantInput[] + updateMany?: ExportJobUpdateManyWithWhereWithoutTenantInput | ExportJobUpdateManyWithWhereWithoutTenantInput[] + deleteMany?: ExportJobScalarWhereInput | ExportJobScalarWhereInput[] + } + export type UserUncheckedUpdateManyWithoutTenantNestedInput = { create?: XOR | UserCreateWithoutTenantInput[] | UserUncheckedCreateWithoutTenantInput[] connectOrCreate?: UserCreateOrConnectWithoutTenantInput | UserCreateOrConnectWithoutTenantInput[] @@ -15921,6 +17334,28 @@ export namespace Prisma { deleteMany?: CleanupJobScalarWhereInput | CleanupJobScalarWhereInput[] } + export type TenantCreateNestedOneWithoutExportJobsInput = { + create?: XOR + connectOrCreate?: TenantCreateOrConnectWithoutExportJobsInput + connect?: TenantWhereUniqueInput + } + + export type EnumExportStatusFieldUpdateOperationsInput = { + set?: $Enums.ExportStatus + } + + export type NullableStringFieldUpdateOperationsInput = { + set?: string | null + } + + export type TenantUpdateOneRequiredWithoutExportJobsNestedInput = { + create?: XOR + connectOrCreate?: TenantCreateOrConnectWithoutExportJobsInput + upsert?: TenantUpsertWithoutExportJobsInput + connect?: TenantWhereUniqueInput + update?: XOR, TenantUncheckedUpdateWithoutExportJobsInput> + } + export type TenantCreateNestedOneWithoutUsersInput = { create?: XOR connectOrCreate?: TenantCreateOrConnectWithoutUsersInput @@ -15985,10 +17420,6 @@ export namespace Prisma { divide?: number } - export type NullableStringFieldUpdateOperationsInput = { - set?: string | null - } - export type NullableIntFieldUpdateOperationsInput = { set?: number | null increment?: number @@ -16497,6 +17928,65 @@ export namespace Prisma { _max?: NestedDateTimeFilter<$PrismaModel> } + export type NestedEnumExportStatusFilter<$PrismaModel = never> = { + equals?: $Enums.ExportStatus | EnumExportStatusFieldRefInput<$PrismaModel> + in?: $Enums.ExportStatus[] | ListEnumExportStatusFieldRefInput<$PrismaModel> + notIn?: $Enums.ExportStatus[] | ListEnumExportStatusFieldRefInput<$PrismaModel> + not?: NestedEnumExportStatusFilter<$PrismaModel> | $Enums.ExportStatus + } + + export type NestedStringNullableFilter<$PrismaModel = never> = { + equals?: string | StringFieldRefInput<$PrismaModel> | null + in?: string[] | ListStringFieldRefInput<$PrismaModel> | null + notIn?: string[] | ListStringFieldRefInput<$PrismaModel> | null + lt?: string | StringFieldRefInput<$PrismaModel> + lte?: string | StringFieldRefInput<$PrismaModel> + gt?: string | StringFieldRefInput<$PrismaModel> + gte?: string | StringFieldRefInput<$PrismaModel> + contains?: string | StringFieldRefInput<$PrismaModel> + startsWith?: string | StringFieldRefInput<$PrismaModel> + endsWith?: string | StringFieldRefInput<$PrismaModel> + not?: NestedStringNullableFilter<$PrismaModel> | string | null + } + + export type NestedEnumExportStatusWithAggregatesFilter<$PrismaModel = never> = { + equals?: $Enums.ExportStatus | 
EnumExportStatusFieldRefInput<$PrismaModel> + in?: $Enums.ExportStatus[] | ListEnumExportStatusFieldRefInput<$PrismaModel> + notIn?: $Enums.ExportStatus[] | ListEnumExportStatusFieldRefInput<$PrismaModel> + not?: NestedEnumExportStatusWithAggregatesFilter<$PrismaModel> | $Enums.ExportStatus + _count?: NestedIntFilter<$PrismaModel> + _min?: NestedEnumExportStatusFilter<$PrismaModel> + _max?: NestedEnumExportStatusFilter<$PrismaModel> + } + + export type NestedStringNullableWithAggregatesFilter<$PrismaModel = never> = { + equals?: string | StringFieldRefInput<$PrismaModel> | null + in?: string[] | ListStringFieldRefInput<$PrismaModel> | null + notIn?: string[] | ListStringFieldRefInput<$PrismaModel> | null + lt?: string | StringFieldRefInput<$PrismaModel> + lte?: string | StringFieldRefInput<$PrismaModel> + gt?: string | StringFieldRefInput<$PrismaModel> + gte?: string | StringFieldRefInput<$PrismaModel> + contains?: string | StringFieldRefInput<$PrismaModel> + startsWith?: string | StringFieldRefInput<$PrismaModel> + endsWith?: string | StringFieldRefInput<$PrismaModel> + not?: NestedStringNullableWithAggregatesFilter<$PrismaModel> | string | null + _count?: NestedIntNullableFilter<$PrismaModel> + _min?: NestedStringNullableFilter<$PrismaModel> + _max?: NestedStringNullableFilter<$PrismaModel> + } + + export type NestedIntNullableFilter<$PrismaModel = never> = { + equals?: number | IntFieldRefInput<$PrismaModel> | null + in?: number[] | ListIntFieldRefInput<$PrismaModel> | null + notIn?: number[] | ListIntFieldRefInput<$PrismaModel> | null + lt?: number | IntFieldRefInput<$PrismaModel> + lte?: number | IntFieldRefInput<$PrismaModel> + gt?: number | IntFieldRefInput<$PrismaModel> + gte?: number | IntFieldRefInput<$PrismaModel> + not?: NestedIntNullableFilter<$PrismaModel> | number | null + } + export type NestedEnumUserRoleFilter<$PrismaModel = never> = { equals?: $Enums.UserRole | EnumUserRoleFieldRefInput<$PrismaModel> in?: $Enums.UserRole[] | ListEnumUserRoleFieldRefInput<$PrismaModel> @@ -16521,31 +18011,6 @@ export namespace Prisma { not?: NestedEnumMailProviderFilter<$PrismaModel> | $Enums.MailProvider } - export type NestedStringNullableFilter<$PrismaModel = never> = { - equals?: string | StringFieldRefInput<$PrismaModel> | null - in?: string[] | ListStringFieldRefInput<$PrismaModel> | null - notIn?: string[] | ListStringFieldRefInput<$PrismaModel> | null - lt?: string | StringFieldRefInput<$PrismaModel> - lte?: string | StringFieldRefInput<$PrismaModel> - gt?: string | StringFieldRefInput<$PrismaModel> - gte?: string | StringFieldRefInput<$PrismaModel> - contains?: string | StringFieldRefInput<$PrismaModel> - startsWith?: string | StringFieldRefInput<$PrismaModel> - endsWith?: string | StringFieldRefInput<$PrismaModel> - not?: NestedStringNullableFilter<$PrismaModel> | string | null - } - - export type NestedIntNullableFilter<$PrismaModel = never> = { - equals?: number | IntFieldRefInput<$PrismaModel> | null - in?: number[] | ListIntFieldRefInput<$PrismaModel> | null - notIn?: number[] | ListIntFieldRefInput<$PrismaModel> | null - lt?: number | IntFieldRefInput<$PrismaModel> - lte?: number | IntFieldRefInput<$PrismaModel> - gt?: number | IntFieldRefInput<$PrismaModel> - gte?: number | IntFieldRefInput<$PrismaModel> - not?: NestedIntNullableFilter<$PrismaModel> | number | null - } - export type NestedBoolNullableFilter<$PrismaModel = never> = { equals?: boolean | BooleanFieldRefInput<$PrismaModel> | null not?: NestedBoolNullableFilter<$PrismaModel> | boolean | null @@ -16599,23 
+18064,6 @@ export namespace Prisma { not?: NestedFloatFilter<$PrismaModel> | number } - export type NestedStringNullableWithAggregatesFilter<$PrismaModel = never> = { - equals?: string | StringFieldRefInput<$PrismaModel> | null - in?: string[] | ListStringFieldRefInput<$PrismaModel> | null - notIn?: string[] | ListStringFieldRefInput<$PrismaModel> | null - lt?: string | StringFieldRefInput<$PrismaModel> - lte?: string | StringFieldRefInput<$PrismaModel> - gt?: string | StringFieldRefInput<$PrismaModel> - gte?: string | StringFieldRefInput<$PrismaModel> - contains?: string | StringFieldRefInput<$PrismaModel> - startsWith?: string | StringFieldRefInput<$PrismaModel> - endsWith?: string | StringFieldRefInput<$PrismaModel> - not?: NestedStringNullableWithAggregatesFilter<$PrismaModel> | string | null - _count?: NestedIntNullableFilter<$PrismaModel> - _min?: NestedStringNullableFilter<$PrismaModel> - _max?: NestedStringNullableFilter<$PrismaModel> - } - export type NestedIntNullableWithAggregatesFilter<$PrismaModel = never> = { equals?: number | IntFieldRefInput<$PrismaModel> | null in?: number[] | ListIntFieldRefInput<$PrismaModel> | null @@ -16716,6 +18164,38 @@ export namespace Prisma { _max?: NestedEnumJobStatusFilter<$PrismaModel> } + export type ExportJobCreateWithoutTenantInput = { + id?: string + status?: $Enums.ExportStatus + format: string + scope: string + filePath?: string | null + error?: string | null + createdAt?: Date | string + updatedAt?: Date | string + } + + export type ExportJobUncheckedCreateWithoutTenantInput = { + id?: string + status?: $Enums.ExportStatus + format: string + scope: string + filePath?: string | null + error?: string | null + createdAt?: Date | string + updatedAt?: Date | string + } + + export type ExportJobCreateOrConnectWithoutTenantInput = { + where: ExportJobWhereUniqueInput + create: XOR + } + + export type ExportJobCreateManyTenantInputEnvelope = { + data: ExportJobCreateManyTenantInput | ExportJobCreateManyTenantInput[] + skipDuplicates?: boolean + } + export type UserCreateWithoutTenantInput = { id?: string email: string @@ -16872,6 +18352,37 @@ export namespace Prisma { skipDuplicates?: boolean } + export type ExportJobUpsertWithWhereUniqueWithoutTenantInput = { + where: ExportJobWhereUniqueInput + update: XOR + create: XOR + } + + export type ExportJobUpdateWithWhereUniqueWithoutTenantInput = { + where: ExportJobWhereUniqueInput + data: XOR + } + + export type ExportJobUpdateManyWithWhereWithoutTenantInput = { + where: ExportJobScalarWhereInput + data: XOR + } + + export type ExportJobScalarWhereInput = { + AND?: ExportJobScalarWhereInput | ExportJobScalarWhereInput[] + OR?: ExportJobScalarWhereInput[] + NOT?: ExportJobScalarWhereInput | ExportJobScalarWhereInput[] + id?: StringFilter<"ExportJob"> | string + tenantId?: StringFilter<"ExportJob"> | string + status?: EnumExportStatusFilter<"ExportJob"> | $Enums.ExportStatus + format?: StringFilter<"ExportJob"> | string + scope?: StringFilter<"ExportJob"> | string + filePath?: StringNullableFilter<"ExportJob"> | string | null + error?: StringNullableFilter<"ExportJob"> | string | null + createdAt?: DateTimeFilter<"ExportJob"> | Date | string + updatedAt?: DateTimeFilter<"ExportJob"> | Date | string + } + export type UserUpsertWithWhereUniqueWithoutTenantInput = { where: UserWhereUniqueInput update: XOR @@ -17004,12 +18515,77 @@ export namespace Prisma { updatedAt?: DateTimeFilter<"CleanupJob"> | Date | string } + export type TenantCreateWithoutExportJobsInput = { + id?: string + name: string + 
isActive?: boolean + createdAt?: Date | string + updatedAt?: Date | string + users?: UserCreateNestedManyWithoutTenantInput + mailboxAccounts?: MailboxAccountCreateNestedManyWithoutTenantInput + rules?: RuleCreateNestedManyWithoutTenantInput + jobs?: CleanupJobCreateNestedManyWithoutTenantInput + } + + export type TenantUncheckedCreateWithoutExportJobsInput = { + id?: string + name: string + isActive?: boolean + createdAt?: Date | string + updatedAt?: Date | string + users?: UserUncheckedCreateNestedManyWithoutTenantInput + mailboxAccounts?: MailboxAccountUncheckedCreateNestedManyWithoutTenantInput + rules?: RuleUncheckedCreateNestedManyWithoutTenantInput + jobs?: CleanupJobUncheckedCreateNestedManyWithoutTenantInput + } + + export type TenantCreateOrConnectWithoutExportJobsInput = { + where: TenantWhereUniqueInput + create: XOR + } + + export type TenantUpsertWithoutExportJobsInput = { + update: XOR + create: XOR + where?: TenantWhereInput + } + + export type TenantUpdateToOneWithWhereWithoutExportJobsInput = { + where?: TenantWhereInput + data: XOR + } + + export type TenantUpdateWithoutExportJobsInput = { + id?: StringFieldUpdateOperationsInput | string + name?: StringFieldUpdateOperationsInput | string + isActive?: BoolFieldUpdateOperationsInput | boolean + createdAt?: DateTimeFieldUpdateOperationsInput | Date | string + updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + users?: UserUpdateManyWithoutTenantNestedInput + mailboxAccounts?: MailboxAccountUpdateManyWithoutTenantNestedInput + rules?: RuleUpdateManyWithoutTenantNestedInput + jobs?: CleanupJobUpdateManyWithoutTenantNestedInput + } + + export type TenantUncheckedUpdateWithoutExportJobsInput = { + id?: StringFieldUpdateOperationsInput | string + name?: StringFieldUpdateOperationsInput | string + isActive?: BoolFieldUpdateOperationsInput | boolean + createdAt?: DateTimeFieldUpdateOperationsInput | Date | string + updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + users?: UserUncheckedUpdateManyWithoutTenantNestedInput + mailboxAccounts?: MailboxAccountUncheckedUpdateManyWithoutTenantNestedInput + rules?: RuleUncheckedUpdateManyWithoutTenantNestedInput + jobs?: CleanupJobUncheckedUpdateManyWithoutTenantNestedInput + } + export type TenantCreateWithoutUsersInput = { id?: string name: string isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobCreateNestedManyWithoutTenantInput mailboxAccounts?: MailboxAccountCreateNestedManyWithoutTenantInput rules?: RuleCreateNestedManyWithoutTenantInput jobs?: CleanupJobCreateNestedManyWithoutTenantInput @@ -17021,6 +18597,7 @@ export namespace Prisma { isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobUncheckedCreateNestedManyWithoutTenantInput mailboxAccounts?: MailboxAccountUncheckedCreateNestedManyWithoutTenantInput rules?: RuleUncheckedCreateNestedManyWithoutTenantInput jobs?: CleanupJobUncheckedCreateNestedManyWithoutTenantInput @@ -17048,6 +18625,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean createdAt?: DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUpdateManyWithoutTenantNestedInput mailboxAccounts?: MailboxAccountUpdateManyWithoutTenantNestedInput rules?: RuleUpdateManyWithoutTenantNestedInput jobs?: CleanupJobUpdateManyWithoutTenantNestedInput @@ -17059,6 +18637,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean 
createdAt?: DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUncheckedUpdateManyWithoutTenantNestedInput mailboxAccounts?: MailboxAccountUncheckedUpdateManyWithoutTenantNestedInput rules?: RuleUncheckedUpdateManyWithoutTenantNestedInput jobs?: CleanupJobUncheckedUpdateManyWithoutTenantNestedInput @@ -17070,6 +18649,7 @@ export namespace Prisma { isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobCreateNestedManyWithoutTenantInput users?: UserCreateNestedManyWithoutTenantInput rules?: RuleCreateNestedManyWithoutTenantInput jobs?: CleanupJobCreateNestedManyWithoutTenantInput @@ -17081,6 +18661,7 @@ export namespace Prisma { isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobUncheckedCreateNestedManyWithoutTenantInput users?: UserUncheckedCreateNestedManyWithoutTenantInput rules?: RuleUncheckedCreateNestedManyWithoutTenantInput jobs?: CleanupJobUncheckedCreateNestedManyWithoutTenantInput @@ -17176,6 +18757,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean createdAt?: DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUpdateManyWithoutTenantNestedInput users?: UserUpdateManyWithoutTenantNestedInput rules?: RuleUpdateManyWithoutTenantNestedInput jobs?: CleanupJobUpdateManyWithoutTenantNestedInput @@ -17187,6 +18769,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean createdAt?: DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUncheckedUpdateManyWithoutTenantNestedInput users?: UserUncheckedUpdateManyWithoutTenantNestedInput rules?: RuleUncheckedUpdateManyWithoutTenantNestedInput jobs?: CleanupJobUncheckedUpdateManyWithoutTenantNestedInput @@ -17468,6 +19051,7 @@ export namespace Prisma { isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobCreateNestedManyWithoutTenantInput users?: UserCreateNestedManyWithoutTenantInput mailboxAccounts?: MailboxAccountCreateNestedManyWithoutTenantInput jobs?: CleanupJobCreateNestedManyWithoutTenantInput @@ -17479,6 +19063,7 @@ export namespace Prisma { isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobUncheckedCreateNestedManyWithoutTenantInput users?: UserUncheckedCreateNestedManyWithoutTenantInput mailboxAccounts?: MailboxAccountUncheckedCreateNestedManyWithoutTenantInput jobs?: CleanupJobUncheckedCreateNestedManyWithoutTenantInput @@ -17550,6 +19135,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean createdAt?: DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUpdateManyWithoutTenantNestedInput users?: UserUpdateManyWithoutTenantNestedInput mailboxAccounts?: MailboxAccountUpdateManyWithoutTenantNestedInput jobs?: CleanupJobUpdateManyWithoutTenantNestedInput @@ -17561,6 +19147,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean createdAt?: DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUncheckedUpdateManyWithoutTenantNestedInput users?: UserUncheckedUpdateManyWithoutTenantNestedInput mailboxAccounts?: 
MailboxAccountUncheckedUpdateManyWithoutTenantNestedInput jobs?: CleanupJobUncheckedUpdateManyWithoutTenantNestedInput @@ -17736,6 +19323,7 @@ export namespace Prisma { isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobCreateNestedManyWithoutTenantInput users?: UserCreateNestedManyWithoutTenantInput mailboxAccounts?: MailboxAccountCreateNestedManyWithoutTenantInput rules?: RuleCreateNestedManyWithoutTenantInput @@ -17747,6 +19335,7 @@ export namespace Prisma { isActive?: boolean createdAt?: Date | string updatedAt?: Date | string + exportJobs?: ExportJobUncheckedCreateNestedManyWithoutTenantInput users?: UserUncheckedCreateNestedManyWithoutTenantInput mailboxAccounts?: MailboxAccountUncheckedCreateNestedManyWithoutTenantInput rules?: RuleUncheckedCreateNestedManyWithoutTenantInput @@ -17879,6 +19468,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean createdAt?: DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUpdateManyWithoutTenantNestedInput users?: UserUpdateManyWithoutTenantNestedInput mailboxAccounts?: MailboxAccountUpdateManyWithoutTenantNestedInput rules?: RuleUpdateManyWithoutTenantNestedInput @@ -17890,6 +19480,7 @@ export namespace Prisma { isActive?: BoolFieldUpdateOperationsInput | boolean createdAt?: DateTimeFieldUpdateOperationsInput | Date | string updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + exportJobs?: ExportJobUncheckedUpdateManyWithoutTenantNestedInput users?: UserUncheckedUpdateManyWithoutTenantNestedInput mailboxAccounts?: MailboxAccountUncheckedUpdateManyWithoutTenantNestedInput rules?: RuleUncheckedUpdateManyWithoutTenantNestedInput @@ -18161,6 +19752,17 @@ export namespace Prisma { unsubscribeAttempts?: UnsubscribeAttemptUncheckedUpdateManyWithoutJobNestedInput } + export type ExportJobCreateManyTenantInput = { + id?: string + status?: $Enums.ExportStatus + format: string + scope: string + filePath?: string | null + error?: string | null + createdAt?: Date | string + updatedAt?: Date | string + } + export type UserCreateManyTenantInput = { id?: string email: string @@ -18213,6 +19815,39 @@ export namespace Prisma { updatedAt?: Date | string } + export type ExportJobUpdateWithoutTenantInput = { + id?: StringFieldUpdateOperationsInput | string + status?: EnumExportStatusFieldUpdateOperationsInput | $Enums.ExportStatus + format?: StringFieldUpdateOperationsInput | string + scope?: StringFieldUpdateOperationsInput | string + filePath?: NullableStringFieldUpdateOperationsInput | string | null + error?: NullableStringFieldUpdateOperationsInput | string | null + createdAt?: DateTimeFieldUpdateOperationsInput | Date | string + updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + } + + export type ExportJobUncheckedUpdateWithoutTenantInput = { + id?: StringFieldUpdateOperationsInput | string + status?: EnumExportStatusFieldUpdateOperationsInput | $Enums.ExportStatus + format?: StringFieldUpdateOperationsInput | string + scope?: StringFieldUpdateOperationsInput | string + filePath?: NullableStringFieldUpdateOperationsInput | string | null + error?: NullableStringFieldUpdateOperationsInput | string | null + createdAt?: DateTimeFieldUpdateOperationsInput | Date | string + updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + } + + export type ExportJobUncheckedUpdateManyWithoutTenantInput = { + id?: StringFieldUpdateOperationsInput | string + status?: 
EnumExportStatusFieldUpdateOperationsInput | $Enums.ExportStatus + format?: StringFieldUpdateOperationsInput | string + scope?: StringFieldUpdateOperationsInput | string + filePath?: NullableStringFieldUpdateOperationsInput | string | null + error?: NullableStringFieldUpdateOperationsInput | string | null + createdAt?: DateTimeFieldUpdateOperationsInput | Date | string + updatedAt?: DateTimeFieldUpdateOperationsInput | Date | string + } + export type UserUpdateWithoutTenantInput = { id?: StringFieldUpdateOperationsInput | string email?: StringFieldUpdateOperationsInput | string @@ -18664,6 +20299,10 @@ export namespace Prisma { * @deprecated Use TenantDefaultArgs instead */ export type TenantArgs = TenantDefaultArgs + /** + * @deprecated Use ExportJobDefaultArgs instead + */ + export type ExportJobArgs = ExportJobDefaultArgs /** * @deprecated Use UserDefaultArgs instead */ diff --git a/backend/node_modules/.prisma/client/index.js b/backend/node_modules/.prisma/client/index.js index 7c20682a..99cd1731 100644 --- a/backend/node_modules/.prisma/client/index.js +++ b/backend/node_modules/.prisma/client/index.js @@ -99,6 +99,18 @@ exports.Prisma.TenantScalarFieldEnum = { updatedAt: 'updatedAt' }; +exports.Prisma.ExportJobScalarFieldEnum = { + id: 'id', + tenantId: 'tenantId', + status: 'status', + format: 'format', + scope: 'scope', + filePath: 'filePath', + error: 'error', + createdAt: 'createdAt', + updatedAt: 'updatedAt' +}; + exports.Prisma.UserScalarFieldEnum = { id: 'id', tenantId: 'tenantId', @@ -224,6 +236,13 @@ exports.Prisma.NullsOrder = { first: 'first', last: 'last' }; +exports.ExportStatus = exports.$Enums.ExportStatus = { + QUEUED: 'QUEUED', + RUNNING: 'RUNNING', + DONE: 'DONE', + FAILED: 'FAILED' +}; + exports.UserRole = exports.$Enums.UserRole = { USER: 'USER', ADMIN: 'ADMIN' @@ -260,6 +279,7 @@ exports.JobStatus = exports.$Enums.JobStatus = { exports.Prisma.ModelName = { Tenant: 'Tenant', + ExportJob: 'ExportJob', User: 'User', MailboxAccount: 'MailboxAccount', MailboxFolder: 'MailboxFolder', @@ -316,8 +336,8 @@ const config = { } } }, - "inlineSchema": "generator client {\n provider = \"prisma-client-js\"\n}\n\ndatasource db {\n provider = \"postgresql\"\n url = env(\"DATABASE_URL\")\n}\n\nenum MailProvider {\n GMAIL\n GMX\n WEBDE\n}\n\nenum UserRole {\n USER\n ADMIN\n}\n\nenum JobStatus {\n QUEUED\n RUNNING\n SUCCEEDED\n FAILED\n CANCELED\n}\n\nenum RuleActionType {\n MOVE\n DELETE\n ARCHIVE\n LABEL\n}\n\nenum RuleConditionType {\n HEADER\n SUBJECT\n FROM\n LIST_UNSUBSCRIBE\n LIST_ID\n}\n\nmodel Tenant {\n id String @id @default(cuid())\n name String\n isActive Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n users User[]\n mailboxAccounts MailboxAccount[]\n rules Rule[]\n jobs CleanupJob[]\n}\n\nmodel User {\n id String @id @default(cuid())\n tenantId String\n email String @unique\n password String\n role UserRole @default(USER)\n isActive Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n}\n\nmodel MailboxAccount {\n id String @id @default(cuid())\n tenantId String\n email String\n provider MailProvider\n isActive Boolean @default(true)\n imapHost String\n imapPort Int\n imapTLS Boolean\n smtpHost String?\n smtpPort Int?\n smtpTLS Boolean?\n oauthToken String?\n oauthRefreshToken String?\n oauthAccessToken String?\n oauthExpiresAt DateTime?\n providerUserId String?\n appPassword String?\n createdAt DateTime 
@default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n folders MailboxFolder[]\n jobs CleanupJob[]\n\n @@index([tenantId])\n}\n\nmodel MailboxFolder {\n id String @id @default(cuid())\n mailboxAccountId String\n name String\n remoteId String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n mailboxAccount MailboxAccount @relation(fields: [mailboxAccountId], references: [id])\n mailItems MailItem[]\n\n @@index([mailboxAccountId])\n}\n\nmodel MailItem {\n id String @id @default(cuid())\n folderId String\n messageId String\n subject String?\n from String?\n receivedAt DateTime?\n listId String?\n listUnsubscribe String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n folder MailboxFolder @relation(fields: [folderId], references: [id])\n\n @@index([folderId])\n @@index([messageId])\n}\n\nmodel Rule {\n id String @id @default(cuid())\n tenantId String\n name String\n enabled Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n conditions RuleCondition[]\n actions RuleAction[]\n\n @@index([tenantId])\n}\n\nmodel RuleCondition {\n id String @id @default(cuid())\n ruleId String\n type RuleConditionType\n value String\n\n rule Rule @relation(fields: [ruleId], references: [id])\n\n @@index([ruleId])\n}\n\nmodel RuleAction {\n id String @id @default(cuid())\n ruleId String\n type RuleActionType\n target String?\n\n rule Rule @relation(fields: [ruleId], references: [id])\n\n @@index([ruleId])\n}\n\nmodel CleanupJob {\n id String @id @default(cuid())\n tenantId String\n mailboxAccountId String\n status JobStatus @default(QUEUED)\n dryRun Boolean @default(true)\n unsubscribeEnabled Boolean @default(true)\n routingEnabled Boolean @default(true)\n startedAt DateTime?\n finishedAt DateTime?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n mailboxAccount MailboxAccount @relation(fields: [mailboxAccountId], references: [id])\n unsubscribeAttempts UnsubscribeAttempt[]\n events CleanupJobEvent[]\n\n @@index([tenantId])\n @@index([mailboxAccountId])\n}\n\nmodel UnsubscribeAttempt {\n id String @id @default(cuid())\n jobId String\n mailItemId String?\n method String\n target String\n status String\n createdAt DateTime @default(now())\n\n job CleanupJob @relation(fields: [jobId], references: [id])\n\n @@index([jobId])\n}\n\nmodel CleanupJobEvent {\n id String @id @default(cuid())\n jobId String\n level String\n message String\n progress Int?\n createdAt DateTime @default(now())\n\n job CleanupJob @relation(fields: [jobId], references: [id])\n\n @@index([jobId])\n}\n", - "inlineSchemaHash": "dd24c92a3ea02046b50b7bbe3851abe6159b6331a83d625584e807eef6fb4685", + "inlineSchema": "generator client {\n provider = \"prisma-client-js\"\n}\n\ndatasource db {\n provider = \"postgresql\"\n url = env(\"DATABASE_URL\")\n}\n\nenum MailProvider {\n GMAIL\n GMX\n WEBDE\n}\n\nenum UserRole {\n USER\n ADMIN\n}\n\nenum JobStatus {\n QUEUED\n RUNNING\n SUCCEEDED\n FAILED\n CANCELED\n}\n\nenum RuleActionType {\n MOVE\n DELETE\n ARCHIVE\n LABEL\n}\n\nenum RuleConditionType {\n HEADER\n SUBJECT\n FROM\n LIST_UNSUBSCRIBE\n LIST_ID\n}\n\nmodel Tenant {\n id String @id @default(cuid())\n name String\n isActive Boolean @default(true)\n exportJobs ExportJob[]\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n users 
User[]\n mailboxAccounts MailboxAccount[]\n rules Rule[]\n jobs CleanupJob[]\n}\n\nenum ExportStatus {\n QUEUED\n RUNNING\n DONE\n FAILED\n}\n\nmodel ExportJob {\n id String @id @default(cuid())\n tenantId String\n status ExportStatus @default(QUEUED)\n format String\n scope String\n filePath String?\n error String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n\n @@index([tenantId])\n}\n\nmodel User {\n id String @id @default(cuid())\n tenantId String\n email String @unique\n password String\n role UserRole @default(USER)\n isActive Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n}\n\nmodel MailboxAccount {\n id String @id @default(cuid())\n tenantId String\n email String\n provider MailProvider\n isActive Boolean @default(true)\n imapHost String\n imapPort Int\n imapTLS Boolean\n smtpHost String?\n smtpPort Int?\n smtpTLS Boolean?\n oauthToken String?\n oauthRefreshToken String?\n oauthAccessToken String?\n oauthExpiresAt DateTime?\n providerUserId String?\n appPassword String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n folders MailboxFolder[]\n jobs CleanupJob[]\n\n @@index([tenantId])\n}\n\nmodel MailboxFolder {\n id String @id @default(cuid())\n mailboxAccountId String\n name String\n remoteId String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n mailboxAccount MailboxAccount @relation(fields: [mailboxAccountId], references: [id])\n mailItems MailItem[]\n\n @@index([mailboxAccountId])\n}\n\nmodel MailItem {\n id String @id @default(cuid())\n folderId String\n messageId String\n subject String?\n from String?\n receivedAt DateTime?\n listId String?\n listUnsubscribe String?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n folder MailboxFolder @relation(fields: [folderId], references: [id])\n\n @@index([folderId])\n @@index([messageId])\n}\n\nmodel Rule {\n id String @id @default(cuid())\n tenantId String\n name String\n enabled Boolean @default(true)\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n conditions RuleCondition[]\n actions RuleAction[]\n\n @@index([tenantId])\n}\n\nmodel RuleCondition {\n id String @id @default(cuid())\n ruleId String\n type RuleConditionType\n value String\n\n rule Rule @relation(fields: [ruleId], references: [id])\n\n @@index([ruleId])\n}\n\nmodel RuleAction {\n id String @id @default(cuid())\n ruleId String\n type RuleActionType\n target String?\n\n rule Rule @relation(fields: [ruleId], references: [id])\n\n @@index([ruleId])\n}\n\nmodel CleanupJob {\n id String @id @default(cuid())\n tenantId String\n mailboxAccountId String\n status JobStatus @default(QUEUED)\n dryRun Boolean @default(true)\n unsubscribeEnabled Boolean @default(true)\n routingEnabled Boolean @default(true)\n startedAt DateTime?\n finishedAt DateTime?\n createdAt DateTime @default(now())\n updatedAt DateTime @updatedAt\n\n tenant Tenant @relation(fields: [tenantId], references: [id])\n mailboxAccount MailboxAccount @relation(fields: [mailboxAccountId], references: [id])\n unsubscribeAttempts UnsubscribeAttempt[]\n events CleanupJobEvent[]\n\n @@index([tenantId])\n @@index([mailboxAccountId])\n}\n\nmodel UnsubscribeAttempt {\n id String @id @default(cuid())\n 
jobId String\n mailItemId String?\n method String\n target String\n status String\n createdAt DateTime @default(now())\n\n job CleanupJob @relation(fields: [jobId], references: [id])\n\n @@index([jobId])\n}\n\nmodel CleanupJobEvent {\n id String @id @default(cuid())\n jobId String\n level String\n message String\n progress Int?\n createdAt DateTime @default(now())\n\n job CleanupJob @relation(fields: [jobId], references: [id])\n\n @@index([jobId])\n}\n", + "inlineSchemaHash": "a7e2fb3971910c04a1b754e69e6718268efb7632d1b7e03a6ab8b0b6de7647ea", "copyEngine": true } @@ -338,7 +358,7 @@ if (!fs.existsSync(path.join(__dirname, 'schema.prisma'))) { config.isBundled = true } -config.runtimeDataModel = JSON.parse("{\"models\":{\"Tenant\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"users\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"User\",\"relationName\":\"TenantToUser\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccounts\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"MailboxAccountToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rules\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"User\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"defau
lt\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"email\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":true,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"password\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"role\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"UserRole\",\"default\":\"USER\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"TenantToUser\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailboxAccount\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"email\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"provider\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailProvider\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapHost\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"St
ring\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapPort\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapTLS\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Boolean\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpHost\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpPort\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpTLS\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Boolean\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthRefreshToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthAccessToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthExpiresAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"providerUserId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"appPassword\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"MailboxAccountToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"folders\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefau
ltValue\":false,\"type\":\"MailboxFolder\",\"relationName\":\"MailboxAccountToMailboxFolder\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToMailboxAccount\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailboxFolder\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccountId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"remoteId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"mailboxAccount\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"MailboxAccountToMailboxFolder\",\"relationFromFields\":[\"mailboxAccountId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailItems\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailItem\",\"relationName\":\"MailItemToMailboxFolder\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailItem\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"folderId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"messageId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"
type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"subject\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"from\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"receivedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"listId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"listUnsubscribe\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"folder\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxFolder\",\"relationName\":\"MailItemToMailboxFolder\",\"relationFromFields\":[\"folderId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"Rule\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"enabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"t
ype\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"RuleToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"conditions\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleCondition\",\"relationName\":\"RuleToRuleCondition\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"actions\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleAction\",\"relationName\":\"RuleToRuleAction\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"RuleCondition\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"ruleId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"type\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleConditionType\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"value\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rule\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToRuleCondition\",\"relationFromFields\":[\"ruleId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"RuleAction\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"ruleId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"type\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleActionType\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"target\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rule\",\"kind\":\"object\",\"isList\":false,\"is
Required\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToRuleAction\",\"relationFromFields\":[\"ruleId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"CleanupJob\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccountId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"JobStatus\",\"default\":\"QUEUED\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"dryRun\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"unsubscribeEnabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"routingEnabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"startedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"finishedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"CleanupJobToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccount\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\"
:false,\"type\":\"MailboxAccount\",\"relationName\":\"CleanupJobToMailboxAccount\",\"relationFromFields\":[\"mailboxAccountId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"unsubscribeAttempts\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"UnsubscribeAttempt\",\"relationName\":\"CleanupJobToUnsubscribeAttempt\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"events\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJobEvent\",\"relationName\":\"CleanupJobToCleanupJobEvent\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"UnsubscribeAttempt\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailItemId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"method\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"target\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"job\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToUnsubscribeAttempt\",\"relationFromFields\":[\"jobId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"CleanupJobEvent\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDef
aultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"level\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"message\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"progress\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"job\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToCleanupJobEvent\",\"relationFromFields\":[\"jobId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false}},\"enums\":{\"MailProvider\":{\"values\":[{\"name\":\"GMAIL\",\"dbName\":null},{\"name\":\"GMX\",\"dbName\":null},{\"name\":\"WEBDE\",\"dbName\":null}],\"dbName\":null},\"UserRole\":{\"values\":[{\"name\":\"USER\",\"dbName\":null},{\"name\":\"ADMIN\",\"dbName\":null}],\"dbName\":null},\"JobStatus\":{\"values\":[{\"name\":\"QUEUED\",\"dbName\":null},{\"name\":\"RUNNING\",\"dbName\":null},{\"name\":\"SUCCEEDED\",\"dbName\":null},{\"name\":\"FAILED\",\"dbName\":null},{\"name\":\"CANCELED\",\"dbName\":null}],\"dbName\":null},\"RuleActionType\":{\"values\":[{\"name\":\"MOVE\",\"dbName\":null},{\"name\":\"DELETE\",\"dbName\":null},{\"name\":\"ARCHIVE\",\"dbName\":null},{\"name\":\"LABEL\",\"dbName\":null}],\"dbName\":null},\"RuleConditionType\":{\"values\":[{\"name\":\"HEADER\",\"dbName\":null},{\"name\":\"SUBJECT\",\"dbName\":null},{\"name\":\"FROM\",\"dbName\":null},{\"name\":\"LIST_UNSUBSCRIBE\",\"dbName\":null},{\"name\":\"LIST_ID\",\"dbName\":null}],\"dbName\":null}},\"types\":{}}") +config.runtimeDataModel = 
JSON.parse("{\"models\":{\"Tenant\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"exportJobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"ExportJob\",\"relationName\":\"ExportJobToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"users\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"User\",\"relationName\":\"TenantToUser\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccounts\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"MailboxAccountToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rules\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToTenant\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"ExportJob\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":tr
ue,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"ExportStatus\",\"default\":\"QUEUED\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"format\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"scope\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"filePath\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"error\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"ExportJobToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"User\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"email\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":true,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"password\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"role\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"UserRole\",\"default\":\"USER\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\
":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"TenantToUser\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailboxAccount\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"email\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"provider\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailProvider\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"isActive\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapHost\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapPort\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"imapTLS\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Boolean\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpHost\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpPort\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"smtpTLS\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Boolean\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"ty
pe\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthRefreshToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthAccessToken\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"oauthExpiresAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"providerUserId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"appPassword\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"MailboxAccountToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"folders\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxFolder\",\"relationName\":\"MailboxAccountToMailboxFolder\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobs\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToMailboxAccount\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailboxFolder\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccountId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdat
edAt\":false},{\"name\":\"remoteId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"mailboxAccount\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"MailboxAccountToMailboxFolder\",\"relationFromFields\":[\"mailboxAccountId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailItems\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailItem\",\"relationName\":\"MailItemToMailboxFolder\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"MailItem\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"folderId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"messageId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"subject\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"from\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"receivedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"listId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"listUnsubscribe\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefa
ultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"folder\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxFolder\",\"relationName\":\"MailItemToMailboxFolder\",\"relationFromFields\":[\"folderId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"Rule\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"name\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"enabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"RuleToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"conditions\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleCondition\",\"relationName\":\"RuleToRuleCondition\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"actions\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleAction\",\"relationName\":\"RuleToRuleAction\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"RuleCondition\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":
{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"ruleId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"type\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleConditionType\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"value\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rule\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToRuleCondition\",\"relationFromFields\":[\"ruleId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"RuleAction\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"ruleId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"type\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"RuleActionType\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"target\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"rule\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Rule\",\"relationName\":\"RuleToRuleAction\",\"relationFromFields\":[\"ruleId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"CleanupJob\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"tenantId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccountId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"enum\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"JobStatus\",\"default\":\"QUEUED\",\"isGenerated\":false,\"isUpdate
dAt\":false},{\"name\":\"dryRun\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"unsubscribeEnabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"routingEnabled\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"Boolean\",\"default\":true,\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"startedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"finishedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"updatedAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"DateTime\",\"isGenerated\":false,\"isUpdatedAt\":true},{\"name\":\"tenant\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Tenant\",\"relationName\":\"CleanupJobToTenant\",\"relationFromFields\":[\"tenantId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailboxAccount\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"MailboxAccount\",\"relationName\":\"CleanupJobToMailboxAccount\",\"relationFromFields\":[\"mailboxAccountId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"unsubscribeAttempts\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"UnsubscribeAttempt\",\"relationName\":\"CleanupJobToUnsubscribeAttempt\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"events\",\"kind\":\"object\",\"isList\":true,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJobEvent\",\"relationName\":\"CleanupJobToCleanupJobEvent\",\"relationFromFields\":[],\"relationToFields\":[],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"UnsubscribeAttempt\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobId\",\"kind\":\"scalar\",\"isList\":false,\"isRe
quired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"mailItemId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"method\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"target\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"status\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"job\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToUnsubscribeAttempt\",\"relationFromFields\":[\"jobId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isUpdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false},\"CleanupJobEvent\":{\"dbName\":null,\"fields\":[{\"name\":\"id\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":true,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"String\",\"default\":{\"name\":\"cuid\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"jobId\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":true,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"level\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"message\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"String\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"progress\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":false,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"Int\",\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"createdAt\",\"kind\":\"scalar\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":true,\"type\":\"DateTime\",\"default\":{\"name\":\"now\",\"args\":[]},\"isGenerated\":false,\"isUpdatedAt\":false},{\"name\":\"job\",\"kind\":\"object\",\"isList\":false,\"isRequired\":true,\"isUnique\":false,\"isId\":false,\"isReadOnly\":false,\"hasDefaultValue\":false,\"type\":\"CleanupJob\",\"relationName\":\"CleanupJobToCleanupJobEvent\",\"relationFromFields\":[\"jobId\"],\"relationToFields\":[\"id\"],\"isGenerated\":false,\"isU
pdatedAt\":false}],\"primaryKey\":null,\"uniqueFields\":[],\"uniqueIndexes\":[],\"isGenerated\":false}},\"enums\":{\"MailProvider\":{\"values\":[{\"name\":\"GMAIL\",\"dbName\":null},{\"name\":\"GMX\",\"dbName\":null},{\"name\":\"WEBDE\",\"dbName\":null}],\"dbName\":null},\"UserRole\":{\"values\":[{\"name\":\"USER\",\"dbName\":null},{\"name\":\"ADMIN\",\"dbName\":null}],\"dbName\":null},\"JobStatus\":{\"values\":[{\"name\":\"QUEUED\",\"dbName\":null},{\"name\":\"RUNNING\",\"dbName\":null},{\"name\":\"SUCCEEDED\",\"dbName\":null},{\"name\":\"FAILED\",\"dbName\":null},{\"name\":\"CANCELED\",\"dbName\":null}],\"dbName\":null},\"RuleActionType\":{\"values\":[{\"name\":\"MOVE\",\"dbName\":null},{\"name\":\"DELETE\",\"dbName\":null},{\"name\":\"ARCHIVE\",\"dbName\":null},{\"name\":\"LABEL\",\"dbName\":null}],\"dbName\":null},\"RuleConditionType\":{\"values\":[{\"name\":\"HEADER\",\"dbName\":null},{\"name\":\"SUBJECT\",\"dbName\":null},{\"name\":\"FROM\",\"dbName\":null},{\"name\":\"LIST_UNSUBSCRIBE\",\"dbName\":null},{\"name\":\"LIST_ID\",\"dbName\":null}],\"dbName\":null},\"ExportStatus\":{\"values\":[{\"name\":\"QUEUED\",\"dbName\":null},{\"name\":\"RUNNING\",\"dbName\":null},{\"name\":\"DONE\",\"dbName\":null},{\"name\":\"FAILED\",\"dbName\":null}],\"dbName\":null}},\"types\":{}}") defineDmmfProperty(exports.Prisma, config.runtimeDataModel) config.engineWasm = undefined diff --git a/backend/node_modules/.prisma/client/package.json b/backend/node_modules/.prisma/client/package.json index a701f996..7c033c8e 100644 --- a/backend/node_modules/.prisma/client/package.json +++ b/backend/node_modules/.prisma/client/package.json @@ -1,5 +1,5 @@ { - "name": "prisma-client-2a5c1ba38f3256b2f353e0fcaad9b6160a215e9b44e19d797b48531a0808d33e", + "name": "prisma-client-f972eed39fc4466cb85f0349f5e72ee1d81f9351e0ae96befde50b28d520b85a", "main": "index.js", "types": "index.d.ts", "browser": "index-browser.js", diff --git a/backend/node_modules/.prisma/client/schema.prisma b/backend/node_modules/.prisma/client/schema.prisma index 6f4e8d3c..7adee51b 100644 --- a/backend/node_modules/.prisma/client/schema.prisma +++ b/backend/node_modules/.prisma/client/schema.prisma @@ -42,11 +42,12 @@ enum RuleConditionType { } model Tenant { - id String @id @default(cuid()) - name String - isActive Boolean @default(true) - createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt + id String @id @default(cuid()) + name String + isActive Boolean @default(true) + exportJobs ExportJob[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt users User[] mailboxAccounts MailboxAccount[] @@ -54,6 +55,29 @@ model Tenant { jobs CleanupJob[] } +enum ExportStatus { + QUEUED + RUNNING + DONE + FAILED +} + +model ExportJob { + id String @id @default(cuid()) + tenantId String + status ExportStatus @default(QUEUED) + format String + scope String + filePath String? + error String? 
+ createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + tenant Tenant @relation(fields: [tenantId], references: [id]) + + @@index([tenantId]) +} + model User { id String @id @default(cuid()) tenantId String diff --git a/backend/node_modules/.prisma/client/wasm.js b/backend/node_modules/.prisma/client/wasm.js index b89dcbb5..b6faae28 100644 --- a/backend/node_modules/.prisma/client/wasm.js +++ b/backend/node_modules/.prisma/client/wasm.js @@ -130,6 +130,18 @@ exports.Prisma.TenantScalarFieldEnum = { updatedAt: 'updatedAt' }; +exports.Prisma.ExportJobScalarFieldEnum = { + id: 'id', + tenantId: 'tenantId', + status: 'status', + format: 'format', + scope: 'scope', + filePath: 'filePath', + error: 'error', + createdAt: 'createdAt', + updatedAt: 'updatedAt' +}; + exports.Prisma.UserScalarFieldEnum = { id: 'id', tenantId: 'tenantId', @@ -255,6 +267,13 @@ exports.Prisma.NullsOrder = { first: 'first', last: 'last' }; +exports.ExportStatus = exports.$Enums.ExportStatus = { + QUEUED: 'QUEUED', + RUNNING: 'RUNNING', + DONE: 'DONE', + FAILED: 'FAILED' +}; + exports.UserRole = exports.$Enums.UserRole = { USER: 'USER', ADMIN: 'ADMIN' @@ -291,6 +310,7 @@ exports.JobStatus = exports.$Enums.JobStatus = { exports.Prisma.ModelName = { Tenant: 'Tenant', + ExportJob: 'ExportJob', User: 'User', MailboxAccount: 'MailboxAccount', MailboxFolder: 'MailboxFolder', diff --git a/backend/node_modules/archiver-utils/LICENSE b/backend/node_modules/archiver-utils/LICENSE new file mode 100644 index 00000000..f12cd49c --- /dev/null +++ b/backend/node_modules/archiver-utils/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2015 Chris Talkington. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/backend/node_modules/archiver-utils/README.md b/backend/node_modules/archiver-utils/README.md new file mode 100644 index 00000000..4de3b9d1 --- /dev/null +++ b/backend/node_modules/archiver-utils/README.md @@ -0,0 +1,6 @@ +# Archiver Utils + +## Things of Interest +- [Changelog](https://github.com/archiverjs/archiver-utils/releases) +- [Contributing](https://github.com/archiverjs/archiver-utils/blob/master/CONTRIBUTING.md) +- [MIT License](https://github.com/archiverjs/archiver-utils/blob/master/LICENSE) diff --git a/backend/node_modules/archiver-utils/file.js b/backend/node_modules/archiver-utils/file.js new file mode 100644 index 00000000..e507bcea --- /dev/null +++ b/backend/node_modules/archiver-utils/file.js @@ -0,0 +1,209 @@ +/** + * archiver-utils + * + * Copyright (c) 2012-2014 Chris Talkington, contributors. + * Licensed under the MIT license. + * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT + */ +var fs = require('graceful-fs'); +var path = require('path'); + +var flatten = require('lodash/flatten'); +var difference = require('lodash/difference'); +var union = require('lodash/union'); +var isPlainObject = require('lodash/isPlainObject'); + +var glob = require('glob'); + +var file = module.exports = {}; + +var pathSeparatorRe = /[\/\\]/g; + +// Process specified wildcard glob patterns or filenames against a +// callback, excluding and uniquing files in the result set. +var processPatterns = function(patterns, fn) { + // Filepaths to return. + var result = []; + // Iterate over flattened patterns array. + flatten(patterns).forEach(function(pattern) { + // If the first character is ! it should be omitted + var exclusion = pattern.indexOf('!') === 0; + // If the pattern is an exclusion, remove the ! + if (exclusion) { pattern = pattern.slice(1); } + // Find all matching files for this pattern. + var matches = fn(pattern); + if (exclusion) { + // If an exclusion, remove matching files. + result = difference(result, matches); + } else { + // Otherwise add matching files. + result = union(result, matches); + } + }); + return result; +}; + +// True if the file path exists. +file.exists = function() { + var filepath = path.join.apply(path, arguments); + return fs.existsSync(filepath); +}; + +// Return an array of all file paths that match the given wildcard patterns. +file.expand = function(...args) { + // If the first argument is an options object, save those options to pass + // into the File.prototype.glob.sync method. + var options = isPlainObject(args[0]) ? args.shift() : {}; + // Use the first argument if it's an Array, otherwise convert the arguments + // object to an array and use that. + var patterns = Array.isArray(args[0]) ? args[0] : args; + // Return empty set if there are no patterns or filepaths. + if (patterns.length === 0) { return []; } + // Return all matching filepaths. + var matches = processPatterns(patterns, function(pattern) { + // Find all matching files for this pattern. + return glob.sync(pattern, options); + }); + // Filter result set? + if (options.filter) { + matches = matches.filter(function(filepath) { + filepath = path.join(options.cwd || '', filepath); + try { + if (typeof options.filter === 'function') { + return options.filter(filepath); + } else { + // If the file is of the right type and exists, this should work. + return fs.statSync(filepath)[options.filter](); + } + } catch(e) { + // Otherwise, it's probably not the right type. 
+ return false; + } + }); + } + return matches; +}; + +// Build a multi task "files" object dynamically. +file.expandMapping = function(patterns, destBase, options) { + options = Object.assign({ + rename: function(destBase, destPath) { + return path.join(destBase || '', destPath); + } + }, options); + var files = []; + var fileByDest = {}; + // Find all files matching pattern, using passed-in options. + file.expand(options, patterns).forEach(function(src) { + var destPath = src; + // Flatten? + if (options.flatten) { + destPath = path.basename(destPath); + } + // Change the extension? + if (options.ext) { + destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); + } + // Generate destination filename. + var dest = options.rename(destBase, destPath, options); + // Prepend cwd to src path if necessary. + if (options.cwd) { src = path.join(options.cwd, src); } + // Normalize filepaths to be unix-style. + dest = dest.replace(pathSeparatorRe, '/'); + src = src.replace(pathSeparatorRe, '/'); + // Map correct src path to dest path. + if (fileByDest[dest]) { + // If dest already exists, push this src onto that dest's src array. + fileByDest[dest].src.push(src); + } else { + // Otherwise create a new src-dest file mapping object. + files.push({ + src: [src], + dest: dest, + }); + // And store a reference for later use. + fileByDest[dest] = files[files.length - 1]; + } + }); + return files; +}; + +// reusing bits of grunt's multi-task source normalization +file.normalizeFilesArray = function(data) { + var files = []; + + data.forEach(function(obj) { + var prop; + if ('src' in obj || 'dest' in obj) { + files.push(obj); + } + }); + + if (files.length === 0) { + return []; + } + + files = _(files).chain().forEach(function(obj) { + if (!('src' in obj) || !obj.src) { return; } + // Normalize .src properties to flattened array. + if (Array.isArray(obj.src)) { + obj.src = flatten(obj.src); + } else { + obj.src = [obj.src]; + } + }).map(function(obj) { + // Build options object, removing unwanted properties. + var expandOptions = Object.assign({}, obj); + delete expandOptions.src; + delete expandOptions.dest; + + // Expand file mappings. + if (obj.expand) { + return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) { + // Copy obj properties to result. + var result = Object.assign({}, obj); + // Make a clone of the orig obj available. + result.orig = Object.assign({}, obj); + // Set .src and .dest, processing both as templates. + result.src = mapObj.src; + result.dest = mapObj.dest; + // Remove unwanted properties. + ['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) { + delete result[prop]; + }); + return result; + }); + } + + // Copy obj properties to result, adding an .orig property. + var result = Object.assign({}, obj); + // Make a clone of the orig obj available. + result.orig = Object.assign({}, obj); + + if ('src' in result) { + // Expose an expand-on-demand getter method as .src. + Object.defineProperty(result, 'src', { + enumerable: true, + get: function fn() { + var src; + if (!('result' in fn)) { + src = obj.src; + // If src is an array, flatten it. Otherwise, make it into an array. + src = Array.isArray(src) ? flatten(src) : [src]; + // Expand src files, memoizing result. 
+ fn.result = file.expand(expandOptions, src); + } + return fn.result; + } + }); + } + + if ('dest' in result) { + result.dest = obj.dest; + } + + return result; + }).flatten().value(); + + return files; +}; diff --git a/backend/node_modules/archiver-utils/index.js b/backend/node_modules/archiver-utils/index.js new file mode 100644 index 00000000..6da3e1f4 --- /dev/null +++ b/backend/node_modules/archiver-utils/index.js @@ -0,0 +1,154 @@ +/** + * archiver-utils + * + * Copyright (c) 2015 Chris Talkington. + * Licensed under the MIT license. + * https://github.com/archiverjs/archiver-utils/blob/master/LICENSE + */ +var fs = require('graceful-fs'); +var path = require('path'); +var lazystream = require('lazystream'); +var normalizePath = require('normalize-path'); +var defaults = require('lodash/defaults'); + +var Stream = require('stream').Stream; +var PassThrough = require('readable-stream').PassThrough; + +var utils = module.exports = {}; +utils.file = require('./file.js'); + +utils.collectStream = function(source, callback) { + var collection = []; + var size = 0; + + source.on('error', callback); + + source.on('data', function(chunk) { + collection.push(chunk); + size += chunk.length; + }); + + source.on('end', function() { + var buf = Buffer.alloc(size); + var offset = 0; + + collection.forEach(function(data) { + data.copy(buf, offset); + offset += data.length; + }); + + callback(null, buf); + }); +}; + +utils.dateify = function(dateish) { + dateish = dateish || new Date(); + + if (dateish instanceof Date) { + dateish = dateish; + } else if (typeof dateish === 'string') { + dateish = new Date(dateish); + } else { + dateish = new Date(); + } + + return dateish; +}; + +// this is slightly different from lodash version +utils.defaults = function(object, source, guard) { + var args = arguments; + args[0] = args[0] || {}; + + return defaults(...args); +}; + +utils.isStream = function(source) { + return source instanceof Stream; +}; + +utils.lazyReadStream = function(filepath) { + return new lazystream.Readable(function() { + return fs.createReadStream(filepath); + }); +}; + +utils.normalizeInputSource = function(source) { + if (source === null) { + return Buffer.alloc(0); + } else if (typeof source === 'string') { + return Buffer.from(source); + } else if (utils.isStream(source)) { + // Always pipe through a PassThrough stream to guarantee pausing the stream if it's already flowing, + // since it will only be processed in a (distant) future iteration of the event loop, and will lose + // data if already flowing now. + return source.pipe(new PassThrough()); + } + + return source; +}; + +utils.sanitizePath = function(filepath) { + return normalizePath(filepath, false).replace(/^\w+:/, '').replace(/^(\.\.\/|\/)+/, ''); +}; + +utils.trailingSlashIt = function(str) { + return str.slice(-1) !== '/' ? 
str + '/' : str; +}; + +utils.unixifyPath = function(filepath) { + return normalizePath(filepath, false).replace(/^\w+:/, ''); +}; + +utils.walkdir = function(dirpath, base, callback) { + var results = []; + + if (typeof base === 'function') { + callback = base; + base = dirpath; + } + + fs.readdir(dirpath, function(err, list) { + var i = 0; + var file; + var filepath; + + if (err) { + return callback(err); + } + + (function next() { + file = list[i++]; + + if (!file) { + return callback(null, results); + } + + filepath = path.join(dirpath, file); + + fs.stat(filepath, function(err, stats) { + results.push({ + path: filepath, + relative: path.relative(base, filepath).replace(/\\/g, '/'), + stats: stats + }); + + if (stats && stats.isDirectory()) { + utils.walkdir(filepath, base, function(err, res) { + if(err){ + return callback(err); + } + + res.forEach(function(dirEntry) { + results.push(dirEntry); + }); + + next(); + }); + } else { + next(); + } + }); + })(); + }); +}; diff --git a/backend/node_modules/archiver-utils/node_modules/glob/LICENSE b/backend/node_modules/archiver-utils/node_modules/glob/LICENSE new file mode 100644 index 00000000..39e8fe16 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/glob/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/backend/node_modules/archiver-utils/node_modules/glob/README.md b/backend/node_modules/archiver-utils/node_modules/glob/README.md new file mode 100644 index 00000000..62939110 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/glob/README.md @@ -0,0 +1,399 @@ +# Glob + +Match files using the patterns the shell uses, like stars and stuff. + +[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master) + +This is a glob implementation in JavaScript. It uses the `minimatch` +library to do its matching. + +![a fun cartoon logo made of glob characters](logo/glob.png) + +## Usage + +Install with npm + +``` +npm i glob +``` + +```javascript +var glob = require("glob") + +// options is optional +glob("**/*.js", options, function (er, files) { + // files is an array of filenames. + // If the `nonull` option is set, and nothing + // was found, then files is ["**/*.js"] + // er is an error object or null. +}) +``` + +## Glob Primer + +"Globs" are the patterns you type when you do stuff like `ls *.js` on +the command line, or put `build/*` in a `.gitignore` file. 
+ +Before parsing the path part patterns, braced sections are expanded +into a set. Braced sections start with `{` and end with `}`, with any +number of comma-delimited sections within. Braced sections may contain +slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`. + +The following characters have special magic meaning when used in a +path portion: + +* `*` Matches 0 or more characters in a single path portion +* `?` Matches 1 character +* `[...]` Matches a range of characters, similar to a RegExp range. + If the first character of the range is `!` or `^` then it matches + any character not in the range. +* `!(pattern|pattern|pattern)` Matches anything that does not match + any of the patterns provided. +* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the + patterns provided. +* `+(pattern|pattern|pattern)` Matches one or more occurrences of the + patterns provided. +* `*(a|b|c)` Matches zero or more occurrences of the patterns provided +* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns + provided +* `**` If a "globstar" is alone in a path portion, then it matches + zero or more directories and subdirectories searching for matches. + It does not crawl symlinked directories. + +### Dots + +If a file or directory path portion has a `.` as the first character, +then it will not match any glob pattern unless that pattern's +corresponding path part also has a `.` as its first character. + +For example, the pattern `a/.*/c` would match the file at `a/.b/c`. +However the pattern `a/*/c` would not, because `*` does not start with +a dot character. + +You can make glob treat dots as normal characters by setting +`dot:true` in the options. + +### Basename Matching + +If you set `matchBase:true` in the options, and the pattern has no +slashes in it, then it will seek for any file anywhere in the tree +with a matching basename. For example, `*.js` would match +`test/simple/basic.js`. + +### Empty Sets + +If no matching files are found, then an empty array is returned. This +differs from the shell, where the pattern itself is returned. For +example: + + $ echo a*s*d*f + a*s*d*f + +To get the bash-style behavior, set the `nonull:true` in the options. + +### See Also: + +* `man sh` +* `man bash` (Search for "Pattern Matching") +* `man 3 fnmatch` +* `man 5 gitignore` +* [minimatch documentation](https://github.com/isaacs/minimatch) + +## glob.hasMagic(pattern, [options]) + +Returns `true` if there are any special characters in the pattern, and +`false` otherwise. + +Note that the options affect the results. If `noext:true` is set in +the options object, then `+(a|b)` will not be considered a magic +pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}` +then that is considered magical, unless `nobrace:true` is set in the +options. + +## glob(pattern, [options], cb) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* `cb` `{Function}` + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Perform an asynchronous glob search. + +## glob.sync(pattern, [options]) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* return: `{Array}` filenames found matching the pattern + +Perform a synchronous glob search. + +## Class: glob.Glob + +Create a Glob object by instantiating the `glob.Glob` class. 
+ +```javascript +var Glob = require("glob").Glob +var mg = new Glob(pattern, options, cb) +``` + +It's an EventEmitter, and starts walking the filesystem to find matches +immediately. + +### new glob.Glob(pattern, [options], [cb]) + +* `pattern` `{String}` pattern to search for +* `options` `{Object}` +* `cb` `{Function}` Called when an error occurs, or matches are found + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Note that if the `sync` flag is set in the options, then matches will +be immediately available on the `g.found` member. + +### Properties + +* `minimatch` The minimatch object that the glob uses. +* `options` The options object passed in. +* `aborted` Boolean which is set to true when calling `abort()`. There + is no way at this time to continue a glob search after aborting, but + you can re-use the statCache to avoid having to duplicate syscalls. +* `cache` Convenience object. Each field has the following possible + values: + * `false` - Path does not exist + * `true` - Path exists + * `'FILE'` - Path exists, and is not a directory + * `'DIR'` - Path exists, and is a directory + * `[file, entries, ...]` - Path exists, is a directory, and the + array value is the results of `fs.readdir` +* `statCache` Cache of `fs.stat` results, to prevent statting the same + path multiple times. +* `symlinks` A record of which paths are symbolic links, which is + relevant in resolving `**` patterns. +* `realpathCache` An optional object which is passed to `fs.realpath` + to minimize unnecessary syscalls. It is stored on the instantiated + Glob object, and may be re-used. + +### Events + +* `end` When the matching is finished, this is emitted with all the + matches found. If the `nonull` option is set, and no match was found, + then the `matches` list contains the original pattern. The matches + are sorted, unless the `nosort` flag is set. +* `match` Every time a match is found, this is emitted with the specific + thing that matched. It is not deduplicated or resolved to a realpath. +* `error` Emitted when an unexpected error is encountered, or whenever + any fs error occurs if `options.strict` is set. +* `abort` When `abort()` is called, this event is raised. + +### Methods + +* `pause` Temporarily stop the search +* `resume` Resume the search +* `abort` Stop the search forever + +### Options + +All the options that can be passed to Minimatch can also be passed to +Glob to change pattern matching behavior. Also, some have been added, +or have glob-specific ramifications. + +All options are false by default, unless otherwise noted. + +All options are added to the Glob object, as well. + +If you are running many `glob` operations, you can pass a Glob object +as the `options` argument to a subsequent operation to shortcut some +`stat` and `readdir` calls. At the very least, you may pass in shared +`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that +parallel glob operations will be sped up by sharing information about +the filesystem. + +* `cwd` The current working directory in which to search. Defaults + to `process.cwd()`. This option is always coerced to use + forward-slashes as a path separator, because it is not tested + as a glob pattern, so there is no need to escape anything. +* `root` The place where patterns starting with `/` will be mounted + onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix + systems, and `C:\` or some such on Windows.) 
This option is + always coerced to use forward-slashes as a path separator, + because it is not tested as a glob pattern, so there is no need + to escape anything. +* `windowsPathsNoEscape` Use `\\` as a path separator _only_, and + _never_ as an escape character. If set, all `\\` characters + are replaced with `/` in the pattern. Note that this makes it + **impossible** to match against paths containing literal glob + pattern characters, but allows matching with patterns constructed + using `path.join()` and `path.resolve()` on Windows platforms, + mimicking the (buggy!) behavior of Glob v7 and before on + Windows. Please use with caution, and be mindful of [the caveat + below about Windows paths](#windows). (For legacy reasons, + this is also set if `allowWindowsEscape` is set to the exact + value `false`.) +* `dot` Include `.dot` files in normal matches and `globstar` matches. + Note that an explicit dot in a portion of the pattern will always + match dot files. +* `nomount` By default, a pattern starting with a forward-slash will be + "mounted" onto the root setting, so that a valid filesystem path is + returned. Set this flag to disable that behavior. +* `mark` Add a `/` character to directory matches. Note that this + requires additional stat calls. +* `nosort` Don't sort the results. +* `stat` Set to true to stat *all* results. This reduces performance + somewhat, and is completely unnecessary, unless `readdir` is presumed + to be an untrustworthy indicator of file existence. +* `silent` When an unusual error is encountered when attempting to + read a directory, a warning will be printed to stderr. Set the + `silent` option to true to suppress these warnings. +* `strict` When an unusual error is encountered when attempting to + read a directory, the process will just continue on in search of + other matches. Set the `strict` option to raise an error in these + cases. +* `cache` See `cache` property above. Pass in a previously generated + cache object to save some fs calls. +* `statCache` A cache of results of filesystem information, to prevent + unnecessary stat calls. While it should not normally be necessary + to set this, you may pass the statCache from one glob() call to the + options object of another, if you know that the filesystem will not + change between calls. (See "Race Conditions" below.) +* `symlinks` A cache of known symbolic links. You may pass in a + previously generated `symlinks` object to save `lstat` calls when + resolving `**` matches. +* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead. +* `nounique` In some cases, brace-expanded patterns can result in the + same file showing up multiple times in the result set. By default, + this implementation prevents duplicates in the result set. Set this + flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `debug` Set to enable debug logging in minimatch and glob. +* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. +* `noglobstar` Do not match `**` against multiple filenames. (Ie, + treat it as a normal `*` instead.) +* `noext` Do not match `+(a|b)` "extglob" patterns. +* `nocase` Perform a case-insensitive match. Note: on + case-insensitive filesystems, non-magic patterns will match by + default, since `stat` and `readdir` will not raise errors. +* `matchBase` Perform a basename-only match if the pattern does not + contain any slash characters. 
That is, `*.js` would be treated as + equivalent to `**/*.js`, matching all js files in all directories. +* `nodir` Do not match directories, only files. (Note: to match + *only* directories, simply put a `/` at the end of the pattern.) +* `ignore` Add a pattern or an array of glob patterns to exclude matches. + Note: `ignore` patterns are *always* in `dot:true` mode, regardless + of any other settings. +* `follow` Follow symlinked directories when expanding `**` patterns. + Note that this can result in a lot of duplicate references in the + presence of cyclic links. +* `realpath` Set to true to call `fs.realpath` on all of the results. + In the case of a symlink that cannot be resolved, the full absolute + path to the matched entry is returned (though it will usually be a + broken symlink) +* `absolute` Set to true to always receive absolute paths for matched + files. Unlike `realpath`, this also affects the values returned in + the `match` event. +* `fs` File-system object with Node's `fs` API. By default, the built-in + `fs` module will be used. Set to a volume provided by a library like + `memfs` to avoid using the "real" file-system. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.3, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +Note that symlinked directories are not crawled as part of a `**`, +though their contents may match against subsequent portions of the +pattern. This prevents infinite loops and duplicates and the like. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +### Comments and Negation + +Previously, this module let you mark a pattern as a "comment" if it +started with a `#` character, or a "negated" pattern if it started +with a `!` character. + +These options were deprecated in version 5, and removed in version 6. + +To specify things that should not match, use the `ignore` option. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. 
+ +To automatically coerce all `\` characters to `/` in pattern +strings, **thus making it impossible to escape literal glob +characters**, you may set the `windowsPathsNoEscape` option to +`true`. + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. + +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. However, this also makes it even more susceptible to races, +especially if the cache or statCache objects are reused between glob +calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. For the vast majority +of operations, this is never a problem. + +## Glob Logo +Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo). + +The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). + +## Contributing + +Any change to behavior (including bugfixes) must come with a test. + +Patches that fail tests or reduce performance will be rejected. + +``` +# to run tests +npm test + +# to re-generate test fixtures +npm run test-regen + +# to benchmark against bash/zsh +npm run bench + +# to profile javascript +npm run prof +``` + +![](oh-my-glob.gif) diff --git a/backend/node_modules/archiver-utils/node_modules/glob/common.js b/backend/node_modules/archiver-utils/node_modules/glob/common.js new file mode 100644 index 00000000..61a4452f --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/glob/common.js @@ -0,0 +1,244 @@ +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var fs = require("fs") +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path").isAbsolute +var Minimatch = minimatch.Minimatch + +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. 
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.windowsPathsNoEscape = !!options.windowsPathsNoEscape || + options.allowWindowsEscape === false + if (self.windowsPathsNoEscape) { + pattern = pattern.replace(/\\/g, '/') + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + self.fs = options.fs || fs + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = path.resolve(cwd) + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + self.nomount = !!options.nomount + + if (process.platform === "win32") { + self.root = self.root.replace(/\\/g, "/") + self.cwd = self.cwd.replace(/\\/g, "/") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + } + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? 
[] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/backend/node_modules/archiver-utils/node_modules/glob/glob.js b/backend/node_modules/archiver-utils/node_modules/glob/glob.js new file mode 100644 index 00000000..2112a957 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/glob/glob.js @@ -0,0 +1,790 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. 
$], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path').isAbsolute +var globSync = require('./sync.js') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + self.fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + self.fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + self.fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return self.fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/backend/node_modules/archiver-utils/node_modules/glob/package.json b/backend/node_modules/archiver-utils/node_modules/glob/package.json new file mode 100644 index 00000000..ca0fd916 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/glob/package.json @@ -0,0 +1,55 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "8.1.0", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": ">=12" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "devDependencies": { + "memfs": "^3.2.0", + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^16.0.1", + "tick": "0.0.6" + }, + "tap": { + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "statements": 90, + "branches": 90, + "functions": 90, + "lines": 90, + "jobs": 1 + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/backend/node_modules/archiver-utils/node_modules/glob/sync.js b/backend/node_modules/archiver-utils/node_modules/glob/sync.js new file mode 100644 index 00000000..af4600dd --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/glob/sync.js @@ -0,0 +1,486 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path').isAbsolute +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert.ok(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert.ok(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. 
+ var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. 
+ var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = this.fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/backend/node_modules/archiver-utils/node_modules/minimatch/LICENSE b/backend/node_modules/archiver-utils/node_modules/minimatch/LICENSE new file mode 100644 index 00000000..1493534e --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/backend/node_modules/archiver-utils/node_modules/minimatch/README.md b/backend/node_modules/archiver-utils/node_modules/minimatch/README.md new file mode 100644 index 00000000..a5bdefaa --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/minimatch/README.md @@ -0,0 +1,259 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://travis-ci.org/isaacs/minimatch.svg?branch=master)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes in patterns +will always be interpreted as escape characters, not path separators. + +Note that `\` or `/` _will_ be interpreted as path separators in paths on +Windows, and will match against `/` in glob expressions. + +So just always use `/` in patterns. + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. 
+* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true}) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + +### partial + +Compare a partial path to a pattern. As long as the parts of the path that +are present are not contradicted by the pattern, it will be treated as a +match. This is useful in applications where you're walking through a +folder structure, and don't yet have the full path, but want to ensure that +you do not walk down paths that can never be a match. + +For example, + +```js +minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d +minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d +minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a +``` + +### windowsPathsNoEscape + +Use `\\` as a path separator _only_, and _never_ as an escape +character. If set, all `\\` characters are replaced with `/` in +the pattern. Note that this makes it **impossible** to match +against paths containing literal glob pattern characters, but +allows matching with patterns constructed using `path.join()` and +`path.resolve()` on Windows platforms, mimicking the (buggy!) 
+behavior of earlier versions on Windows. Please use with +caution, and be mindful of [the caveat about Windows +paths](#windows). + +For legacy reasons, this is also set if +`options.allowWindowsEscape` is set to the exact value `false`. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +Note that `fnmatch(3)` in libc is an extremely naive string comparison +matcher, which does not do anything special for slashes. This library is +designed to be used in glob searching and file walkers, and so it does do +special things with `/`. Thus, `foo*` will not match `foo/bar` in this +library, even though it would in `fnmatch(3)`. diff --git a/backend/node_modules/archiver-utils/node_modules/minimatch/lib/path.js b/backend/node_modules/archiver-utils/node_modules/minimatch/lib/path.js new file mode 100644 index 00000000..ffe453d9 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/minimatch/lib/path.js @@ -0,0 +1,4 @@ +const isWindows = typeof process === 'object' && + process && + process.platform === 'win32' +module.exports = isWindows ? { sep: '\\' } : { sep: '/' } diff --git a/backend/node_modules/archiver-utils/node_modules/minimatch/minimatch.js b/backend/node_modules/archiver-utils/node_modules/minimatch/minimatch.js new file mode 100644 index 00000000..6c8bfc35 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/minimatch/minimatch.js @@ -0,0 +1,944 @@ +const minimatch = module.exports = (p, pattern, options = {}) => { + assertValidPattern(pattern) + + // shortcut: comments match nothing. 
+ if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +module.exports = minimatch + +const path = require('./lib/path.js') +minimatch.sep = path.sep + +const GLOBSTAR = Symbol('globstar **') +minimatch.GLOBSTAR = GLOBSTAR +const expand = require('brace-expansion') + +const plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]' + +// * => any number of characters +const star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// "abc" -> { a:true, b:true, c:true } +const charSet = s => s.split('').reduce((set, c) => { + set[c] = true + return set +}, {}) + +// characters that need to be escaped in RegExp. +const reSpecials = charSet('().*{}+?[]^$\\!') + +// characters that indicate we have to add the pattern start +const addPatternStartSet = charSet('[.(') + +// normalizes slashes. +const slashSplit = /\/+/ + +minimatch.filter = (pattern, options = {}) => + (p, i, list) => minimatch(p, pattern, options) + +const ext = (a, b = {}) => { + const t = {} + Object.keys(a).forEach(k => t[k] = a[k]) + Object.keys(b).forEach(k => t[k] = b[k]) + return t +} + +minimatch.defaults = def => { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + const orig = minimatch + + const m = (p, pattern, options) => orig(p, pattern, ext(def, options)) + m.Minimatch = class Minimatch extends orig.Minimatch { + constructor (pattern, options) { + super(pattern, ext(def, options)) + } + } + m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch + m.filter = (pattern, options) => orig.filter(pattern, ext(def, options)) + m.defaults = options => orig.defaults(ext(def, options)) + m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options)) + m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options)) + m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options)) + + return m +} + + + + + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = (pattern, options) => braceExpand(pattern, options) + +const braceExpand = (pattern, options = {}) => { + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. 
+ return [pattern] + } + + return expand(pattern) +} + +const MAX_PATTERN_LENGTH = 1024 * 64 +const assertValidPattern = pattern => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +const SUBPARSE = Symbol('subparse') + +minimatch.makeRe = (pattern, options) => + new Minimatch(pattern, options || {}).makeRe() + +minimatch.match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options) + list = list.filter(f => mm.match(f)) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +// replace stuff like \* with * +const globUnescape = s => s.replace(/\\(.)/g, '$1') +const charUnescape = s => s.replace(/\\([^-\]])/g, '$1') +const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +const braExpEscape = s => s.replace(/[[\]\\]/g, '\\$&') + +class Minimatch { + constructor (pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + this.options = options + this.set = [] + this.pattern = pattern + this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || + options.allowWindowsEscape === false + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, '/') + } + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() + } + + debug () {} + + make () { + const pattern = this.pattern + const options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + let set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = (...args) => console.error(...args) + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(s => s.split(slashSplit)) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map((s, si, set) => s.map(this.parse, this)) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. 
+ set = set.filter(s => s.indexOf(false) === -1) + + this.debug(this.pattern, set) + + this.set = set + } + + parseNegate () { + if (this.options.nonegate) return + + const pattern = this.pattern + let negate = false + let negateOffset = 0 + + for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.slice(negateOffset) + this.negate = negate + } + + // set partial to true to test if, for example, + // "/a/b" matches the start of "/*/b/*/d" + // Partial means, if you run out of file before you run + // out of pattern, then that's fine, as long as all + // the parts match. + matchOne (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. 
+ // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. + /* istanbul ignore next */ + throw new Error('wtf?') + } + + braceExpand () { + return braceExpand(this.pattern, this.options) + } + + parse (pattern, isSub) { + assertValidPattern(pattern) + + const options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + let re = '' + let hasMagic = false + let escaping = false + // ? => one single character + const patternListStack = [] + const negativeLists = [] + let stateChar + let inClass = false + let reClassStart = -1 + let classStart = -1 + let cs + let pl + let sp + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. However, if the pattern + // starts with ., then traversal patterns can match. + let dotTravAllowed = pattern.charAt(0) === '.' + let dotFileAllowed = options.dot || dotTravAllowed + const patternStart = () => + dotTravAllowed + ? '' + : dotFileAllowed + ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' + : '(?!\\.)' + const subPatternStart = (p) => + p.charAt(0) === '.' + ? '' + : options.dot + ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' + : '(?!\\.)' + + + const clearStateChar = () => { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + this.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. 
+ if (escaping) { + /* istanbul ignore next - completely not allowed, even escaped. */ + if (c === '/') { + return false + } + + if (reSpecials[c]) { + re += '\\' + } + re += c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // Should already be path-split by now. + return false + } + + case '\\': + if (inClass && pattern.charAt(i + 1) === '-') { + re += c + continue + } + + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + this.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': { + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + const plEntry = { + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close, + } + this.debug(this.pattern, '\t', plEntry) + patternListStack.push(plEntry) + // negation is (?:(?!(?:js)(?:))[^/]*) + re += plEntry.open + // next entry starts with a dot maybe? + if (plEntry.start === 0 && plEntry.type !== '!') { + dotTravAllowed = true + re += subPatternStart(pattern.slice(i + 1)) + } + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + } + + case ')': { + const plEntry = patternListStack[patternListStack.length - 1] + if (inClass || !plEntry) { + re += '\\)' + continue + } + patternListStack.pop() + + // closing an extglob + clearStateChar() + hasMagic = true + pl = plEntry + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(Object.assign(pl, { reEnd: re.length })) + } + continue + } + + case '|': { + const plEntry = patternListStack[patternListStack.length - 1] + if (inClass || !plEntry) { + re += '\\|' + continue + } + + clearStateChar() + re += '|' + // next subpattern can start with a dot? + if (plEntry.start === 0 && plEntry.type !== '!') { + dotTravAllowed = true + re += subPatternStart(pattern.slice(i + 1)) + } + continue + } + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + continue + } + + // split where the last [ was, make sure we don't have + // an invalid re. 
if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + braExpEscape(charUnescape(cs)) + ']') + // looks good, finish up the class. + re += c + } catch (er) { + // out of order ranges in JS are errors, but in glob syntax, + // they're just a range that matches nothing. + re = re.substring(0, reClassStart) + '(?:$.)' // match nothing ever + } + hasMagic = true + inClass = false + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (reSpecials[c] && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + break + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.slice(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substring(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + let tail + tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => { + /* istanbul ignore else - should already be done */ + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + const t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + const addPatternStart = addPatternStartSet[re.charAt(0)] + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. 
+ for (let n = negativeLists.length - 1; n > -1; n--) { + const nl = negativeLists[n] + + const nlBefore = re.slice(0, nl.reStart) + const nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + let nlAfter = re.slice(nl.reEnd) + const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + const closeParensBefore = nlBefore.split(')').length + const openParensBefore = nlBefore.split('(').length - closeParensBefore + let cleanAfter = nlAfter + for (let i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + const dollar = nlAfter === '' && isSub !== SUBPARSE ? '(?:$|\\/)' : '' + + re = nlBefore + nlFirst + nlAfter + dollar + nlLast + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart() + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // if it's nocase, and the lcase/uppercase don't match, it's magic + if (options.nocase && !hasMagic) { + hasMagic = pattern.toUpperCase() !== pattern.toLowerCase() + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + const flags = options.nocase ? 'i' : '' + try { + return Object.assign(new RegExp('^' + re + '$', flags), { + _glob: pattern, + _src: re, + }) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + } + + makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + const set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + const options = this.options + + const twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + const flags = options.nocase ? 'i' : '' + + // coalesce globstars and regexpify non-globstar patterns + // if it's the only item, then we just do one twoStar + // if it's the first, and there are more, prepend (\/|twoStar\/)? to next + // if it's the last, append (\/twoStar|) to previous + // if it's in the middle, append (\/|\/twoStar\/) to previous + // then filter out GLOBSTAR symbols + let re = set.map(pattern => { + pattern = pattern.map(p => + typeof p === 'string' ? regExpEscape(p) + : p === GLOBSTAR ? GLOBSTAR + : p._src + ).reduce((set, p) => { + if (!(set[set.length - 1] === GLOBSTAR && p === GLOBSTAR)) { + set.push(p) + } + return set + }, []) + pattern.forEach((p, i) => { + if (p !== GLOBSTAR || pattern[i-1] === GLOBSTAR) { + return + } + if (i === 0) { + if (pattern.length > 1) { + pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' 
+ pattern[i+1] + } else { + pattern[i] = twoStar + } + } else if (i === pattern.length - 1) { + pattern[i-1] += '(?:\\\/|' + twoStar + ')?' + } else { + pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1] + pattern[i+1] = GLOBSTAR + } + }) + return pattern.filter(p => p !== GLOBSTAR).join('/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp + } + + match (f, partial = this.partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + const options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + const set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + let filename + for (let i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (let i = 0; i < set.length; i++) { + const pattern = set[i] + let file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + const hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate + } + + static defaults (def) { + return minimatch.defaults(def).Minimatch + } +} + +minimatch.Minimatch = Minimatch diff --git a/backend/node_modules/archiver-utils/node_modules/minimatch/package.json b/backend/node_modules/archiver-utils/node_modules/minimatch/package.json new file mode 100644 index 00000000..c8809dbb --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/minimatch/package.json @@ -0,0 +1,35 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "publishConfig": { + "tag": "legacy-v5" + }, + "version": "5.1.6", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "engines": { + "node": ">=10" + }, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "devDependencies": { + "tap": "^16.3.2" + }, + "license": "ISC", + "files": [ + "minimatch.js", + "lib" + ] +} diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/CONTRIBUTING.md b/backend/node_modules/archiver-utils/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 00000000..f478d58d --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/GOVERNANCE.md b/backend/node_modules/archiver-utils/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 00000000..16ffb93f --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. 
+ +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. 
+ +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/LICENSE b/backend/node_modules/archiver-utils/node_modules/readable-stream/LICENSE new file mode 100644 index 00000000..2873b3b2 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/README.md b/backend/node_modules/archiver-utils/node_modules/readable-stream/README.md new file mode 100644 index 00000000..19117c1a --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. 
+ https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/errors-browser.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/errors-browser.js new file mode 100644 index 00000000..fb8e73e1 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/errors.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/errors.js new file mode 100644 index 00000000..8471526d --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected 
= expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. 
Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/experimentalWarning.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 00000000..78e84149 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000..19abfa60 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,126 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. 
+// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); +require('inherits')(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000..24a6bdde --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,37 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; +var Transform = require('./_stream_transform'); +require('inherits')(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000..df1f608d --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1027 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = require('util'); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = require('./internal/streams/buffer_list'); +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. 
+ if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000..1ccb7157 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,190 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = require('./_stream_duplex'); +require('inherits')(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000..292415e2 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,641 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +require('inherits')(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. 
options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. 
+ this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. 
+ state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 00000000..742c5a46 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,180 @@ +'use strict'; + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var finished = require('./end-of-stream'); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/buffer_list.js 
b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 00000000..69bda497 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,183 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = require('buffer'), + Buffer = _require.Buffer; +var _require2 = require('util'), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 00000000..31a17c4d --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,96 @@ +'use strict'; + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in 
the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 00000000..59c671b5 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + 
stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/from-browser.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 00000000..a4ce56f3 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/from.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 00000000..0a34ee92 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,52 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/pipeline.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 00000000..e6f39241 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). 
+ +'use strict'; + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/state.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 00000000..3fbf8927 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,22 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 
16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 00000000..9332a3fd --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 00000000..ce2ad5b6 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/package.json b/backend/node_modules/archiver-utils/node_modules/readable-stream/package.json new file mode 100644 index 00000000..ade59e71 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.2", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/readable-browser.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/readable-browser.js new file mode 100644 index 00000000..adbf60de --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = 
exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/backend/node_modules/archiver-utils/node_modules/readable-stream/readable.js b/backend/node_modules/archiver-utils/node_modules/readable-stream/readable.js new file mode 100644 index 00000000..9e0ca120 --- /dev/null +++ b/backend/node_modules/archiver-utils/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/backend/node_modules/archiver-utils/package.json b/backend/node_modules/archiver-utils/package.json new file mode 100644 index 00000000..bd3b05cd --- /dev/null +++ b/backend/node_modules/archiver-utils/package.json @@ -0,0 +1,50 @@ +{ + "name": "archiver-utils", + "version": "4.0.1", + "license": "MIT", + "description": "utility functions for archiver", + "homepage": "https://github.com/archiverjs/archiver-utils#readme", + "author": { + "name": "Chris Talkington", + "url": "http://christalkington.com/" + }, + "repository": { + "type": "git", + "url": "https://github.com/archiverjs/archiver-utils.git" + }, + "bugs": { + "url": "https://github.com/archiverjs/archiver-utils/issues" + }, + "keywords": [ + "archiver", + "utils" + ], + "main": "index.js", + "files": [ + "index.js", + "file.js" + ], + "engines": { + "node": ">= 12.0.0" + }, + "scripts": { + "test": "mocha --reporter dot" + }, + "dependencies": { + "glob": "^8.0.0", + "graceful-fs": "^4.2.0", + "lazystream": "^1.0.0", + "lodash": "^4.17.15", + "normalize-path": "^3.0.0", + "readable-stream": "^3.6.0" + }, + "devDependencies": { + "chai": "4.3.8", + "mkdirp": "3.0.1", + "mocha": "9.2.2", + "rimraf": "3.0.2" + }, + "publishConfig": { + "registry": "https://registry.npmjs.org/" + } +} diff --git a/backend/node_modules/archiver/LICENSE b/backend/node_modules/archiver/LICENSE new file mode 100644 index 00000000..88caf871 --- /dev/null +++ b/backend/node_modules/archiver/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2012-2014 Chris Talkington, contributors. 
+ +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/backend/node_modules/archiver/README.md b/backend/node_modules/archiver/README.md new file mode 100644 index 00000000..2ee7c171 --- /dev/null +++ b/backend/node_modules/archiver/README.md @@ -0,0 +1,92 @@ +# Archiver + +A streaming interface for archive generation + +Visit the [API documentation](https://www.archiverjs.com/) for a list of all methods available. + +## Install + +```bash +npm install archiver --save +``` + +## Quick Start + +```js +// require modules +const fs = require('fs'); +const archiver = require('archiver'); + +// create a file to stream archive data to. +const output = fs.createWriteStream(__dirname + '/example.zip'); +const archive = archiver('zip', { + zlib: { level: 9 } // Sets the compression level. +}); + +// listen for all archive data to be written +// 'close' event is fired only when a file descriptor is involved +output.on('close', function() { + console.log(archive.pointer() + ' total bytes'); + console.log('archiver has been finalized and the output file descriptor has closed.'); +}); + +// This event is fired when the data source is drained no matter what was the data source. +// It is not part of this library but rather from the NodeJS Stream API. 
+// @see: https://nodejs.org/api/stream.html#stream_event_end +output.on('end', function() { + console.log('Data has been drained'); +}); + +// good practice to catch warnings (ie stat failures and other non-blocking errors) +archive.on('warning', function(err) { + if (err.code === 'ENOENT') { + // log warning + } else { + // throw error + throw err; + } +}); + +// good practice to catch this error explicitly +archive.on('error', function(err) { + throw err; +}); + +// pipe archive data to the file +archive.pipe(output); + +// append a file from stream +const file1 = __dirname + '/file1.txt'; +archive.append(fs.createReadStream(file1), { name: 'file1.txt' }); + +// append a file from string +archive.append('string cheese!', { name: 'file2.txt' }); + +// append a file from buffer +const buffer3 = Buffer.from('buff it!'); +archive.append(buffer3, { name: 'file3.txt' }); + +// append a file +archive.file('file1.txt', { name: 'file4.txt' }); + +// append files from a sub-directory and naming it `new-subdir` within the archive +archive.directory('subdir/', 'new-subdir'); + +// append files from a sub-directory, putting its contents at the root of archive +archive.directory('subdir/', false); + +// append files from a glob pattern +archive.glob('file*.txt', {cwd:__dirname}); + +// finalize the archive (ie we are done appending files but streams have to finish yet) +// 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand +archive.finalize(); +``` + +## Formats + +Archiver ships with out of the box support for TAR and ZIP archives. + +You can register additional formats with `registerFormat`. + +You can check if format already exists before to register a new one with `isRegisteredFormat`. diff --git a/backend/node_modules/archiver/index.js b/backend/node_modules/archiver/index.js new file mode 100644 index 00000000..0996daef --- /dev/null +++ b/backend/node_modules/archiver/index.js @@ -0,0 +1,84 @@ +/** + * Archiver Vending + * + * @ignore + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. + */ +var Archiver = require('./lib/core'); + +var formats = {}; + +/** + * Dispenses a new Archiver instance. + * + * @constructor + * @param {String} format The archive format to use. + * @param {Object} options See [Archiver]{@link Archiver} + * @return {Archiver} + */ +var vending = function(format, options) { + return vending.create(format, options); +}; + +/** + * Creates a new Archiver instance. + * + * @param {String} format The archive format to use. + * @param {Object} options See [Archiver]{@link Archiver} + * @return {Archiver} + */ +vending.create = function(format, options) { + if (formats[format]) { + var instance = new Archiver(format, options); + instance.setFormat(format); + instance.setModule(new formats[format](options)); + + return instance; + } else { + throw new Error('create(' + format + '): format not registered'); + } +}; + +/** + * Registers a format for use with archiver. + * + * @param {String} format The name of the format. + * @param {Function} module The function for archiver to interact with. 
+ * @return void + */ +vending.registerFormat = function(format, module) { + if (formats[format]) { + throw new Error('register(' + format + '): format already registered'); + } + + if (typeof module !== 'function') { + throw new Error('register(' + format + '): format module invalid'); + } + + if (typeof module.prototype.append !== 'function' || typeof module.prototype.finalize !== 'function') { + throw new Error('register(' + format + '): format module missing methods'); + } + + formats[format] = module; +}; + +/** + * Check if the format is already registered. + * + * @param {String} format the name of the format. + * @return boolean + */ +vending.isRegisteredFormat = function (format) { + if (formats[format]) { + return true; + } + + return false; +}; + +vending.registerFormat('zip', require('./lib/plugins/zip')); +vending.registerFormat('tar', require('./lib/plugins/tar')); +vending.registerFormat('json', require('./lib/plugins/json')); + +module.exports = vending; \ No newline at end of file diff --git a/backend/node_modules/archiver/lib/core.js b/backend/node_modules/archiver/lib/core.js new file mode 100644 index 00000000..7c0a74d7 --- /dev/null +++ b/backend/node_modules/archiver/lib/core.js @@ -0,0 +1,974 @@ +/** + * Archiver Core + * + * @ignore + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. + */ +var fs = require('fs'); +var glob = require('readdir-glob'); +var async = require('async'); +var path = require('path'); +var util = require('archiver-utils'); + +var inherits = require('util').inherits; +var ArchiverError = require('./error'); +var Transform = require('readable-stream').Transform; + +var win32 = process.platform === 'win32'; + +/** + * @constructor + * @param {String} format The archive format to use. + * @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}. + */ +var Archiver = function(format, options) { + if (!(this instanceof Archiver)) { + return new Archiver(format, options); + } + + if (typeof format !== 'string') { + options = format; + format = 'zip'; + } + + options = this.options = util.defaults(options, { + highWaterMark: 1024 * 1024, + statConcurrency: 4 + }); + + Transform.call(this, options); + + this._format = false; + this._module = false; + this._pending = 0; + this._pointer = 0; + + this._entriesCount = 0; + this._entriesProcessedCount = 0; + this._fsEntriesTotalBytes = 0; + this._fsEntriesProcessedBytes = 0; + + this._queue = async.queue(this._onQueueTask.bind(this), 1); + this._queue.drain(this._onQueueDrain.bind(this)); + + this._statQueue = async.queue(this._onStatQueueTask.bind(this), options.statConcurrency); + this._statQueue.drain(this._onQueueDrain.bind(this)); + + this._state = { + aborted: false, + finalize: false, + finalizing: false, + finalized: false, + modulePiped: false + }; + + this._streams = []; +}; + +inherits(Archiver, Transform); + +/** + * Internal logic for `abort`. + * + * @private + * @return void + */ +Archiver.prototype._abort = function() { + this._state.aborted = true; + this._queue.kill(); + this._statQueue.kill(); + + if (this._queue.idle()) { + this._shutdown(); + } +}; + +/** + * Internal helper for appending files. + * + * @private + * @param {String} filepath The source filepath. + * @param {EntryData} data The entry data. 
+ * @return void + */ +Archiver.prototype._append = function(filepath, data) { + data = data || {}; + + var task = { + source: null, + filepath: filepath + }; + + if (!data.name) { + data.name = filepath; + } + + data.sourcePath = filepath; + task.data = data; + this._entriesCount++; + + if (data.stats && data.stats instanceof fs.Stats) { + task = this._updateQueueTaskWithStats(task, data.stats); + if (task) { + if (data.stats.size) { + this._fsEntriesTotalBytes += data.stats.size; + } + + this._queue.push(task); + } + } else { + this._statQueue.push(task); + } +}; + +/** + * Internal logic for `finalize`. + * + * @private + * @return void + */ +Archiver.prototype._finalize = function() { + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + return; + } + + this._state.finalizing = true; + + this._moduleFinalize(); + + this._state.finalizing = false; + this._state.finalized = true; +}; + +/** + * Checks the various state variables to determine if we can `finalize`. + * + * @private + * @return {Boolean} + */ +Archiver.prototype._maybeFinalize = function() { + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + return false; + } + + if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { + this._finalize(); + return true; + } + + return false; +}; + +/** + * Appends an entry to the module. + * + * @private + * @fires Archiver#entry + * @param {(Buffer|Stream)} source + * @param {EntryData} data + * @param {Function} callback + * @return void + */ +Archiver.prototype._moduleAppend = function(source, data, callback) { + if (this._state.aborted) { + callback(); + return; + } + + this._module.append(source, data, function(err) { + this._task = null; + + if (this._state.aborted) { + this._shutdown(); + return; + } + + if (err) { + this.emit('error', err); + setImmediate(callback); + return; + } + + /** + * Fires when the entry's input has been processed and appended to the archive. + * + * @event Archiver#entry + * @type {EntryData} + */ + this.emit('entry', data); + this._entriesProcessedCount++; + + if (data.stats && data.stats.size) { + this._fsEntriesProcessedBytes += data.stats.size; + } + + /** + * @event Archiver#progress + * @type {ProgressData} + */ + this.emit('progress', { + entries: { + total: this._entriesCount, + processed: this._entriesProcessedCount + }, + fs: { + totalBytes: this._fsEntriesTotalBytes, + processedBytes: this._fsEntriesProcessedBytes + } + }); + + setImmediate(callback); + }.bind(this)); +}; + +/** + * Finalizes the module. + * + * @private + * @return void + */ +Archiver.prototype._moduleFinalize = function() { + if (typeof this._module.finalize === 'function') { + this._module.finalize(); + } else if (typeof this._module.end === 'function') { + this._module.end(); + } else { + this.emit('error', new ArchiverError('NOENDMETHOD')); + } +}; + +/** + * Pipes the module to our internal stream with error bubbling. + * + * @private + * @return void + */ +Archiver.prototype._modulePipe = function() { + this._module.on('error', this._onModuleError.bind(this)); + this._module.pipe(this); + this._state.modulePiped = true; +}; + +/** + * Determines if the current module supports a defined feature. 
+ * + * @private + * @param {String} key + * @return {Boolean} + */ +Archiver.prototype._moduleSupports = function(key) { + if (!this._module.supports || !this._module.supports[key]) { + return false; + } + + return this._module.supports[key]; +}; + +/** + * Unpipes the module from our internal stream. + * + * @private + * @return void + */ +Archiver.prototype._moduleUnpipe = function() { + this._module.unpipe(this); + this._state.modulePiped = false; +}; + +/** + * Normalizes entry data with fallbacks for key properties. + * + * @private + * @param {Object} data + * @param {fs.Stats} stats + * @return {Object} + */ +Archiver.prototype._normalizeEntryData = function(data, stats) { + data = util.defaults(data, { + type: 'file', + name: null, + date: null, + mode: null, + prefix: null, + sourcePath: null, + stats: false + }); + + if (stats && data.stats === false) { + data.stats = stats; + } + + var isDir = data.type === 'directory'; + + if (data.name) { + if (typeof data.prefix === 'string' && '' !== data.prefix) { + data.name = data.prefix + '/' + data.name; + data.prefix = null; + } + + data.name = util.sanitizePath(data.name); + + if (data.type !== 'symlink' && data.name.slice(-1) === '/') { + isDir = true; + data.type = 'directory'; + } else if (isDir) { + data.name += '/'; + } + } + + // 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644 + if (typeof data.mode === 'number') { + if (win32) { + data.mode &= 511; + } else { + data.mode &= 4095 + } + } else if (data.stats && data.mode === null) { + if (win32) { + data.mode = data.stats.mode & 511; + } else { + data.mode = data.stats.mode & 4095; + } + + // stat isn't reliable on windows; force 0755 for dir + if (win32 && isDir) { + data.mode = 493; + } + } else if (data.mode === null) { + data.mode = isDir ? 493 : 420; + } + + if (data.stats && data.date === null) { + data.date = data.stats.mtime; + } else { + data.date = util.dateify(data.date); + } + + return data; +}; + +/** + * Error listener that re-emits error on to our internal stream. + * + * @private + * @param {Error} err + * @return void + */ +Archiver.prototype._onModuleError = function(err) { + /** + * @event Archiver#error + * @type {ErrorData} + */ + this.emit('error', err); +}; + +/** + * Checks the various state variables after queue has drained to determine if + * we need to `finalize`. + * + * @private + * @return void + */ +Archiver.prototype._onQueueDrain = function() { + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + return; + } + + if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { + this._finalize(); + } +}; + +/** + * Appends each queue task to the module. + * + * @private + * @param {Object} task + * @param {Function} callback + * @return void + */ +Archiver.prototype._onQueueTask = function(task, callback) { + var fullCallback = () => { + if(task.data.callback) { + task.data.callback(); + } + callback(); + } + + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + fullCallback(); + return; + } + + this._task = task; + this._moduleAppend(task.source, task.data, fullCallback); +}; + +/** + * Performs a file stat and reinjects the task back into the queue. 
+ * + * @private + * @param {Object} task + * @param {Function} callback + * @return void + */ +Archiver.prototype._onStatQueueTask = function(task, callback) { + if (this._state.finalizing || this._state.finalized || this._state.aborted) { + callback(); + return; + } + + fs.lstat(task.filepath, function(err, stats) { + if (this._state.aborted) { + setImmediate(callback); + return; + } + + if (err) { + this._entriesCount--; + + /** + * @event Archiver#warning + * @type {ErrorData} + */ + this.emit('warning', err); + setImmediate(callback); + return; + } + + task = this._updateQueueTaskWithStats(task, stats); + + if (task) { + if (stats.size) { + this._fsEntriesTotalBytes += stats.size; + } + + this._queue.push(task); + } + + setImmediate(callback); + }.bind(this)); +}; + +/** + * Unpipes the module and ends our internal stream. + * + * @private + * @return void + */ +Archiver.prototype._shutdown = function() { + this._moduleUnpipe(); + this.end(); +}; + +/** + * Tracks the bytes emitted by our internal stream. + * + * @private + * @param {Buffer} chunk + * @param {String} encoding + * @param {Function} callback + * @return void + */ +Archiver.prototype._transform = function(chunk, encoding, callback) { + if (chunk) { + this._pointer += chunk.length; + } + + callback(null, chunk); +}; + +/** + * Updates and normalizes a queue task using stats data. + * + * @private + * @param {Object} task + * @param {fs.Stats} stats + * @return {Object} + */ +Archiver.prototype._updateQueueTaskWithStats = function(task, stats) { + if (stats.isFile()) { + task.data.type = 'file'; + task.data.sourceType = 'stream'; + task.source = util.lazyReadStream(task.filepath); + } else if (stats.isDirectory() && this._moduleSupports('directory')) { + task.data.name = util.trailingSlashIt(task.data.name); + task.data.type = 'directory'; + task.data.sourcePath = util.trailingSlashIt(task.filepath); + task.data.sourceType = 'buffer'; + task.source = Buffer.concat([]); + } else if (stats.isSymbolicLink() && this._moduleSupports('symlink')) { + var linkPath = fs.readlinkSync(task.filepath); + var dirName = path.dirname(task.filepath); + task.data.type = 'symlink'; + task.data.linkname = path.relative(dirName, path.resolve(dirName, linkPath)); + task.data.sourceType = 'buffer'; + task.source = Buffer.concat([]); + } else { + if (stats.isDirectory()) { + this.emit('warning', new ArchiverError('DIRECTORYNOTSUPPORTED', task.data)); + } else if (stats.isSymbolicLink()) { + this.emit('warning', new ArchiverError('SYMLINKNOTSUPPORTED', task.data)); + } else { + this.emit('warning', new ArchiverError('ENTRYNOTSUPPORTED', task.data)); + } + + return null; + } + + task.data = this._normalizeEntryData(task.data, stats); + + return task; +}; + +/** + * Aborts the archiving process, taking a best-effort approach, by: + * + * - removing any pending queue tasks + * - allowing any active queue workers to finish + * - detaching internal module pipes + * - ending both sides of the Transform stream + * + * It will NOT drain any remaining sources. + * + * @return {this} + */ +Archiver.prototype.abort = function() { + if (this._state.aborted || this._state.finalized) { + return this; + } + + this._abort(); + + return this; +}; + +/** + * Appends an input source (text string, buffer, or stream) to the instance. + * + * When the instance has received, processed, and emitted the input, the `entry` + * event is fired. + * + * @fires Archiver#entry + * @param {(Buffer|Stream|String)} source The input source. 
+ * @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}. + * @return {this} + */ +Archiver.prototype.append = function(source, data) { + if (this._state.finalize || this._state.aborted) { + this.emit('error', new ArchiverError('QUEUECLOSED')); + return this; + } + + data = this._normalizeEntryData(data); + + if (typeof data.name !== 'string' || data.name.length === 0) { + this.emit('error', new ArchiverError('ENTRYNAMEREQUIRED')); + return this; + } + + if (data.type === 'directory' && !this._moduleSupports('directory')) { + this.emit('error', new ArchiverError('DIRECTORYNOTSUPPORTED', { name: data.name })); + return this; + } + + source = util.normalizeInputSource(source); + + if (Buffer.isBuffer(source)) { + data.sourceType = 'buffer'; + } else if (util.isStream(source)) { + data.sourceType = 'stream'; + } else { + this.emit('error', new ArchiverError('INPUTSTEAMBUFFERREQUIRED', { name: data.name })); + return this; + } + + this._entriesCount++; + this._queue.push({ + data: data, + source: source + }); + + return this; +}; + +/** + * Appends a directory and its files, recursively, given its dirpath. + * + * @param {String} dirpath The source directory path. + * @param {String} destpath The destination path within the archive. + * @param {(EntryData|Function)} data See also [ZipEntryData]{@link ZipEntryData} and + * [TarEntryData]{@link TarEntryData}. + * @return {this} + */ +Archiver.prototype.directory = function(dirpath, destpath, data) { + if (this._state.finalize || this._state.aborted) { + this.emit('error', new ArchiverError('QUEUECLOSED')); + return this; + } + + if (typeof dirpath !== 'string' || dirpath.length === 0) { + this.emit('error', new ArchiverError('DIRECTORYDIRPATHREQUIRED')); + return this; + } + + this._pending++; + + if (destpath === false) { + destpath = ''; + } else if (typeof destpath !== 'string'){ + destpath = dirpath; + } + + var dataFunction = false; + if (typeof data === 'function') { + dataFunction = data; + data = {}; + } else if (typeof data !== 'object') { + data = {}; + } + + var globOptions = { + stat: true, + dot: true + }; + + function onGlobEnd() { + this._pending--; + this._maybeFinalize(); + } + + function onGlobError(err) { + this.emit('error', err); + } + + function onGlobMatch(match){ + globber.pause(); + + var ignoreMatch = false; + var entryData = Object.assign({}, data); + entryData.name = match.relative; + entryData.prefix = destpath; + entryData.stats = match.stat; + entryData.callback = globber.resume.bind(globber); + + try { + if (dataFunction) { + entryData = dataFunction(entryData); + + if (entryData === false) { + ignoreMatch = true; + } else if (typeof entryData !== 'object') { + throw new ArchiverError('DIRECTORYFUNCTIONINVALIDDATA', { dirpath: dirpath }); + } + } + } catch(e) { + this.emit('error', e); + return; + } + + if (ignoreMatch) { + globber.resume(); + return; + } + + this._append(match.absolute, entryData); + } + + var globber = glob(dirpath, globOptions); + globber.on('error', onGlobError.bind(this)); + globber.on('match', onGlobMatch.bind(this)); + globber.on('end', onGlobEnd.bind(this)); + + return this; +}; + +/** + * Appends a file given its filepath using a + * [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to + * prevent issues with open file limits. + * + * When the instance has received, processed, and emitted the file, the `entry` + * event is fired. + * + * @param {String} filepath The source filepath. 
+ * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and + * [TarEntryData]{@link TarEntryData}. + * @return {this} + */ +Archiver.prototype.file = function(filepath, data) { + if (this._state.finalize || this._state.aborted) { + this.emit('error', new ArchiverError('QUEUECLOSED')); + return this; + } + + if (typeof filepath !== 'string' || filepath.length === 0) { + this.emit('error', new ArchiverError('FILEFILEPATHREQUIRED')); + return this; + } + + this._append(filepath, data); + + return this; +}; + +/** + * Appends multiple files that match a glob pattern. + * + * @param {String} pattern The [glob pattern]{@link https://github.com/isaacs/minimatch} to match. + * @param {Object} options See [node-readdir-glob]{@link https://github.com/yqnn/node-readdir-glob#options}. + * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and + * [TarEntryData]{@link TarEntryData}. + * @return {this} + */ +Archiver.prototype.glob = function(pattern, options, data) { + this._pending++; + + options = util.defaults(options, { + stat: true, + pattern: pattern + }); + + function onGlobEnd() { + this._pending--; + this._maybeFinalize(); + } + + function onGlobError(err) { + this.emit('error', err); + } + + function onGlobMatch(match){ + globber.pause(); + var entryData = Object.assign({}, data); + entryData.callback = globber.resume.bind(globber); + entryData.stats = match.stat; + entryData.name = match.relative; + + this._append(match.absolute, entryData); + } + + var globber = glob(options.cwd || '.', options); + globber.on('error', onGlobError.bind(this)); + globber.on('match', onGlobMatch.bind(this)); + globber.on('end', onGlobEnd.bind(this)); + + return this; +}; + +/** + * Finalizes the instance and prevents further appending to the archive + * structure (queue will continue til drained). + * + * The `end`, `close` or `finish` events on the destination stream may fire + * right after calling this method so you should set listeners beforehand to + * properly detect stream completion. + * + * @return {Promise} + */ +Archiver.prototype.finalize = function() { + if (this._state.aborted) { + var abortedError = new ArchiverError('ABORTED'); + this.emit('error', abortedError); + return Promise.reject(abortedError); + } + + if (this._state.finalize) { + var finalizingError = new ArchiverError('FINALIZING'); + this.emit('error', finalizingError); + return Promise.reject(finalizingError); + } + + this._state.finalize = true; + + if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { + this._finalize(); + } + + var self = this; + + return new Promise(function(resolve, reject) { + var errored; + + self._module.on('end', function() { + if (!errored) { + resolve(); + } + }) + + self._module.on('error', function(err) { + errored = true; + reject(err); + }) + }) +}; + +/** + * Sets the module format name used for archiving. + * + * @param {String} format The name of the format. + * @return {this} + */ +Archiver.prototype.setFormat = function(format) { + if (this._format) { + this.emit('error', new ArchiverError('FORMATSET')); + return this; + } + + this._format = format; + + return this; +}; + +/** + * Sets the module used for archiving. + * + * @param {Function} module The function for archiver to interact with. 
+ * @return {this} + */ +Archiver.prototype.setModule = function(module) { + if (this._state.aborted) { + this.emit('error', new ArchiverError('ABORTED')); + return this; + } + + if (this._state.module) { + this.emit('error', new ArchiverError('MODULESET')); + return this; + } + + this._module = module; + this._modulePipe(); + + return this; +}; + +/** + * Appends a symlink to the instance. + * + * This does NOT interact with filesystem and is used for programmatically creating symlinks. + * + * @param {String} filepath The symlink path (within archive). + * @param {String} target The target path (within archive). + * @param {Number} mode Sets the entry permissions. + * @return {this} + */ +Archiver.prototype.symlink = function(filepath, target, mode) { + if (this._state.finalize || this._state.aborted) { + this.emit('error', new ArchiverError('QUEUECLOSED')); + return this; + } + + if (typeof filepath !== 'string' || filepath.length === 0) { + this.emit('error', new ArchiverError('SYMLINKFILEPATHREQUIRED')); + return this; + } + + if (typeof target !== 'string' || target.length === 0) { + this.emit('error', new ArchiverError('SYMLINKTARGETREQUIRED', { filepath: filepath })); + return this; + } + + if (!this._moduleSupports('symlink')) { + this.emit('error', new ArchiverError('SYMLINKNOTSUPPORTED', { filepath: filepath })); + return this; + } + + var data = {}; + data.type = 'symlink'; + data.name = filepath.replace(/\\/g, '/'); + data.linkname = target.replace(/\\/g, '/'); + data.sourceType = 'buffer'; + + if (typeof mode === "number") { + data.mode = mode; + } + + this._entriesCount++; + this._queue.push({ + data: data, + source: Buffer.concat([]) + }); + + return this; +}; + +/** + * Returns the current length (in bytes) that has been emitted. + * + * @return {Number} + */ +Archiver.prototype.pointer = function() { + return this._pointer; +}; + +/** + * Middleware-like helper that has yet to be fully implemented. + * + * @private + * @param {Function} plugin + * @return {this} + */ +Archiver.prototype.use = function(plugin) { + this._streams.push(plugin); + return this; +}; + +module.exports = Archiver; + +/** + * @typedef {Object} CoreOptions + * @global + * @property {Number} [statConcurrency=4] Sets the number of workers used to + * process the internal fs stat queue. + */ + +/** + * @typedef {Object} TransformOptions + * @property {Boolean} [allowHalfOpen=true] If set to false, then the stream + * will automatically end the readable side when the writable side ends and vice + * versa. + * @property {Boolean} [readableObjectMode=false] Sets objectMode for readable + * side of the stream. Has no effect if objectMode is true. + * @property {Boolean} [writableObjectMode=false] Sets objectMode for writable + * side of the stream. Has no effect if objectMode is true. + * @property {Boolean} [decodeStrings=true] Whether or not to decode strings + * into Buffers before passing them to _write(). `Writable` + * @property {String} [encoding=NULL] If specified, then buffers will be decoded + * to strings using the specified encoding. `Readable` + * @property {Number} [highWaterMark=16kb] The maximum number of bytes to store + * in the internal buffer before ceasing to read from the underlying resource. + * `Readable` `Writable` + * @property {Boolean} [objectMode=false] Whether this stream should behave as a + * stream of objects. Meaning that stream.read(n) returns a single value instead + * of a Buffer of size n. 
`Readable` `Writable` + */ + +/** + * @typedef {Object} EntryData + * @property {String} name Sets the entry name including internal path. + * @property {(String|Date)} [date=NOW()] Sets the entry date. + * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. + * @property {String} [prefix] Sets a path prefix for the entry name. Useful + * when working with methods like `directory` or `glob`. + * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing + * for reduction of fs stat calls when stat data is already known. + */ + +/** + * @typedef {Object} ErrorData + * @property {String} message The message of the error. + * @property {String} code The error code assigned to this error. + * @property {String} data Additional data provided for reporting or debugging (where available). + */ + +/** + * @typedef {Object} ProgressData + * @property {Object} entries + * @property {Number} entries.total Number of entries that have been appended. + * @property {Number} entries.processed Number of entries that have been processed. + * @property {Object} fs + * @property {Number} fs.totalBytes Number of bytes that have been appended. Calculated asynchronously and might not be accurate: it growth while entries are added. (based on fs.Stats) + * @property {Number} fs.processedBytes Number of bytes that have been processed. (based on fs.Stats) + */ diff --git a/backend/node_modules/archiver/lib/error.js b/backend/node_modules/archiver/lib/error.js new file mode 100644 index 00000000..6bcb0ae1 --- /dev/null +++ b/backend/node_modules/archiver/lib/error.js @@ -0,0 +1,40 @@ +/** + * Archiver Core + * + * @ignore + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. 
+ */ + +var util = require('util'); + +const ERROR_CODES = { + 'ABORTED': 'archive was aborted', + 'DIRECTORYDIRPATHREQUIRED': 'diretory dirpath argument must be a non-empty string value', + 'DIRECTORYFUNCTIONINVALIDDATA': 'invalid data returned by directory custom data function', + 'ENTRYNAMEREQUIRED': 'entry name must be a non-empty string value', + 'FILEFILEPATHREQUIRED': 'file filepath argument must be a non-empty string value', + 'FINALIZING': 'archive already finalizing', + 'QUEUECLOSED': 'queue closed', + 'NOENDMETHOD': 'no suitable finalize/end method defined by module', + 'DIRECTORYNOTSUPPORTED': 'support for directory entries not defined by module', + 'FORMATSET': 'archive format already set', + 'INPUTSTEAMBUFFERREQUIRED': 'input source must be valid Stream or Buffer instance', + 'MODULESET': 'module already set', + 'SYMLINKNOTSUPPORTED': 'support for symlink entries not defined by module', + 'SYMLINKFILEPATHREQUIRED': 'symlink filepath argument must be a non-empty string value', + 'SYMLINKTARGETREQUIRED': 'symlink target argument must be a non-empty string value', + 'ENTRYNOTSUPPORTED': 'entry not supported' +}; + +function ArchiverError(code, data) { + Error.captureStackTrace(this, this.constructor); + //this.name = this.constructor.name; + this.message = ERROR_CODES[code] || code; + this.code = code; + this.data = data; +} + +util.inherits(ArchiverError, Error); + +exports = module.exports = ArchiverError; \ No newline at end of file diff --git a/backend/node_modules/archiver/lib/plugins/json.js b/backend/node_modules/archiver/lib/plugins/json.js new file mode 100644 index 00000000..caf63de9 --- /dev/null +++ b/backend/node_modules/archiver/lib/plugins/json.js @@ -0,0 +1,110 @@ +/** + * JSON Format Plugin + * + * @module plugins/json + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. 
+ */ +var inherits = require('util').inherits; +var Transform = require('readable-stream').Transform; + +var crc32 = require('buffer-crc32'); +var util = require('archiver-utils'); + +/** + * @constructor + * @param {(JsonOptions|TransformOptions)} options + */ +var Json = function(options) { + if (!(this instanceof Json)) { + return new Json(options); + } + + options = this.options = util.defaults(options, {}); + + Transform.call(this, options); + + this.supports = { + directory: true, + symlink: true + }; + + this.files = []; +}; + +inherits(Json, Transform); + +/** + * [_transform description] + * + * @private + * @param {Buffer} chunk + * @param {String} encoding + * @param {Function} callback + * @return void + */ +Json.prototype._transform = function(chunk, encoding, callback) { + callback(null, chunk); +}; + +/** + * [_writeStringified description] + * + * @private + * @return void + */ +Json.prototype._writeStringified = function() { + var fileString = JSON.stringify(this.files); + this.write(fileString); +}; + +/** + * [append description] + * + * @param {(Buffer|Stream)} source + * @param {EntryData} data + * @param {Function} callback + * @return void + */ +Json.prototype.append = function(source, data, callback) { + var self = this; + + data.crc32 = 0; + + function onend(err, sourceBuffer) { + if (err) { + callback(err); + return; + } + + data.size = sourceBuffer.length || 0; + data.crc32 = crc32.unsigned(sourceBuffer); + + self.files.push(data); + + callback(null, data); + } + + if (data.sourceType === 'buffer') { + onend(null, source); + } else if (data.sourceType === 'stream') { + util.collectStream(source, onend); + } +}; + +/** + * [finalize description] + * + * @return void + */ +Json.prototype.finalize = function() { + this._writeStringified(); + this.end(); +}; + +module.exports = Json; + +/** + * @typedef {Object} JsonOptions + * @global + */ diff --git a/backend/node_modules/archiver/lib/plugins/tar.js b/backend/node_modules/archiver/lib/plugins/tar.js new file mode 100644 index 00000000..3a170090 --- /dev/null +++ b/backend/node_modules/archiver/lib/plugins/tar.js @@ -0,0 +1,167 @@ +/** + * TAR Format Plugin + * + * @module plugins/tar + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. 
+ */ +var zlib = require('zlib'); + +var engine = require('tar-stream'); +var util = require('archiver-utils'); + +/** + * @constructor + * @param {TarOptions} options + */ +var Tar = function(options) { + if (!(this instanceof Tar)) { + return new Tar(options); + } + + options = this.options = util.defaults(options, { + gzip: false + }); + + if (typeof options.gzipOptions !== 'object') { + options.gzipOptions = {}; + } + + this.supports = { + directory: true, + symlink: true + }; + + this.engine = engine.pack(options); + this.compressor = false; + + if (options.gzip) { + this.compressor = zlib.createGzip(options.gzipOptions); + this.compressor.on('error', this._onCompressorError.bind(this)); + } +}; + +/** + * [_onCompressorError description] + * + * @private + * @param {Error} err + * @return void + */ +Tar.prototype._onCompressorError = function(err) { + this.engine.emit('error', err); +}; + +/** + * [append description] + * + * @param {(Buffer|Stream)} source + * @param {TarEntryData} data + * @param {Function} callback + * @return void + */ +Tar.prototype.append = function(source, data, callback) { + var self = this; + + data.mtime = data.date; + + function append(err, sourceBuffer) { + if (err) { + callback(err); + return; + } + + self.engine.entry(data, sourceBuffer, function(err) { + callback(err, data); + }); + } + + if (data.sourceType === 'buffer') { + append(null, source); + } else if (data.sourceType === 'stream' && data.stats) { + data.size = data.stats.size; + + var entry = self.engine.entry(data, function(err) { + callback(err, data); + }); + + source.pipe(entry); + } else if (data.sourceType === 'stream') { + util.collectStream(source, append); + } +}; + +/** + * [finalize description] + * + * @return void + */ +Tar.prototype.finalize = function() { + this.engine.finalize(); +}; + +/** + * [on description] + * + * @return this.engine + */ +Tar.prototype.on = function() { + return this.engine.on.apply(this.engine, arguments); +}; + +/** + * [pipe description] + * + * @param {String} destination + * @param {Object} options + * @return this.engine + */ +Tar.prototype.pipe = function(destination, options) { + if (this.compressor) { + return this.engine.pipe.apply(this.engine, [this.compressor]).pipe(destination, options); + } else { + return this.engine.pipe.apply(this.engine, arguments); + } +}; + +/** + * [unpipe description] + * + * @return this.engine + */ +Tar.prototype.unpipe = function() { + if (this.compressor) { + return this.compressor.unpipe.apply(this.compressor, arguments); + } else { + return this.engine.unpipe.apply(this.engine, arguments); + } +}; + +module.exports = Tar; + +/** + * @typedef {Object} TarOptions + * @global + * @property {Boolean} [gzip=false] Compress the tar archive using gzip. + * @property {Object} [gzipOptions] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} + * to control compression. + * @property {*} [*] See [tar-stream]{@link https://github.com/mafintosh/tar-stream} documentation for additional properties. + */ + +/** + * @typedef {Object} TarEntryData + * @global + * @property {String} name Sets the entry name including internal path. + * @property {(String|Date)} [date=NOW()] Sets the entry date. + * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. + * @property {String} [prefix] Sets a path prefix for the entry name. Useful + * when working with methods like `directory` or `glob`. 
+ * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing + * for reduction of fs stat calls when stat data is already known. + */ + +/** + * TarStream Module + * @external TarStream + * @see {@link https://github.com/mafintosh/tar-stream} + */ diff --git a/backend/node_modules/archiver/lib/plugins/zip.js b/backend/node_modules/archiver/lib/plugins/zip.js new file mode 100644 index 00000000..df6f0743 --- /dev/null +++ b/backend/node_modules/archiver/lib/plugins/zip.js @@ -0,0 +1,120 @@ +/** + * ZIP Format Plugin + * + * @module plugins/zip + * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE} + * @copyright (c) 2012-2014 Chris Talkington, contributors. + */ +var engine = require('zip-stream'); +var util = require('archiver-utils'); + +/** + * @constructor + * @param {ZipOptions} [options] + * @param {String} [options.comment] Sets the zip archive comment. + * @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC. + * @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers. + * @param {Boolean} [options.namePrependSlash=false] Prepends a forward slash to archive file paths. + * @param {Boolean} [options.store=false] Sets the compression method to STORE. + * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} + */ +var Zip = function(options) { + if (!(this instanceof Zip)) { + return new Zip(options); + } + + options = this.options = util.defaults(options, { + comment: '', + forceUTC: false, + namePrependSlash: false, + store: false + }); + + this.supports = { + directory: true, + symlink: true + }; + + this.engine = new engine(options); +}; + +/** + * @param {(Buffer|Stream)} source + * @param {ZipEntryData} data + * @param {String} data.name Sets the entry name including internal path. + * @param {(String|Date)} [data.date=NOW()] Sets the entry date. + * @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions. + * @param {String} [data.prefix] Sets a path prefix for the entry name. Useful + * when working with methods like `directory` or `glob`. + * @param {fs.Stats} [data.stats] Sets the fs stat data for this entry allowing + * for reduction of fs stat calls when stat data is already known. + * @param {Boolean} [data.store=ZipOptions.store] Sets the compression method to STORE. + * @param {Function} callback + * @return void + */ +Zip.prototype.append = function(source, data, callback) { + this.engine.entry(source, data, callback); +}; + +/** + * @return void + */ +Zip.prototype.finalize = function() { + this.engine.finalize(); +}; + +/** + * @return this.engine + */ +Zip.prototype.on = function() { + return this.engine.on.apply(this.engine, arguments); +}; + +/** + * @return this.engine + */ +Zip.prototype.pipe = function() { + return this.engine.pipe.apply(this.engine, arguments); +}; + +/** + * @return this.engine + */ +Zip.prototype.unpipe = function() { + return this.engine.unpipe.apply(this.engine, arguments); +}; + +module.exports = Zip; + +/** + * @typedef {Object} ZipOptions + * @global + * @property {String} [comment] Sets the zip archive comment. + * @property {Boolean} [forceLocalTime=false] Forces the archive to contain local file times instead of UTC. + * @property {Boolean} [forceZip64=false] Forces the archive to contain ZIP64 headers. + * @prpperty {Boolean} [namePrependSlash=false] Prepends a forward slash to archive file paths. 
+ * @property {Boolean} [store=false] Sets the compression method to STORE. + * @property {Object} [zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options} + * to control compression. + * @property {*} [*] See [zip-stream]{@link https://archiverjs.com/zip-stream/ZipStream.html} documentation for current list of properties. + */ + +/** + * @typedef {Object} ZipEntryData + * @global + * @property {String} name Sets the entry name including internal path. + * @property {(String|Date)} [date=NOW()] Sets the entry date. + * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. + * @property {Boolean} [namePrependSlash=ZipOptions.namePrependSlash] Prepends a forward slash to archive file paths. + * @property {String} [prefix] Sets a path prefix for the entry name. Useful + * when working with methods like `directory` or `glob`. + * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing + * for reduction of fs stat calls when stat data is already known. + * @property {Boolean} [store=ZipOptions.store] Sets the compression method to STORE. + */ + +/** + * ZipStream Module + * @external ZipStream + * @see {@link https://www.archiverjs.com/zip-stream/ZipStream.html} + */ diff --git a/backend/node_modules/archiver/node_modules/readable-stream/CONTRIBUTING.md b/backend/node_modules/archiver/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 00000000..f478d58d --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/backend/node_modules/archiver/node_modules/readable-stream/GOVERNANCE.md b/backend/node_modules/archiver/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 00000000..16ffb93f --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. 
+ +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. 
If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/backend/node_modules/archiver/node_modules/readable-stream/LICENSE b/backend/node_modules/archiver/node_modules/readable-stream/LICENSE new file mode 100644 index 00000000..2873b3b2 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/backend/node_modules/archiver/node_modules/readable-stream/README.md b/backend/node_modules/archiver/node_modules/readable-stream/README.md new file mode 100644 index 00000000..19117c1a --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. 
Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/backend/node_modules/archiver/node_modules/readable-stream/errors-browser.js b/backend/node_modules/archiver/node_modules/readable-stream/errors-browser.js new file mode 100644 index 00000000..fb8e73e1 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/backend/node_modules/archiver/node_modules/readable-stream/errors.js b/backend/node_modules/archiver/node_modules/readable-stream/errors.js new file mode 100644 index 00000000..8471526d --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) 
=> String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/backend/node_modules/archiver/node_modules/readable-stream/experimentalWarning.js b/backend/node_modules/archiver/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 00000000..78e84149 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. 
This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_duplex.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000..19abfa60 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,126 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); +require('inherits')(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. 
+ var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_passthrough.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000..24a6bdde --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,37 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; +var Transform = require('./_stream_transform'); +require('inherits')(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_readable.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000..df1f608d --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1027 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = require('util'); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = require('./internal/streams/buffer_list'); +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_transform.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000..1ccb7157 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,190 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = require('./_stream_duplex'); +require('inherits')(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_writable.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000..292415e2 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,641 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +require('inherits')(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. 
options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. 
+ this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. 
+ state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 00000000..742c5a46 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,180 @@ +'use strict'; + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var finished = require('./end-of-stream'); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/buffer_list.js 
b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 00000000..69bda497 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,183 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = require('buffer'), + Buffer = _require.Buffer; +var _require2 = require('util'), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/destroy.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 00000000..31a17c4d --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,96 @@ +'use strict'; + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so 
changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 00000000..59c671b5 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', 
onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/from-browser.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 00000000..a4ce56f3 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/from.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 00000000..0a34ee92 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,52 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/pipeline.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 00000000..e6f39241 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). 
+ +'use strict'; + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/state.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 00000000..3fbf8927 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,22 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 
16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 00000000..9332a3fd --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/stream.js b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 00000000..ce2ad5b6 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/backend/node_modules/archiver/node_modules/readable-stream/package.json b/backend/node_modules/archiver/node_modules/readable-stream/package.json new file mode 100644 index 00000000..ade59e71 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.2", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/backend/node_modules/archiver/node_modules/readable-stream/readable-browser.js b/backend/node_modules/archiver/node_modules/readable-stream/readable-browser.js new file mode 100644 index 00000000..adbf60de --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = 
require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/backend/node_modules/archiver/node_modules/readable-stream/readable.js b/backend/node_modules/archiver/node_modules/readable-stream/readable.js new file mode 100644 index 00000000..9e0ca120 --- /dev/null +++ b/backend/node_modules/archiver/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/backend/node_modules/archiver/package.json b/backend/node_modules/archiver/package.json new file mode 100644 index 00000000..9145a7a7 --- /dev/null +++ b/backend/node_modules/archiver/package.json @@ -0,0 +1,60 @@ +{ + "name": "archiver", + "version": "6.0.2", + "description": "a streaming interface for archive generation", + "homepage": "https://github.com/archiverjs/node-archiver", + "author": { + "name": "Chris Talkington", + "url": "http://christalkington.com/" + }, + "repository": { + "type": "git", + "url": "https://github.com/archiverjs/node-archiver.git" + }, + "bugs": { + "url": "https://github.com/archiverjs/node-archiver/issues" + }, + "license": "MIT", + "main": "index.js", + "files": [ + "index.js", + "lib" + ], + "engines": { + "node": ">= 12.0.0" + }, + "scripts": { + "test": "mocha --reporter dot", + "bench": "node benchmark/simple/pack-zip.js" + }, + "dependencies": { + "archiver-utils": "^4.0.1", + "async": "^3.2.4", + "buffer-crc32": "^0.2.1", + "readable-stream": "^3.6.0", + "readdir-glob": "^1.1.2", + "tar-stream": "^3.0.0", + "zip-stream": "^5.0.1" + }, + "devDependencies": { + "archiver-jsdoc-theme": "1.1.3", + "chai": "4.4.1", + "jsdoc": "4.0.2", + "mkdirp": "3.0.1", + "mocha": "9.2.2", + "rimraf": "4.4.1", + "stream-bench": "0.1.2", + "tar": "6.2.0", + "yauzl": "2.10.0" + }, + "keywords": [ + "archive", + "archiver", + "stream", + "zip", + "tar" + ], + "publishConfig": { + "registry": "https://registry.npmjs.org/" + } +} diff --git a/backend/node_modules/async/CHANGELOG.md b/backend/node_modules/async/CHANGELOG.md new file mode 100644 index 00000000..ce990a51 --- /dev/null +++ b/backend/node_modules/async/CHANGELOG.md @@ -0,0 +1,351 @@ +# v3.2.5 +- Ensure `Error` objects such as `AggregateError` are propagated without modification (#1920) + +# v3.2.4 +- Fix a bug in `priorityQueue` where it didn't wait for the result. (#1725) +- Fix a bug where `unshiftAsync` was included in `priorityQueue`. (#1790) + +# v3.2.3 +- Fix bugs in comment parsing in `autoInject`. 
(#1767, #1780) + +# v3.2.2 +- Fix potential prototype pollution exploit + +# v3.2.1 +- Use `queueMicrotask` if available to the environment (#1761) +- Minor perf improvement in `priorityQueue` (#1727) +- More examples in documentation (#1726) +- Various doc fixes (#1708, #1712, #1717, #1740, #1739, #1749, #1756) +- Improved test coverage (#1754) + +# v3.2.0 +- Fix a bug in Safari related to overwriting `func.name` +- Remove built-in browserify configuration (#1653) +- Varios doc fixes (#1688, #1703, #1704) + +# v3.1.1 +- Allow redefining `name` property on wrapped functions. + +# v3.1.0 + +- Added `q.pushAsync` and `q.unshiftAsync`, analagous to `q.push` and `q.unshift`, except they always do not accept a callback, and reject if processing the task errors. (#1659) +- Promises returned from `q.push` and `q.unshift` when a callback is not passed now resolve even if an error ocurred. (#1659) +- Fixed a parsing bug in `autoInject` with complicated function bodies (#1663) +- Added ES6+ configuration for Browserify bundlers (#1653) +- Various doc fixes (#1664, #1658, #1665, #1652) + +# v3.0.1 + +## Bug fixes +- Fixed a regression where arrays passed to `queue` and `cargo` would be completely flattened. (#1645) +- Clarified Async's browser support (#1643) + + +# v3.0.0 + +The `async`/`await` release! + +There are a lot of new features and subtle breaking changes in this major version, but the biggest feature is that most Async methods return a Promise if you omit the callback, meaning you can `await` them from within an `async` function. + +```js +const results = await async.mapLimit(urls, 5, async url => { + const resp = await fetch(url) + return resp.body +}) +``` + +## Breaking Changes +- Most Async methods return a Promise when the final callback is omitted, making them `await`-able! (#1572) +- We are now making heavy use of ES2015 features, this means we have dropped out-of-the-box support for Node 4 and earlier, and many old versions of browsers. (#1541, #1553) +- In `queue`, `priorityQueue`, `cargo` and `cargoQueue`, the "event"-style methods, like `q.drain` and `q.saturated` are now methods that register a callback, rather than properties you assign a callback to. They are now of the form `q.drain(callback)`. If you do not pass a callback a Promise will be returned for the next occurrence of the event, making them `await`-able, e.g. `await q.drain()`. (#1586, #1641) +- Calling `callback(false)` will cancel an async method, preventing further iteration and callback calls. This is useful for preventing memory leaks when you break out of an async flow by calling an outer callback. (#1064, #1542) +- `during` and `doDuring` have been removed, and instead `whilst`, `doWhilst`, `until` and `doUntil` now have asynchronous `test` functions. (#850, #1557) +- `limits` of less than 1 now cause an error to be thrown in queues and collection methods. (#1249, #1552) +- `memoize` no longer memoizes errors (#1465, #1466) +- `applyEach`/`applyEachSeries` have a simpler interface, to make them more easily type-able. It always returns a function that takes in a single callback argument. If that callback is omitted, a promise is returned, making it awaitable. (#1228, #1640) + +## New Features +- Async generators are now supported in all the Collection methods. (#1560) +- Added `cargoQueue`, a queue with both `concurrency` and `payload` size parameters. 
(#1567) +- Queue objects returned from `queue` now have a `Symbol.iterator` method, meaning they can be iterated over to inspect the current list of items in the queue. (#1459, #1556) +- A ESM-flavored `async.mjs` is included in the `async` package. This is described in the `package.json` `"module"` field, meaning it should be automatically used by Webpack and other compatible bundlers. + +## Bug fixes +- Better handle arbitrary error objects in `asyncify` (#1568, #1569) + +## Other +- Removed Lodash as a dependency (#1283, #1528) +- Miscellaneous docs fixes (#1393, #1501, #1540, #1543, #1558, #1563, #1564, #1579, #1581) +- Miscellaneous test fixes (#1538) + +------- + +# v2.6.1 +- Updated lodash to prevent `npm audit` warnings. (#1532, #1533) +- Made `async-es` more optimized for webpack users (#1517) +- Fixed a stack overflow with large collections and a synchronous iterator (#1514) +- Various small fixes/chores (#1505, #1511, #1527, #1530) + +# v2.6.0 +- Added missing aliases for many methods. Previously, you could not (e.g.) `require('async/find')` or use `async.anyLimit`. (#1483) +- Improved `queue` performance. (#1448, #1454) +- Add missing sourcemap (#1452, #1453) +- Various doc updates (#1448, #1471, #1483) + +# v2.5.0 +- Added `concatLimit`, the `Limit` equivalent of [`concat`](https://caolan.github.io/async/docs.html#concat) ([#1426](https://github.com/caolan/async/issues/1426), [#1430](https://github.com/caolan/async/pull/1430)) +- `concat` improvements: it now preserves order, handles falsy values and the `iteratee` callback takes a variable number of arguments ([#1437](https://github.com/caolan/async/issues/1437), [#1436](https://github.com/caolan/async/pull/1436)) +- Fixed an issue in `queue` where there was a size discrepancy between `workersList().length` and `running()` ([#1428](https://github.com/caolan/async/issues/1428), [#1429](https://github.com/caolan/async/pull/1429)) +- Various doc fixes ([#1422](https://github.com/caolan/async/issues/1422), [#1424](https://github.com/caolan/async/pull/1424)) + +# v2.4.1 +- Fixed a bug preventing functions wrapped with `timeout()` from being re-used. ([#1418](https://github.com/caolan/async/issues/1418), [#1419](https://github.com/caolan/async/issues/1419)) + +# v2.4.0 +- Added `tryEach`, for running async functions in parallel, where you only expect one to succeed. ([#1365](https://github.com/caolan/async/issues/1365), [#687](https://github.com/caolan/async/issues/687)) +- Improved performance, most notably in `parallel` and `waterfall` ([#1395](https://github.com/caolan/async/issues/1395)) +- Added `queue.remove()`, for removing items in a `queue` ([#1397](https://github.com/caolan/async/issues/1397), [#1391](https://github.com/caolan/async/issues/1391)) +- Fixed using `eval`, preventing Async from running in pages with Content Security Policy ([#1404](https://github.com/caolan/async/issues/1404), [#1403](https://github.com/caolan/async/issues/1403)) +- Fixed errors thrown in an `asyncify`ed function's callback being caught by the underlying Promise ([#1408](https://github.com/caolan/async/issues/1408)) +- Fixed timing of `queue.empty()` ([#1367](https://github.com/caolan/async/issues/1367)) +- Various doc fixes ([#1314](https://github.com/caolan/async/issues/1314), [#1394](https://github.com/caolan/async/issues/1394), [#1412](https://github.com/caolan/async/issues/1412)) + +# v2.3.0 +- Added support for ES2017 `async` functions. 
Wherever you can pass a Node-style/CPS function that uses a callback, you can also pass an `async` function. Previously, you had to wrap `async` functions with `asyncify`. The caveat is that it will only work if `async` functions are supported natively in your environment, transpiled implementations can't be detected. ([#1386](https://github.com/caolan/async/issues/1386), [#1390](https://github.com/caolan/async/issues/1390)) +- Small doc fix ([#1392](https://github.com/caolan/async/issues/1392)) + +# v2.2.0 +- Added `groupBy`, and the `Series`/`Limit` equivalents, analogous to [`_.groupBy`](http://lodash.com/docs#groupBy) ([#1364](https://github.com/caolan/async/issues/1364)) +- Fixed `transform` bug when `callback` was not passed ([#1381](https://github.com/caolan/async/issues/1381)) +- Added note about `reflect` to `parallel` docs ([#1385](https://github.com/caolan/async/issues/1385)) + +# v2.1.5 +- Fix `auto` bug when function names collided with Array.prototype ([#1358](https://github.com/caolan/async/issues/1358)) +- Improve some error messages ([#1349](https://github.com/caolan/async/issues/1349)) +- Avoid stack overflow case in queue +- Fixed an issue in `some`, `every` and `find` where processing would continue after the result was determined. +- Cleanup implementations of `some`, `every` and `find` + +# v2.1.3 +- Make bundle size smaller +- Create optimized hotpath for `filter` in array case. + +# v2.1.2 +- Fixed a stackoverflow bug with `detect`, `some`, `every` on large inputs ([#1293](https://github.com/caolan/async/issues/1293)). + +# v2.1.0 + +- `retry` and `retryable` now support an optional `errorFilter` function that determines if the `task` should retry on the error ([#1256](https://github.com/caolan/async/issues/1256), [#1261](https://github.com/caolan/async/issues/1261)) +- Optimized array iteration in `race`, `cargo`, `queue`, and `priorityQueue` ([#1253](https://github.com/caolan/async/issues/1253)) +- Added alias documentation to doc site ([#1251](https://github.com/caolan/async/issues/1251), [#1254](https://github.com/caolan/async/issues/1254)) +- Added [BootStrap scrollspy](http://getbootstrap.com/javascript/#scrollspy) to docs to highlight in the sidebar the current method being viewed ([#1289](https://github.com/caolan/async/issues/1289), [#1300](https://github.com/caolan/async/issues/1300)) +- Various minor doc fixes ([#1263](https://github.com/caolan/async/issues/1263), [#1264](https://github.com/caolan/async/issues/1264), [#1271](https://github.com/caolan/async/issues/1271), [#1278](https://github.com/caolan/async/issues/1278), [#1280](https://github.com/caolan/async/issues/1280), [#1282](https://github.com/caolan/async/issues/1282), [#1302](https://github.com/caolan/async/issues/1302)) + +# v2.0.1 + +- Significantly optimized all iteration based collection methods such as `each`, `map`, `filter`, etc ([#1245](https://github.com/caolan/async/issues/1245), [#1246](https://github.com/caolan/async/issues/1246), [#1247](https://github.com/caolan/async/issues/1247)). + +# v2.0.0 + +Lots of changes here! + +First and foremost, we have a slick new [site for docs](https://caolan.github.io/async/). Special thanks to [**@hargasinski**](https://github.com/hargasinski) for his work converting our old docs to `jsdoc` format and implementing the new website. Also huge ups to [**@ivanseidel**](https://github.com/ivanseidel) for designing our new logo. It was a long process for both of these tasks, but I think these changes turned out extraordinary well. 
+ +The biggest feature is modularization. You can now `require("async/series")` to only require the `series` function. Every Async library function is available this way. You still can `require("async")` to require the entire library, like you could do before. + +We also provide Async as a collection of ES2015 modules. You can now `import {each} from 'async-es'` or `import waterfall from 'async-es/waterfall'`. If you are using only a few Async functions, and are using a ES bundler such as Rollup, this can significantly lower your build size. + +Major thanks to [**@Kikobeats**](github.com/Kikobeats), [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for doing the majority of the modularization work, as well as [**@jdalton**](github.com/jdalton) and [**@Rich-Harris**](github.com/Rich-Harris) for advisory work on the general modularization strategy. + +Another one of the general themes of the 2.0 release is standardization of what an "async" function is. We are now more strictly following the node-style continuation passing style. That is, an async function is a function that: + +1. Takes a variable number of arguments +2. The last argument is always a callback +3. The callback can accept any number of arguments +4. The first argument passed to the callback will be treated as an error result, if the argument is truthy +5. Any number of result arguments can be passed after the "error" argument +6. The callback is called once and exactly once, either on the same tick or later tick of the JavaScript event loop. + +There were several cases where Async accepted some functions that did not strictly have these properties, most notably `auto`, `every`, `some`, `filter`, `reject` and `detect`. + +Another theme is performance. We have eliminated internal deferrals in all cases where they make sense. For example, in `waterfall` and `auto`, there was a `setImmediate` between each task -- these deferrals have been removed. A `setImmediate` call can add up to 1ms of delay. This might not seem like a lot, but it can add up if you are using many Async functions in the course of processing a HTTP request, for example. Nearly all asynchronous functions that do I/O already have some sort of deferral built in, so the extra deferral is unnecessary. The trade-off of this change is removing our built-in stack-overflow defense. Many synchronous callback calls in series can quickly overflow the JS call stack. If you do have a function that is sometimes synchronous (calling its callback on the same tick), and are running into stack overflows, wrap it with `async.ensureAsync()`. + +Another big performance win has been re-implementing `queue`, `cargo`, and `priorityQueue` with [doubly linked lists](https://en.wikipedia.org/wiki/Doubly_linked_list) instead of arrays. This has lead to queues being an order of [magnitude faster on large sets of tasks](https://github.com/caolan/async/pull/1205). + +## New Features + +- Async is now modularized. Individual functions can be `require()`d from the main package. (`require('async/auto')`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996)) +- Async is also available as a collection of ES2015 modules in the new `async-es` package. (`import {forEachSeries} from 'async-es'`) ([#984](https://github.com/caolan/async/issues/984), [#996](https://github.com/caolan/async/issues/996)) +- Added `race`, analogous to `Promise.race()`. 
It will run an array of async tasks in parallel and will call its callback with the result of the first task to respond. ([#568](https://github.com/caolan/async/issues/568), [#1038](https://github.com/caolan/async/issues/1038)) +- Collection methods now accept ES2015 iterators. Maps, Sets, and anything that implements the iterator spec can now be passed directly to `each`, `map`, `parallel`, etc.. ([#579](https://github.com/caolan/async/issues/579), [#839](https://github.com/caolan/async/issues/839), [#1074](https://github.com/caolan/async/issues/1074)) +- Added `mapValues`, for mapping over the properties of an object and returning an object with the same keys. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177)) +- Added `timeout`, a wrapper for an async function that will make the task time-out after the specified time. ([#1007](https://github.com/caolan/async/issues/1007), [#1027](https://github.com/caolan/async/issues/1027)) +- Added `reflect` and `reflectAll`, analagous to [`Promise.reflect()`](http://bluebirdjs.com/docs/api/reflect.html), a wrapper for async tasks that always succeeds, by gathering results and errors into an object. ([#942](https://github.com/caolan/async/issues/942), [#1012](https://github.com/caolan/async/issues/1012), [#1095](https://github.com/caolan/async/issues/1095)) +- `constant` supports dynamic arguments -- it will now always use its last argument as the callback. ([#1016](https://github.com/caolan/async/issues/1016), [#1052](https://github.com/caolan/async/issues/1052)) +- `setImmediate` and `nextTick` now support arguments to partially apply to the deferred function, like the node-native versions do. ([#940](https://github.com/caolan/async/issues/940), [#1053](https://github.com/caolan/async/issues/1053)) +- `auto` now supports resolving cyclic dependencies using [Kahn's algorithm](https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm) ([#1140](https://github.com/caolan/async/issues/1140)). +- Added `autoInject`, a relative of `auto` that automatically spreads a task's dependencies as arguments to the task function. ([#608](https://github.com/caolan/async/issues/608), [#1055](https://github.com/caolan/async/issues/1055), [#1099](https://github.com/caolan/async/issues/1099), [#1100](https://github.com/caolan/async/issues/1100)) +- You can now limit the concurrency of `auto` tasks. ([#635](https://github.com/caolan/async/issues/635), [#637](https://github.com/caolan/async/issues/637)) +- Added `retryable`, a relative of `retry` that wraps an async function, making it retry when called. ([#1058](https://github.com/caolan/async/issues/1058)) +- `retry` now supports specifying a function that determines the next time interval, useful for exponential backoff, logging and other retry strategies. ([#1161](https://github.com/caolan/async/issues/1161)) +- `retry` will now pass all of the arguments the task function was resolved with to the callback ([#1231](https://github.com/caolan/async/issues/1231)). +- Added `q.unsaturated` -- callback called when a `queue`'s number of running workers falls below a threshold. ([#868](https://github.com/caolan/async/issues/868), [#1030](https://github.com/caolan/async/issues/1030), [#1033](https://github.com/caolan/async/issues/1033), [#1034](https://github.com/caolan/async/issues/1034)) +- Added `q.error` -- a callback called whenever a `queue` task calls its callback with an error. 
([#1170](https://github.com/caolan/async/issues/1170)) +- `applyEach` and `applyEachSeries` now pass results to the final callback. ([#1088](https://github.com/caolan/async/issues/1088)) + +## Breaking changes + +- Calling a callback more than once is considered an error, and an error will be thrown. This had an explicit breaking change in `waterfall`. If you were relying on this behavior, you should more accurately represent your control flow as an event emitter or stream. ([#814](https://github.com/caolan/async/issues/814), [#815](https://github.com/caolan/async/issues/815), [#1048](https://github.com/caolan/async/issues/1048), [#1050](https://github.com/caolan/async/issues/1050)) +- `auto` task functions now always take the callback as the last argument. If a task has dependencies, the `results` object will be passed as the first argument. To migrate old task functions, wrap them with [`_.flip`](https://lodash.com/docs#flip) ([#1036](https://github.com/caolan/async/issues/1036), [#1042](https://github.com/caolan/async/issues/1042)) +- Internal `setImmediate` calls have been refactored away. This may make existing flows vulnerable to stack overflows if you use many synchronous functions in series. Use `ensureAsync` to work around this. ([#696](https://github.com/caolan/async/issues/696), [#704](https://github.com/caolan/async/issues/704), [#1049](https://github.com/caolan/async/issues/1049), [#1050](https://github.com/caolan/async/issues/1050)) +- `map` used to return an object when iterating over an object. `map` now always returns an array, like in other libraries. The previous object behavior has been split out into `mapValues`. ([#1157](https://github.com/caolan/async/issues/1157), [#1177](https://github.com/caolan/async/issues/1177)) +- `filter`, `reject`, `some`, `every`, `detect` and their families like `{METHOD}Series` and `{METHOD}Limit` now expect an error as the first callback argument, rather than just a simple boolean. Pass `null` as the first argument, or use `fs.access` instead of `fs.exists`. ([#118](https://github.com/caolan/async/issues/118), [#774](https://github.com/caolan/async/issues/774), [#1028](https://github.com/caolan/async/issues/1028), [#1041](https://github.com/caolan/async/issues/1041)) +- `{METHOD}` and `{METHOD}Series` are now implemented in terms of `{METHOD}Limit`. This is a major internal simplification, and is not expected to cause many problems, but it does subtly affect how functions execute internally. ([#778](https://github.com/caolan/async/issues/778), [#847](https://github.com/caolan/async/issues/847)) +- `retry`'s callback is now optional. Previously, omitting the callback would partially apply the function, meaning it could be passed directly as a task to `series` or `auto`. The partially applied "control-flow" behavior has been separated out into `retryable`. ([#1054](https://github.com/caolan/async/issues/1054), [#1058](https://github.com/caolan/async/issues/1058)) +- The test function for `whilst`, `until`, and `during` used to be passed non-error args from the iteratee function's callback, but this led to weirdness where the first call of the test function would be passed no args. 
We have made it so the test function is never passed extra arguments, and only the `doWhilst`, `doUntil`, and `doDuring` functions pass iteratee callback arguments to the test function ([#1217](https://github.com/caolan/async/issues/1217), [#1224](https://github.com/caolan/async/issues/1224)) +- The `q.tasks` array has been renamed `q._tasks` and is now implemented as a doubly linked list (DLL). Any code that used to interact with this array will need to be updated to either use the provided helpers or support DLLs ([#1205](https://github.com/caolan/async/issues/1205)). +- The timing of the `q.saturated()` callback in a `queue` has been modified to better reflect when tasks pushed to the queue will start queueing. ([#724](https://github.com/caolan/async/issues/724), [#1078](https://github.com/caolan/async/issues/1078)) +- Removed `iterator` method in favour of [ES2015 iterator protocol](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators ) which natively supports arrays ([#1237](https://github.com/caolan/async/issues/1237)) +- Dropped support for Component, Jam, SPM, and Volo ([#1175](https://github.com/caolan/async/issues/1175), #[#176](https://github.com/caolan/async/issues/176)) + +## Bug Fixes + +- Improved handling of no dependency cases in `auto` & `autoInject` ([#1147](https://github.com/caolan/async/issues/1147)). +- Fixed a bug where the callback generated by `asyncify` with `Promises` could resolve twice ([#1197](https://github.com/caolan/async/issues/1197)). +- Fixed several documented optional callbacks not actually being optional ([#1223](https://github.com/caolan/async/issues/1223)). + +## Other + +- Added `someSeries` and `everySeries` for symmetry, as well as a complete set of `any`/`anyLimit`/`anySeries` and `all`/`/allLmit`/`allSeries` aliases. +- Added `find` as an alias for `detect. (as well as `findLimit` and `findSeries`). +- Various doc fixes ([#1005](https://github.com/caolan/async/issues/1005), [#1008](https://github.com/caolan/async/issues/1008), [#1010](https://github.com/caolan/async/issues/1010), [#1015](https://github.com/caolan/async/issues/1015), [#1021](https://github.com/caolan/async/issues/1021), [#1037](https://github.com/caolan/async/issues/1037), [#1039](https://github.com/caolan/async/issues/1039), [#1051](https://github.com/caolan/async/issues/1051), [#1102](https://github.com/caolan/async/issues/1102), [#1107](https://github.com/caolan/async/issues/1107), [#1121](https://github.com/caolan/async/issues/1121), [#1123](https://github.com/caolan/async/issues/1123), [#1129](https://github.com/caolan/async/issues/1129), [#1135](https://github.com/caolan/async/issues/1135), [#1138](https://github.com/caolan/async/issues/1138), [#1141](https://github.com/caolan/async/issues/1141), [#1153](https://github.com/caolan/async/issues/1153), [#1216](https://github.com/caolan/async/issues/1216), [#1217](https://github.com/caolan/async/issues/1217), [#1232](https://github.com/caolan/async/issues/1232), [#1233](https://github.com/caolan/async/issues/1233), [#1236](https://github.com/caolan/async/issues/1236), [#1238](https://github.com/caolan/async/issues/1238)) + +Thank you [**@aearly**](github.com/aearly) and [**@megawac**](github.com/megawac) for taking the lead on version 2 of async. 
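+
+> _Editor's note (illustrative sketch, not part of the upstream changelog): the 2.0 notes above define what a node-style "async" function is; a minimal example of a function that follows that convention is shown below. The `readJson` helper and its use of `fs` are hypothetical, not Async APIs._
+
+```js
+const fs = require('fs');
+
+// A node-style/CPS async function: the last argument is a callback,
+// the callback's first argument is an error (or null), and the
+// callback is called once and exactly once.
+function readJson(path, callback) {
+    fs.readFile(path, 'utf8', (err, data) => {
+        if (err) return callback(err);          // error-first
+        let parsed;
+        try {
+            parsed = JSON.parse(data);
+        } catch (e) {
+            return callback(e);                 // report parse failures through the callback
+        }
+        callback(null, parsed);                 // result arguments follow the error slot
+    });
+}
+```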
+ +------------------------------------------ + +# v1.5.2 +- Allow using `"constructor"` as an argument in `memoize` ([#998](https://github.com/caolan/async/issues/998)) +- Give a better error message when `auto` dependency checking fails ([#994](https://github.com/caolan/async/issues/994)) +- Various doc updates ([#936](https://github.com/caolan/async/issues/936), [#956](https://github.com/caolan/async/issues/956), [#979](https://github.com/caolan/async/issues/979), [#1002](https://github.com/caolan/async/issues/1002)) + +# v1.5.1 +- Fix issue with `pause` in `queue` with concurrency enabled ([#946](https://github.com/caolan/async/issues/946)) +- `while` and `until` now pass the final result to callback ([#963](https://github.com/caolan/async/issues/963)) +- `auto` will properly handle concurrency when there is no callback ([#966](https://github.com/caolan/async/issues/966)) +- `auto` will now properly stop execution when an error occurs ([#988](https://github.com/caolan/async/issues/988), [#993](https://github.com/caolan/async/issues/993)) +- Various doc fixes ([#971](https://github.com/caolan/async/issues/971), [#980](https://github.com/caolan/async/issues/980)) + +# v1.5.0 + +- Added `transform`, analogous to [`_.transform`](http://lodash.com/docs#transform) ([#892](https://github.com/caolan/async/issues/892)) +- `map` now returns an object when an object is passed in, rather than array with non-numeric keys. `map` will begin always returning an array with numeric indexes in the next major release. ([#873](https://github.com/caolan/async/issues/873)) +- `auto` now accepts an optional `concurrency` argument to limit the number of running tasks ([#637](https://github.com/caolan/async/issues/637)) +- Added `queue#workersList()`, to retrieve the list of currently running tasks. 
([#891](https://github.com/caolan/async/issues/891)) +- Various code simplifications ([#896](https://github.com/caolan/async/issues/896), [#904](https://github.com/caolan/async/issues/904)) +- Various doc fixes :scroll: ([#890](https://github.com/caolan/async/issues/890), [#894](https://github.com/caolan/async/issues/894), [#903](https://github.com/caolan/async/issues/903), [#905](https://github.com/caolan/async/issues/905), [#912](https://github.com/caolan/async/issues/912)) + +# v1.4.2 + +- Ensure coverage files don't get published on npm ([#879](https://github.com/caolan/async/issues/879)) + +# v1.4.1 + +- Add in overlooked `detectLimit` method ([#866](https://github.com/caolan/async/issues/866)) +- Removed unnecessary files from npm releases ([#861](https://github.com/caolan/async/issues/861)) +- Removed usage of a reserved word to prevent :boom: in older environments ([#870](https://github.com/caolan/async/issues/870)) + +# v1.4.0 + +- `asyncify` now supports promises ([#840](https://github.com/caolan/async/issues/840)) +- Added `Limit` versions of `filter` and `reject` ([#836](https://github.com/caolan/async/issues/836)) +- Add `Limit` versions of `detect`, `some` and `every` ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829)) +- `some`, `every` and `detect` now short circuit early ([#828](https://github.com/caolan/async/issues/828), [#829](https://github.com/caolan/async/issues/829)) +- Improve detection of the global object ([#804](https://github.com/caolan/async/issues/804)), enabling use in WebWorkers +- `whilst` now called with arguments from iterator ([#823](https://github.com/caolan/async/issues/823)) +- `during` now gets called with arguments from iterator ([#824](https://github.com/caolan/async/issues/824)) +- Code simplifications and optimizations aplenty ([diff](https://github.com/caolan/async/compare/v1.3.0...v1.4.0)) + + +# v1.3.0 + +New Features: +- Added `constant` +- Added `asyncify`/`wrapSync` for making sync functions work with callbacks. ([#671](https://github.com/caolan/async/issues/671), [#806](https://github.com/caolan/async/issues/806)) +- Added `during` and `doDuring`, which are like `whilst` with an async truth test. ([#800](https://github.com/caolan/async/issues/800)) +- `retry` now accepts an `interval` parameter to specify a delay between retries. ([#793](https://github.com/caolan/async/issues/793)) +- `async` should work better in Web Workers due to better `root` detection ([#804](https://github.com/caolan/async/issues/804)) +- Callbacks are now optional in `whilst`, `doWhilst`, `until`, and `doUntil` ([#642](https://github.com/caolan/async/issues/642)) +- Various internal updates ([#786](https://github.com/caolan/async/issues/786), [#801](https://github.com/caolan/async/issues/801), [#802](https://github.com/caolan/async/issues/802), [#803](https://github.com/caolan/async/issues/803)) +- Various doc fixes ([#790](https://github.com/caolan/async/issues/790), [#794](https://github.com/caolan/async/issues/794)) + +Bug Fixes: +- `cargo` now exposes the `payload` size, and `cargo.payload` can be changed on the fly after the `cargo` is created. ([#740](https://github.com/caolan/async/issues/740), [#744](https://github.com/caolan/async/issues/744), [#783](https://github.com/caolan/async/issues/783)) + + +# v1.2.1 + +Bug Fix: + +- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. 
Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782)) + + +# v1.2.0 + +New Features: + +- Added `timesLimit` ([#743](https://github.com/caolan/async/issues/743)) +- `concurrency` can be changed after initialization in `queue` by setting `q.concurrency`. The new concurrency will be reflected the next time a task is processed. ([#747](https://github.com/caolan/async/issues/747), [#772](https://github.com/caolan/async/issues/772)) + +Bug Fixes: + +- Fixed a regression in `each` and family with empty arrays that have additional properties. ([#775](https://github.com/caolan/async/issues/775), [#777](https://github.com/caolan/async/issues/777)) + + +# v1.1.1 + +Bug Fix: + +- Small regression with synchronous iterator behavior in `eachSeries` with a 1-element array. Before 1.1.0, `eachSeries`'s callback was called on the same tick, which this patch restores. In 2.0.0, it will be called on the next tick. ([#782](https://github.com/caolan/async/issues/782)) + + +# v1.1.0 + +New Features: + +- `cargo` now supports all of the same methods and event callbacks as `queue`. +- Added `ensureAsync` - A wrapper that ensures an async function calls its callback on a later tick. ([#769](https://github.com/caolan/async/issues/769)) +- Optimized `map`, `eachOf`, and `waterfall` families of functions +- Passing a `null` or `undefined` array to `map`, `each`, `parallel` and families will be treated as an empty array ([#667](https://github.com/caolan/async/issues/667)). +- The callback is now optional for the composed results of `compose` and `seq`. ([#618](https://github.com/caolan/async/issues/618)) +- Reduced file size by 4kb, (minified version by 1kb) +- Added code coverage through `nyc` and `coveralls` ([#768](https://github.com/caolan/async/issues/768)) + +Bug Fixes: + +- `forever` will no longer stack overflow with a synchronous iterator ([#622](https://github.com/caolan/async/issues/622)) +- `eachLimit` and other limit functions will stop iterating once an error occurs ([#754](https://github.com/caolan/async/issues/754)) +- Always pass `null` in callbacks when there is no error ([#439](https://github.com/caolan/async/issues/439)) +- Ensure proper conditions when calling `drain()` after pushing an empty data set to a queue ([#668](https://github.com/caolan/async/issues/668)) +- `each` and family will properly handle an empty array ([#578](https://github.com/caolan/async/issues/578)) +- `eachSeries` and family will finish if the underlying array is modified during execution ([#557](https://github.com/caolan/async/issues/557)) +- `queue` will throw if a non-function is passed to `q.push()` ([#593](https://github.com/caolan/async/issues/593)) +- Doc fixes ([#629](https://github.com/caolan/async/issues/629), [#766](https://github.com/caolan/async/issues/766)) + + +# v1.0.0 + +No known breaking changes, we are simply complying with semver from here on out. + +Changes: + +- Start using a changelog! 
+- Add `forEachOf` for iterating over Objects (or to iterate Arrays with indexes available) ([#168](https://github.com/caolan/async/issues/168) [#704](https://github.com/caolan/async/issues/704) [#321](https://github.com/caolan/async/issues/321)) +- Detect deadlocks in `auto` ([#663](https://github.com/caolan/async/issues/663)) +- Better support for require.js ([#527](https://github.com/caolan/async/issues/527)) +- Throw if queue created with concurrency `0` ([#714](https://github.com/caolan/async/issues/714)) +- Fix unneeded iteration in `queue.resume()` ([#758](https://github.com/caolan/async/issues/758)) +- Guard against timer mocking overriding `setImmediate` ([#609](https://github.com/caolan/async/issues/609) [#611](https://github.com/caolan/async/issues/611)) +- Miscellaneous doc fixes ([#542](https://github.com/caolan/async/issues/542) [#596](https://github.com/caolan/async/issues/596) [#615](https://github.com/caolan/async/issues/615) [#628](https://github.com/caolan/async/issues/628) [#631](https://github.com/caolan/async/issues/631) [#690](https://github.com/caolan/async/issues/690) [#729](https://github.com/caolan/async/issues/729)) +- Use single noop function internally ([#546](https://github.com/caolan/async/issues/546)) +- Optimize internal `_each`, `_map` and `_keys` functions. diff --git a/backend/node_modules/async/LICENSE b/backend/node_modules/async/LICENSE new file mode 100644 index 00000000..b18aed69 --- /dev/null +++ b/backend/node_modules/async/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010-2018 Caolan McMahon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/backend/node_modules/async/README.md b/backend/node_modules/async/README.md new file mode 100644 index 00000000..55a0626d --- /dev/null +++ b/backend/node_modules/async/README.md @@ -0,0 +1,59 @@ +![Async Logo](https://raw.githubusercontent.com/caolan/async/master/logo/async-logo_readme.jpg) + +![Github Actions CI status](https://github.com/caolan/async/actions/workflows/ci.yml/badge.svg) +[![NPM version](https://img.shields.io/npm/v/async.svg)](https://www.npmjs.com/package/async) +[![Coverage Status](https://coveralls.io/repos/caolan/async/badge.svg?branch=master)](https://coveralls.io/r/caolan/async?branch=master) +[![Join the chat at https://gitter.im/caolan/async](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/caolan/async?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![jsDelivr Hits](https://data.jsdelivr.com/v1/package/npm/async/badge?style=rounded)](https://www.jsdelivr.com/package/npm/async) + + + +Async is a utility module which provides straight-forward, powerful functions for working with [asynchronous JavaScript](http://caolan.github.io/async/v3/global.html). Although originally designed for use with [Node.js](https://nodejs.org/) and installable via `npm i async`, it can also be used directly in the browser. An ESM/MJS version is included in the main `async` package that should automatically be used with compatible bundlers such as Webpack and Rollup. + +A pure ESM version of Async is available as [`async-es`](https://www.npmjs.com/package/async-es). + +For Documentation, visit + +*For Async v1.5.x documentation, go [HERE](https://github.com/caolan/async/blob/v1.5.2/README.md)* + + +```javascript +// for use with Node-style callbacks... +var async = require("async"); + +var obj = {dev: "/dev.json", test: "/test.json", prod: "/prod.json"}; +var configs = {}; + +async.forEachOf(obj, (value, key, callback) => { + fs.readFile(__dirname + value, "utf8", (err, data) => { + if (err) return callback(err); + try { + configs[key] = JSON.parse(data); + } catch (e) { + return callback(e); + } + callback(); + }); +}, err => { + if (err) console.error(err.message); + // configs is now a map of JSON data + doSomethingWith(configs); +}); +``` + +```javascript +var async = require("async"); + +// ...or ES2017 async functions +async.mapLimit(urls, 5, async function(url) { + const response = await fetch(url) + return response.body +}, (err, results) => { + if (err) throw err + // results is now an array of the response bodies + console.log(results) +}) +``` diff --git a/backend/node_modules/async/all.js b/backend/node_modules/async/all.js new file mode 100644 index 00000000..622b3019 --- /dev/null +++ b/backend/node_modules/async/all.js @@ -0,0 +1,119 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. 
+ * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function every(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(every, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/allLimit.js b/backend/node_modules/async/allLimit.js new file mode 100644 index 00000000..375e1260 --- /dev/null +++ b/backend/node_modules/async/allLimit.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj 
&& obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everyLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everyLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/allSeries.js b/backend/node_modules/async/allSeries.js new file mode 100644 index 00000000..9a6bf7d4 --- /dev/null +++ b/backend/node_modules/async/allSeries.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + */ +function everySeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everySeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/any.js b/backend/node_modules/async/any.js new file mode 100644 index 00000000..a5bd328a --- /dev/null +++ b/backend/node_modules/async/any.js @@ -0,0 +1,122 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function some(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(some, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/anyLimit.js b/backend/node_modules/async/anyLimit.js new file mode 100644 index 00000000..3a8096fd --- /dev/null +++ b/backend/node_modules/async/anyLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/anySeries.js b/backend/node_modules/async/anySeries.js new file mode 100644 index 00000000..51aad19b --- /dev/null +++ b/backend/node_modules/async/anySeries.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + */ +function someSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/apply.js b/backend/node_modules/async/apply.js new file mode 100644 index 00000000..d40784f3 --- /dev/null +++ b/backend/node_modules/async/apply.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (fn, ...args) { + return (...callArgs) => fn(...args, ...callArgs); +}; + +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/applyEach.js b/backend/node_modules/async/applyEach.js new file mode 100644 index 00000000..841842c8 --- /dev/null +++ b/backend/node_modules/async/applyEach.js @@ -0,0 +1,57 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _applyEach = require('./internal/applyEach.js'); + +var _applyEach2 = _interopRequireDefault(_applyEach); + +var _map = require('./map.js'); + +var _map2 = _interopRequireDefault(_map); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. 
+ * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ +exports.default = (0, _applyEach2.default)(_map2.default); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/applyEachSeries.js b/backend/node_modules/async/applyEachSeries.js new file mode 100644 index 00000000..f5267f67 --- /dev/null +++ b/backend/node_modules/async/applyEachSeries.js @@ -0,0 +1,37 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _applyEach = require('./internal/applyEach.js'); + +var _applyEach2 = _interopRequireDefault(_applyEach); + +var _mapSeries = require('./mapSeries.js'); + +var _mapSeries2 = _interopRequireDefault(_mapSeries); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. + * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. It takes no args, other than + * a callback. + */ +exports.default = (0, _applyEach2.default)(_mapSeries2.default); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/asyncify.js b/backend/node_modules/async/asyncify.js new file mode 100644 index 00000000..ddc3f02f --- /dev/null +++ b/backend/node_modules/async/asyncify.js @@ -0,0 +1,118 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = asyncify; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. 
+ * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. + * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ +function asyncify(func) { + if ((0, _wrapAsync.isAsync)(func)) { + return function (...args /*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback); + }; + } + + return (0, _initialParams2.default)(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback); + } else { + callback(null, result); + } + }); +} + +function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && (err instanceof Error || err.message) ? err : new Error(err)); + }); +} + +function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + (0, _setImmediate2.default)(e => { + throw e; + }, err); + } +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/auto.js b/backend/node_modules/async/auto.js new file mode 100644 index 00000000..163c4f27 --- /dev/null +++ b/backend/node_modules/async/auto.js @@ -0,0 +1,333 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = auto; + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. 
+ * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * @example + * + * //Using Callbacks + * async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * if (err) { + * console.log('err = ', err); + * } + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }); + * + * //Using Promises + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }).then(results => { + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }).catch(err => { + * console.log('err = ', err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. 
+ callback = concurrency; + concurrency = null; + } + callback = (0, _once2.default)(callback || (0, _promiseCallback.promiseCallback)()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + + var listeners = Object.create(null); + + var readyTasks = []; + + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; + + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } + + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; + + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + '` has a non-existent dependency `' + dependencyName + '` in ' + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); + + checkForDeadlocks(); + processQueue(); + + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } + + function processQueue() { + if (canceled) return; + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while (readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } + } + + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } + + taskListeners.push(fn); + } + + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } + + function runTask(key, task) { + if (hasError) return; + + var taskCallback = (0, _onlyOnce2.default)((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return; + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return; + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); + + runningTasks++; + var taskFn = (0, _wrapAsync2.default)(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } + + if (counter !== numTasks) { + throw new Error('async.auto cannot execute tasks due to a recursive dependency'); + } + } + + function 
getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[_promiseCallback.PROMISE_SYMBOL]; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/autoInject.js b/backend/node_modules/async/autoInject.js new file mode 100644 index 00000000..5db2653a --- /dev/null +++ b/backend/node_modules/async/autoInject.js @@ -0,0 +1,182 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = autoInject; + +var _auto = require('./auto.js'); + +var _auto2 = _interopRequireDefault(_auto); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var FN_ARGS = /^(?:async\s)?(?:function)?\s*(?:\w+\s*)?\(([^)]+)\)(?:\s*{)/; +var ARROW_FN_ARGS = /^(?:async\s)?\s*(?:\(\s*)?((?:[^)=\s]\s*)*)(?:\)\s*)?=>/; +var FN_ARG_SPLIT = /,/; +var FN_ARG = /(=.+)?(\s*)$/; + +function stripComments(string) { + let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index + 1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = endIndex === -1 ? string.length : endIndex; + } else if (endBlockComment !== -1 && string[index] === '/' && string[index + 1] === '*') { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; +} + +function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src); + let [, args] = match; + return args.replace(/\s/g, '').split(FN_ARG_SPLIT).map(arg => arg.replace(FN_ARG, '').trim()); +} + +/** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. 
+ * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ +function autoInject(tasks, callback) { + var newTasks = {}; + + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = (0, _wrapAsync.isAsync)(taskFn); + var hasNoDeps = !fnIsAsync && taskFn.length === 1 || fnIsAsync && taskFn.length === 0; + + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); + + newTasks[key] = params.concat(params.length > 0 ? 
newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if (taskFn.length === 0 && !fnIsAsync && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } + + // remove callback param + if (!fnIsAsync) params.pop(); + + newTasks[key] = params.concat(newTask); + } + + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + (0, _wrapAsync2.default)(taskFn)(...newArgs); + } + }); + + return (0, _auto2.default)(newTasks, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/bower.json b/backend/node_modules/async/bower.json new file mode 100644 index 00000000..390c6502 --- /dev/null +++ b/backend/node_modules/async/bower.json @@ -0,0 +1,17 @@ +{ + "name": "async", + "main": "dist/async.js", + "ignore": [ + "bower_components", + "lib", + "test", + "node_modules", + "perf", + "support", + "**/.*", + "*.config.js", + "*.json", + "index.js", + "Makefile" + ] +} diff --git a/backend/node_modules/async/cargo.js b/backend/node_modules/async/cargo.js new file mode 100644 index 00000000..d4abd214 --- /dev/null +++ b/backend/node_modules/async/cargo.js @@ -0,0 +1,63 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = cargo; + +var _queue = require('./internal/queue.js'); + +var _queue2 = _interopRequireDefault(_queue); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. 
+ * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ +function concat(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, Infinity, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concat, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/concatLimit.js b/backend/node_modules/async/concatLimit.js new file mode 100644 index 00000000..a27cc7d4 --- /dev/null +++ b/backend/node_modules/async/concatLimit.js @@ -0,0 +1,60 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + */ +function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); +} +exports.default = (0, _awaitify2.default)(concatLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/concatSeries.js b/backend/node_modules/async/concatSeries.js new file mode 100644 index 00000000..332de3f3 --- /dev/null +++ b/backend/node_modules/async/concatSeries.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _concatLimit = require('./concatLimit.js'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + */ +function concatSeries(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concatSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/constant.js b/backend/node_modules/async/constant.js new file mode 100644 index 00000000..ea406f65 --- /dev/null +++ b/backend/node_modules/async/constant.js @@ -0,0 +1,14 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (...args) { + return function (...ignoredArgs /*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; +}; + +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/detect.js b/backend/node_modules/async/detect.js new file mode 100644 index 00000000..d5896ef6 --- /dev/null +++ b/backend/node_modules/async/detect.js @@ -0,0 +1,96 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function detect(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detect, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/detectLimit.js b/backend/node_modules/async/detectLimit.js new file mode 100644 index 00000000..c59843b6 --- /dev/null +++ b/backend/node_modules/async/detectLimit.js @@ -0,0 +1,48 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + */ +function detectLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detectLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/detectSeries.js b/backend/node_modules/async/detectSeries.js new file mode 100644 index 00000000..b4868996 --- /dev/null +++ b/backend/node_modules/async/detectSeries.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(1), coll, iteratee, callback); +} + +exports.default = (0, _awaitify2.default)(detectSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/dir.js b/backend/node_modules/async/dir.js new file mode 100644 index 00000000..8e9fafd7 --- /dev/null +++ b/backend/node_modules/async/dir.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _consoleFunc = require('./internal/consoleFunc.js'); + +var _consoleFunc2 = _interopRequireDefault(_consoleFunc); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. + * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... 
- Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ +exports.default = (0, _consoleFunc2.default)('dir'); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/dist/async.js b/backend/node_modules/async/dist/async.js new file mode 100644 index 00000000..d7b79189 --- /dev/null +++ b/backend/node_modules/async/dist/async.js @@ -0,0 +1,6061 @@ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.async = {})); +})(this, (function (exports) { 'use strict'; + + /** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). + * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. + * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ + function apply(fn, ...args) { + return (...callArgs) => fn(...args,...callArgs); + } + + function initialParams (fn) { + return function (...args/*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; + } + + /* istanbul ignore file */ + + var hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; + var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; + var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + + function fallback(fn) { + setTimeout(fn, 0); + } + + function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); + } + + var _defer$1; + + if (hasQueueMicrotask) { + _defer$1 = queueMicrotask; + } else if (hasSetImmediate) { + _defer$1 = setImmediate; + } else if (hasNextTick) { + _defer$1 = process.nextTick; + } else { + _defer$1 = fallback; + } + + var setImmediate$1 = wrap(_defer$1); + + /** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). 
Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. + * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ + function asyncify(func) { + if (isAsync(func)) { + return function (...args/*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback) + } + } + + return initialParams(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback) + } else { + callback(null, result); + } + }); + } + + function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && (err instanceof Error || err.message) ? err : new Error(err)); + }); + } + + function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + setImmediate$1(e => { throw e }, err); + } + } + + function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; + } + + function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; + } + + function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; + } + + function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function') + return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; + } + + // conditionally promisify a function. 
+ // only return a promise if a callback is omitted + function awaitify (asyncFn, arity) { + if (!arity) arity = asyncFn.length; + if (!arity) throw new Error('arity is undefined') + function awaitable (...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args) + } + + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err) + resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }) + } + + return awaitable + } + + function applyEach$1 (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = awaitify(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + wrapAsync(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; + } + + function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = wrapAsync(iteratee); + + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); + } + + function isArrayLike(value) { + return value && + typeof value.length === 'number' && + value.length >= 0 && + value.length % 1 === 0; + } + + // A temporary value used to identify if the loop should be broken. + // See #1064, #1293 + const breakLoop = {}; + + function once(fn) { + function wrapper (...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper + } + + function getIterator (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); + } + + function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? {value: coll[i], key: i} : null; + } + } + + function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) + return null; + i++; + return {value: item.value, key: i}; + } + } + + function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? {value: obj[key], key} : null; + }; + } + + function createIterator(coll) { + if (isArrayLike(coll)) { + return createArrayIterator(coll); + } + + var iterator = getIterator(coll); + return iterator ? 
createES2015Iterator(iterator) : createObjectIterator(coll); + } + + function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; + } + + // for async generators + function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({value, done: iterDone}) => { + //console.log('got value', value) + if (canceled || done) return + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } + + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return + if (err) return handleError(err) + + if (err === false) { + done = true; + canceled = true; + return + } + + if (result === breakLoop || (done && running <= 0)) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } + + function handleError(err) { + if (canceled) return + awaiting = false; + done = true; + callback(err); + } + + replenish(); + } + + var eachOfLimit$2 = (limit) => { + return (obj, iteratee, callback) => { + callback = once(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1') + } + if (!obj) { + return callback(null); + } + if (isAsyncGenerator(obj)) { + return asyncEachOfLimit(obj, limit, iteratee, callback) + } + if (isAsyncIterable(obj)) { + return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) + } + var nextElem = createIterator(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + + function iterateeCallback(err, value) { + if (canceled) return + running -= 1; + if (err) { + done = true; + callback(err); + } + else if (err === false) { + done = true; + canceled = true; + } + else if (value === breakLoop || (done && running <= 0)) { + done = true; + return callback(null); + } + else if (!looping) { + replenish(); + } + } + + function replenish () { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); + } + looping = false; + } + + replenish(); + }; + }; + + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). 
+ * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfLimit(coll, limit, iteratee, callback) { + return eachOfLimit$2(limit)(coll, wrapAsync(iteratee), callback); + } + + var eachOfLimit$1 = awaitify(eachOfLimit, 4); + + // eachOf implementation optimized for array-likes + function eachOfArrayLike(coll, iteratee, callback) { + callback = once(callback); + var index = 0, + completed = 0, + {length} = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return + if (err) { + callback(err); + } else if ((++completed === length) || value === breakLoop) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, onlyOnce(iteratorCallback)); + } + } + + // a generic version of eachOf which can handle array, object, and iterator cases. + function eachOfGeneric (coll, iteratee, callback) { + return eachOfLimit$1(coll, Infinity, iteratee, callback); + } + + /** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. 
+ * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ + function eachOf(coll, iteratee, callback) { + var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, wrapAsync(iteratee), callback); + } + + var eachOf$1 = awaitify(eachOf, 3); + + /** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callbacks + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function map (coll, iteratee, callback) { + return _asyncMap(eachOf$1, coll, iteratee, callback) + } + var map$1 = awaitify(map, 3); + + /** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. 
+ * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ + var applyEach = applyEach$1(map$1); + + /** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachOfSeries(coll, iteratee, callback) { + return eachOfLimit$1(coll, 1, iteratee, callback) + } + var eachOfSeries$1 = awaitify(eachOfSeries, 3); + + /** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function mapSeries (coll, iteratee, callback) { + return _asyncMap(eachOfSeries$1, coll, iteratee, callback) + } + var mapSeries$1 = awaitify(mapSeries, 3); + + /** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. + * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. It takes no args, other than + * a callback. 
+ */ + var applyEachSeries = applyEach$1(mapSeries$1); + + const PROMISE_SYMBOL = Symbol('promiseCallback'); + + function promiseCallback () { + let resolve, reject; + function callback (err, ...args) { + if (err) return reject(err) + resolve(args.length > 1 ? args : args[0]); + } + + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, + reject = rej; + }); + + return callback + } + + /** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * @example + * + * //Using Callbacks + * async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * if (err) { + * console.log('err = ', err); + * } + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }); + * + * //Using Promises + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }).then(results => { + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }).catch(err => { + * console.log('err = ', err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. 
+ callback = concurrency; + concurrency = null; + } + callback = once(callback || promiseCallback()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + + var listeners = Object.create(null); + + var readyTasks = []; + + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; + + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } + + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; + + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + + '` has a non-existent dependency `' + + dependencyName + '` in ' + + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); + + checkForDeadlocks(); + processQueue(); + + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } + + function processQueue() { + if (canceled) return + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while(readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } + + } + + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } + + taskListeners.push(fn); + } + + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } + + + function runTask(key, task) { + if (hasError) return; + + var taskCallback = onlyOnce((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); + + runningTasks++; + var taskFn = wrapAsync(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } + + if (counter !== numTasks) { + throw new Error( + 'async.auto cannot execute tasks due to a recursive dependency' + ); + } + } + + function getDependents(taskName) { + var result = []; + 
Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[PROMISE_SYMBOL] + } + + var FN_ARGS = /^(?:async\s)?(?:function)?\s*(?:\w+\s*)?\(([^)]+)\)(?:\s*{)/; + var ARROW_FN_ARGS = /^(?:async\s)?\s*(?:\(\s*)?((?:[^)=\s]\s*)*)(?:\)\s*)?=>/; + var FN_ARG_SPLIT = /,/; + var FN_ARG = /(=.+)?(\s*)$/; + + function stripComments(string) { + let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index+1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = (endIndex === -1) ? string.length : endIndex; + } else if ((endBlockComment !== -1) && (string[index] === '/') && (string[index+1] === '*')) { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; + } + + function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) + let [, args] = match; + return args + .replace(/\s/g, '') + .split(FN_ARG_SPLIT) + .map((arg) => arg.replace(FN_ARG, '').trim()); + } + + /** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ + function autoInject(tasks, callback) { + var newTasks = {}; + + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = isAsync(taskFn); + var hasNoDeps = + (!fnIsAsync && taskFn.length === 1) || + (fnIsAsync && taskFn.length === 0); + + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); + + newTasks[key] = params.concat(params.length > 0 ? newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } + + // remove callback param + if (!fnIsAsync) params.pop(); + + newTasks[key] = params.concat(newTask); + } + + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + wrapAsync(taskFn)(...newArgs); + } + }); + + return auto(newTasks, callback); + } + + // Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation + // used for queues. This implementation assumes that the node provided by the user can be modified + // to adjust the next and last properties. We implement only the minimal functionality + // for queue support. 
+ class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } + + removeLink(node) { + if (node.prev) node.prev.next = node.next; + else this.head = node.next; + if (node.next) node.next.prev = node.prev; + else this.tail = node.prev; + + node.prev = node.next = null; + this.length -= 1; + return node; + } + + empty () { + while(this.head) this.shift(); + return this; + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode; + else this.tail = newNode; + node.next = newNode; + this.length += 1; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode; + else this.head = newNode; + node.prev = newNode; + this.length += 1; + } + + unshift(node) { + if (this.head) this.insertBefore(this.head, node); + else setInitial(this, node); + } + + push(node) { + if (this.tail) this.insertAfter(this.tail, node); + else setInitial(this, node); + } + + shift() { + return this.head && this.removeLink(this.head); + } + + pop() { + return this.tail && this.removeLink(this.tail); + } + + toArray() { + return [...this] + } + + *[Symbol.iterator] () { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove (testFn) { + var curr = this.head; + while(curr) { + var {next} = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } + } + + function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; + } + + function queue$1(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } + + var _worker = wrapAsync(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; + + function on (event, handler) { + events[event].push(handler); + } + + function once (event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + + function off (event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []) + if (!handler) return events[event] = [] + events[event] = events[event].filter(ev => ev !== handler); + } + + function trigger (event, ...args) { + events[event].forEach(handler => handler(...args)); + } + + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + + var res, rej; + function promiseCallback (err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res() + if (args.length <= 1) return res(args[0]) + res(args); + } + + var item = q._createTaskItem( + data, + rejectOnError ? 
promiseCallback : + (callback || promiseCallback) + ); + + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + + if (!processingScheduled) { + processingScheduled = true; + setImmediate$1(() => { + processingScheduled = false; + q.process(); + }); + } + + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }) + } + } + + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; + + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; + + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } + + task.callback(err, ...args); + + if (err != null) { + trigger('error', err, task.data); + } + } + + if (numRunning <= (q.concurrency - q.buffer) ) { + trigger('unsaturated'); + } + + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } + + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + setImmediate$1(() => trigger('drain')); + return true + } + return false + } + + const eventMethod = (name) => (handler) => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err) + resolve(data); + }); + }) + } + off(name); + on(name, handler); + + }; + + var isProcessing = false; + var q = { + _tasks: new DLL(), + _createTaskItem (data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator] () { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, false, callback)) + } + return _insert(data, false, false, callback); + }, + pushAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, true, callback)) + } + return _insert(data, false, true, callback); + }, + kill () { + off(); + q._tasks.empty(); + }, + unshift (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, false, callback)) + } + return _insert(data, true, false, callback); + }, + unshiftAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, true, callback)) + } + return _insert(data, true, true, callback); + }, + remove (testFn) { + q._tasks.remove(testFn); + }, + process () { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). 
+ if (isProcessing) { + return; + } + isProcessing = true; + while(!q.paused && numRunning < q.concurrency && q._tasks.length){ + var tasks = [], data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + + numRunning += 1; + + if (q._tasks.length === 0) { + trigger('empty'); + } + + if (numRunning === q.concurrency) { + trigger('saturated'); + } + + var cb = onlyOnce(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length () { + return q._tasks.length; + }, + running () { + return numRunning; + }, + workersList () { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause () { + q.paused = true; + }, + resume () { + if (q.paused === false) { return; } + q.paused = false; + setImmediate$1(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + }, + }); + return q; + } + + /** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. 
+ * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function reduce(coll, memo, iteratee, callback) { + callback = once(callback); + var _iteratee = wrapAsync(iteratee); + return eachOfSeries$1(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); + } + var reduce$1 = awaitify(reduce, 4); + + /** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ + function seq(...functions) { + var _functions = functions.map(wrapAsync); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = promiseCallback(); + } + + reduce$1(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, + (err, results) => cb(err, ...results)); + + return cb[PROMISE_SYMBOL] + }; + } + + /** + * Creates a function which is a composition of the passed asynchronous + * functions. Each function consumes the return value of the function that + * follows. 
Composing functions `f()`, `g()`, and `h()` would produce the result + * of `f(g(h()))`, only this version uses callbacks to obtain the return values. + * + * If the last argument to the composed function is not a function, a promise + * is returned when you call it. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name compose + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} an asynchronous function that is the composed + * asynchronous `functions` + * @example + * + * function add1(n, callback) { + * setTimeout(function () { + * callback(null, n + 1); + * }, 10); + * } + * + * function mul3(n, callback) { + * setTimeout(function () { + * callback(null, n * 3); + * }, 10); + * } + * + * var add1mul3 = async.compose(mul3, add1); + * add1mul3(4, function (err, result) { + * // result now equals 15 + * }); + */ + function compose(...args) { + return seq(...args.reverse()); + } + + /** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function mapLimit (coll, limit, iteratee, callback) { + return _asyncMap(eachOfLimit$2(limit), coll, iteratee, callback) + } + var mapLimit$1 = awaitify(mapLimit, 4); + + /** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + */ + function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); + } + var concatLimit$1 = awaitify(concatLimit, 4); + + /** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * + * // Using callbacks + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.concat(directoryList, fs.readdir) + * .then(results => { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ + function concat(coll, 
iteratee, callback) { + return concatLimit$1(coll, Infinity, iteratee, callback) + } + var concat$1 = awaitify(concat, 3); + + /** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ + function concatSeries(coll, iteratee, callback) { + return concatLimit$1(coll, 1, iteratee, callback) + } + var concatSeries$1 = awaitify(concatSeries, 3); + + /** + * Returns a function that when called, calls-back with the values provided. + * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. + * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... + * }, callback); + */ + function constant$1(...args) { + return function (...ignoredArgs/*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; + } + + function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = wrapAsync(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, breakLoop); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; + } + + /** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. 
+ * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function detect(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) + } + var detect$1 = awaitify(detect, 3); + + /** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + */ + function detectLimit(coll, limit, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit$2(limit), coll, iteratee, callback) + } + var detectLimit$1 = awaitify(detectLimit, 4); + + /** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ + function detectSeries(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit$2(1), coll, iteratee, callback) + } + + var detectSeries$1 = awaitify(detectSeries, 3); + + function consoleFunc(name) { + return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }) + } + + /** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. + * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ + var dir = consoleFunc('dir'); + + /** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. 
Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doWhilst(iteratee, test, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); + } + + var doWhilst$1 = awaitify(doWhilst, 3); + + /** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ + function doUntil(iteratee, test, callback) { + const _test = wrapAsync(test); + return doWhilst$1(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb (err, !truth)); + }, callback); + } + + function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); + } + + /** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * } + * } + * + */ + function eachLimit$2(coll, iteratee, callback) { + return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } + + var each = awaitify(eachLimit$2, 3); + + /** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. + * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachLimit(coll, limit, iteratee, callback) { + return eachOfLimit$2(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback); + } + var eachLimit$1 = awaitify(eachLimit, 4); + + /** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. 
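+ *
+ * A minimal usage sketch, reusing the `fileList` and `deleteFile` helpers from
+ * the `each` example above:
+ *
+ *     async.eachSeries(fileList, deleteFile, function(err) {
+ *         if (err) return console.log(err);
+ *         console.log('All files have been deleted');
+ *     });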
+ * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ + function eachSeries(coll, iteratee, callback) { + return eachLimit$1(coll, 1, iteratee, callback) + } + var eachSeries$1 = awaitify(eachSeries, 3); + + /** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ + function ensureAsync(fn) { + if (isAsync(fn)) return fn; + return function (...args/*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + setImmediate$1(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; + } + + /** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function every(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) + } + var every$1 = awaitify(every, 3); + + /** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function everyLimit(coll, limit, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfLimit$2(limit), coll, iteratee, callback) + } + var everyLimit$1 = awaitify(everyLimit, 4); + + /** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. 
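+ *
+ * A minimal usage sketch, reusing the `fileList` and `fileExists` helpers from
+ * the `every` example above; each file is checked in order, and iteration stops
+ * early if any check completes with `false`.
+ *
+ *     async.everySeries(fileList, fileExists, function(err, allExist) {
+ *         if (err) return console.log(err);
+ *         console.log(allExist); // true only if every file exists
+ *     });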
+ * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function everySeries(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) + } + var everySeries$1 = awaitify(everySeries, 3); + + function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); + } + + function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({index, value: x}); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results + .sort((a, b) => a.index - b.index) + .map(v => v.value)); + }); + } + + function _filter(eachfn, coll, iteratee, callback) { + var filter = isArrayLike(coll) ? filterArray : filterGeneric; + return filter(eachfn, coll, wrapAsync(iteratee), callback); + } + + /** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function filter (coll, iteratee, callback) { + return _filter(eachOf$1, coll, iteratee, callback) + } + var filter$1 = awaitify(filter, 3); + + /** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ + function filterLimit (coll, limit, iteratee, callback) { + return _filter(eachOfLimit$2(limit), coll, iteratee, callback) + } + var filterLimit$1 = awaitify(filterLimit, 4); + + /** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ + function filterSeries (coll, iteratee, callback) { + return _filter(eachOfSeries$1, coll, iteratee, callback) + } + var filterSeries$1 = awaitify(filterSeries, 3); + + /** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. + + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). + * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). + * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ + function forever(fn, errback) { + var done = onlyOnce(errback); + var task = wrapAsync(ensureAsync(fn)); + + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); + } + var forever$1 = awaitify(forever, 2); + + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, {key, val}); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var {hasOwnProperty} = Object.prototype; + + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var {key} = mapResults[i]; + var {val} = mapResults[i]; + + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } + + return callback(err, result); + }); + } + + var groupByLimit$1 = awaitify(groupByLimit, 4); + + /** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. 
That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const files = ['dir1/file1.txt','dir2','dir4'] + * + * // asynchronous function that detects file type as none, file, or directory + * function detectFile(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(null, 'none'); + * } + * callback(null, stat.isDirectory() ? 'directory' : 'file'); + * }); + * } + * + * //Using callbacks + * async.groupBy(files, detectFile, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * }); + * + * // Using Promises + * async.groupBy(files, detectFile) + * .then( result => { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.groupBy(files, detectFile); + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function groupBy (coll, iteratee, callback) { + return groupByLimit$1(coll, Infinity, iteratee, callback) + } + + /** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). 
+ * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ + function groupBySeries (coll, iteratee, callback) { + return groupByLimit$1(coll, 1, iteratee, callback) + } + + /** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. + * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ + var log = consoleFunc('log'); + + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesLimit(obj, limit, iteratee, callback) { + callback = once(callback); + var newObj = {}; + var _iteratee = wrapAsync(iteratee); + return eachOfLimit$2(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); + } + + var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); + + /** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file3.txt' + * }; + * + * const withMissingFileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file4.txt' + * }; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, key, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * }); + * + * // Error handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.mapValues(fileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * }).catch (err => { + * console.log(err); + * }); + * + * // Error Handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch (err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.mapValues(fileMap, getFileSizeInBytes); + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function mapValues(obj, iteratee, callback) { + return mapValuesLimit$1(obj, Infinity, iteratee, callback) + } + + /** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. 
+ * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ + function mapValuesSeries(obj, iteratee, callback) { + return mapValuesLimit$1(obj, 1, iteratee, callback) + } + + /** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. + * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ + function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = wrapAsync(fn); + var memoized = initialParams((args, callback) => { + var key = hasher(...args); + if (key in memo) { + setImmediate$1(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; + } + + /* istanbul ignore file */ + + /** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... 
- any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ + var _defer; + + if (hasNextTick) { + _defer = process.nextTick; + } else if (hasSetImmediate) { + _defer = setImmediate; + } else { + _defer = fallback; + } + + var nextTick = wrap(_defer); + + var _parallel = awaitify((eachfn, tasks, callback) => { + var results = isArrayLike(tasks) ? [] : {}; + + eachfn(tasks, (task, key, taskCb) => { + wrapAsync(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); + }, 3); + + /** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * + * //Using Callbacks + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
+ * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function parallel(tasks, callback) { + return _parallel(eachOf$1, tasks, callback); + } + + /** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + */ + function parallelLimit(tasks, limit, callback) { + return _parallel(eachOfLimit$2(limit), tasks, callback); + } + + /** + * A queue of tasks for the worker function to complete. 
+ * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. + * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object + * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`, + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. + * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. + * @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. + * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. + * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. 
+ * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error. + * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. + * + * @example + * const q = async.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } + * + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() + */ + + /** + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. + * + * @name queue + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. + * @example + * + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); + * + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() + * + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); + * + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * // callback is optional + * q.push({name: 'bar'}); + * + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); + * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) { + * console.log('finished processing bar'); + * }); + */ + function queue (worker, concurrency) { + var _worker = wrapAsync(worker); + return queue$1((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); + } + + // Binary min-heap implementation used for priority queue. 
+ // Implementation is stable, i.e. push time is considered for equal priorities + class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } + + get length() { + return this.heap.length; + } + + empty () { + this.heap = []; + return this; + } + + percUp(index) { + let p; + + while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; + + index = p; + } + } + + percDown(index) { + let l; + + while ((l=leftChi(index)) < this.heap.length) { + if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { + l = l+1; + } + + if (smaller(this.heap[index], this.heap[l])) { + break; + } + + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + + index = l; + } + } + + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length-1); + } + + unshift(node) { + return this.heap.push(node); + } + + shift() { + let [top] = this.heap; + + this.heap[0] = this.heap[this.heap.length-1]; + this.heap.pop(); + this.percDown(0); + + return top; + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator] () { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + + remove (testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } + + this.heap.splice(j); + + for (let i = parent(this.heap.length-1); i >= 0; i--) { + this.percDown(i); + } + + return this; + } + } + + function leftChi(i) { + return (i<<1)+1; + } + + function parent(i) { + return ((i+1)>>1)-1; + } + + function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } + else { + return x.pushCount < y.pushCount; + } + } + + /** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. + * + * @name priorityQueue + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are three + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. + * * `pushAsync(task, priority, [callback])` - the same as `priorityQueue.push`, + * except this returns a promise that rejects if an error occurs. + * * The `unshift` and `unshiftAsync` methods were removed. 
+ */ + function priorityQueue(worker, concurrency) { + // Start with a normal queue + var q = queue(worker, concurrency); + + var { + push, + pushAsync + } = q; + + q._tasks = new Heap(); + q._createTaskItem = ({data, priority}, callback) => { + return { + data, + priority, + callback + }; + }; + + function createDataItems(tasks, priority) { + if (!Array.isArray(tasks)) { + return {data: tasks, priority}; + } + return tasks.map(data => { return {data, priority}; }); + } + + // Override push to accept second parameter representing priority + q.push = function(data, priority = 0, callback) { + return push(createDataItems(data, priority), callback); + }; + + q.pushAsync = function(data, priority = 0, callback) { + return pushAsync(createDataItems(data, priority), callback); + }; + + // Remove unshift functions + delete q.unshift; + delete q.unshiftAsync; + + return q; + } + + /** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ + function race(tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + wrapAsync(tasks[i])(callback); + } + } + + var race$1 = awaitify(race, 2); + + /** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. + * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + */ + function reduceRight (array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return reduce$1(reversed, memo, iteratee, callback); + } + + /** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ + function reflect(fn) { + var _fn = wrapAsync(fn); + return initialParams(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0){ + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + + return _fn.apply(this, args); + }); + } + + /** + * A helper function that wraps an array or an object of functions with `reflect`. + * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... 
+ * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ + function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(reflect); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = reflect.call(this, tasks[key]); + }); + } + return results; + } + + function reject$2(eachfn, arr, _iteratee, callback) { + const iteratee = wrapAsync(_iteratee); + return _filter(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); + }); + }, callback); + } + + /** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }); + * + * // Using Promises + * async.reject(fileList, fileExists) + * .then( results => { + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.reject(fileList, fileExists); + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function reject (coll, iteratee, callback) { + return reject$2(eachOf$1, coll, iteratee, callback) + } + var reject$1 = awaitify(reject, 3); + + /** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ + function rejectLimit (coll, limit, iteratee, callback) { + return reject$2(eachOfLimit$2(limit), coll, iteratee, callback) + } + var rejectLimit$1 = awaitify(rejectLimit, 4); + + /** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + */ + function rejectSeries (coll, iteratee, callback) { + return reject$2(eachOfSeries$1, coll, iteratee, callback) + } + var rejectSeries$1 = awaitify(rejectSeries, 3); + + function constant(value) { + return function () { + return value; + } + } + + /** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). + * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... 
milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ + const DEFAULT_TIMES = 5; + const DEFAULT_INTERVAL = 0; + + function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant(DEFAULT_INTERVAL) + }; + + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || promiseCallback(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || promiseCallback(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = wrapAsync(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return + if (err && attempt++ < options.times && + (typeof options.errorFilter != 'function' || + options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); + } + + retryAttempt(); + return callback[PROMISE_SYMBOL] + } + + function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? + t.interval : + constant(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } + } + + /** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. 
+ * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ + function retryable (opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = (opts && opts.arity) || task.length; + if (isAsync(task)) { + arity += 1; + } + var _task = wrapAsync(task); + return initialParams((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = promiseCallback(); + } + function taskFn(cb) { + _task(...args, cb); + } + + if (opts) retry(opts, taskFn, callback); + else retry(taskFn, callback); + + return callback[PROMISE_SYMBOL] + }); + } + + /** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). 
+ * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function series(tasks, callback) { + return _parallel(eachOfSeries$1, tasks, callback); + } + + /** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function some(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) + } + var some$1 = awaitify(some, 3); + + /** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + */ + function someLimit(coll, limit, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfLimit$2(limit), coll, iteratee, callback) + } + var someLimit$1 = awaitify(someLimit, 4); + + /** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ + function someSeries(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) + } + var someSeries$1 = awaitify(someSeries, 3); + + /** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). + * @returns {Promise} a promise, if no callback passed + * @example + * + * // bigfile.txt is a file that is 251100 bytes in size + * // mediumfile.txt is a file that is 11000 bytes in size + * // smallfile.txt is a file that is 121 bytes in size + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) return callback(getFileSizeErr); + * callback(null, fileSize); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // descending order + * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) { + * return callback(getFileSizeErr); + * } + * callback(null, fileSize * -1); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * } + * ); + * + * // Using Promises + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * (async () => { + * try { + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * // Error handling + * async () => { + * try { + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ + function sortBy (coll, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return map$1(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, {value: x, criteria}); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); + + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + } + } + var sortBy$1 = awaitify(sortBy, 3); + + /** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... + * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ + function timeout(asyncFn, milliseconds, info) { + var fn = wrapAsync(asyncFn); + + return initialParams((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); + } + + function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; + } + + /** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ + function timesLimit(count, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(range(count), limit, _iteratee, callback); + } + + /** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. 
+ * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ + function times (n, iteratee, callback) { + return timesLimit(n, Infinity, iteratee, callback) + } + + /** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ + function timesSeries (n, iteratee, callback) { + return timesLimit(n, 1, iteratee, callback) + } + + /** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * }); + * + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * (async () => { + * try { + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. + * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileMap, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * }); + * + * // Using Promises + * async.transform(fileMap, transformFileSize) + * .then(result => { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.transform(fileMap, transformFileSize); + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ + function transform (coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? [] : {}; + } + callback = once(callback || promiseCallback()); + var _iteratee = wrapAsync(iteratee); + + eachOf$1(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[PROMISE_SYMBOL] + } + + /** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. 
+ * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ + function tryEach(tasks, callback) { + var error = null; + var result; + return eachSeries$1(tasks, (task, taskCb) => { + wrapAsync(task)((err, ...args) => { + if (err === false) return taskCb(err); + + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); + } + + var tryEach$1 = awaitify(tryEach); + + /** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. + * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ + function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; + } + + /** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ + function whilst(test, iteratee, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); + } + var whilst$1 = awaitify(whilst, 3); + + /** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ + function until(test, iteratee, callback) { + const _test = wrapAsync(test); + return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); + } + + /** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ + function waterfall (tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + + function nextTask(args) { + var task = wrapAsync(tasks[taskIndex++]); + task(...args, onlyOnce(next)); + } + + function next(err, ...args) { + if (err === false) return + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } + + nextTask([]); + } + + var waterfall$1 = awaitify(waterfall); + + /** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. 
Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. + * + * @typedef {Function} AsyncFunction + * @static + */ + + + var index = { + apply, + applyEach, + applyEachSeries, + asyncify, + auto, + autoInject, + cargo: cargo$1, + cargoQueue: cargo, + compose, + concat: concat$1, + concatLimit: concatLimit$1, + concatSeries: concatSeries$1, + constant: constant$1, + detect: detect$1, + detectLimit: detectLimit$1, + detectSeries: detectSeries$1, + dir, + doUntil, + doWhilst: doWhilst$1, + each, + eachLimit: eachLimit$1, + eachOf: eachOf$1, + eachOfLimit: eachOfLimit$1, + eachOfSeries: eachOfSeries$1, + eachSeries: eachSeries$1, + ensureAsync, + every: every$1, + everyLimit: everyLimit$1, + everySeries: everySeries$1, + filter: filter$1, + filterLimit: filterLimit$1, + filterSeries: filterSeries$1, + forever: forever$1, + groupBy, + groupByLimit: groupByLimit$1, + groupBySeries, + log, + map: map$1, + mapLimit: mapLimit$1, + mapSeries: mapSeries$1, + mapValues, + mapValuesLimit: mapValuesLimit$1, + mapValuesSeries, + memoize, + nextTick, + parallel, + parallelLimit, + priorityQueue, + queue, + race: race$1, + reduce: reduce$1, + reduceRight, + reflect, + reflectAll, + reject: reject$1, + rejectLimit: rejectLimit$1, + rejectSeries: rejectSeries$1, + retry, + retryable, + seq, + series, + setImmediate: setImmediate$1, + some: some$1, + someLimit: someLimit$1, + someSeries: someSeries$1, + sortBy: sortBy$1, + timeout, + times, + timesLimit, + timesSeries, + transform, + tryEach: tryEach$1, + unmemoize, + until, + waterfall: waterfall$1, + whilst: whilst$1, + + // aliases + all: every$1, + allLimit: everyLimit$1, + allSeries: everySeries$1, + any: some$1, + anyLimit: someLimit$1, + anySeries: someSeries$1, + find: detect$1, + findLimit: detectLimit$1, + findSeries: detectSeries$1, + flatMap: concat$1, + flatMapLimit: concatLimit$1, + flatMapSeries: concatSeries$1, + forEach: each, + forEachSeries: eachSeries$1, + forEachLimit: eachLimit$1, + forEachOf: eachOf$1, + forEachOfSeries: eachOfSeries$1, + forEachOfLimit: eachOfLimit$1, + inject: reduce$1, + foldl: reduce$1, + foldr: reduceRight, + select: filter$1, + selectLimit: filterLimit$1, + selectSeries: filterSeries$1, + wrapSync: asyncify, + during: whilst$1, + doDuring: doWhilst$1 + }; + + exports.all = every$1; + exports.allLimit = everyLimit$1; + exports.allSeries = everySeries$1; + exports.any = some$1; + exports.anyLimit = someLimit$1; + exports.anySeries = someSeries$1; + exports.apply = apply; + exports.applyEach = applyEach; + exports.applyEachSeries = applyEachSeries; + exports.asyncify = asyncify; + exports.auto = auto; + exports.autoInject = autoInject; + exports.cargo = cargo$1; + exports.cargoQueue = cargo; + exports.compose = compose; + exports.concat = concat$1; + exports.concatLimit = concatLimit$1; + exports.concatSeries = concatSeries$1; + exports.constant = constant$1; + exports.default = index; + exports.detect = detect$1; + exports.detectLimit = detectLimit$1; + exports.detectSeries = detectSeries$1; + exports.dir = dir; + exports.doDuring = doWhilst$1; + exports.doUntil = doUntil; + exports.doWhilst = doWhilst$1; + exports.during = whilst$1; + exports.each = each; + exports.eachLimit = eachLimit$1; + exports.eachOf = eachOf$1; + exports.eachOfLimit = eachOfLimit$1; + exports.eachOfSeries = eachOfSeries$1; + exports.eachSeries = eachSeries$1; + exports.ensureAsync = ensureAsync; + 
exports.every = every$1; + exports.everyLimit = everyLimit$1; + exports.everySeries = everySeries$1; + exports.filter = filter$1; + exports.filterLimit = filterLimit$1; + exports.filterSeries = filterSeries$1; + exports.find = detect$1; + exports.findLimit = detectLimit$1; + exports.findSeries = detectSeries$1; + exports.flatMap = concat$1; + exports.flatMapLimit = concatLimit$1; + exports.flatMapSeries = concatSeries$1; + exports.foldl = reduce$1; + exports.foldr = reduceRight; + exports.forEach = each; + exports.forEachLimit = eachLimit$1; + exports.forEachOf = eachOf$1; + exports.forEachOfLimit = eachOfLimit$1; + exports.forEachOfSeries = eachOfSeries$1; + exports.forEachSeries = eachSeries$1; + exports.forever = forever$1; + exports.groupBy = groupBy; + exports.groupByLimit = groupByLimit$1; + exports.groupBySeries = groupBySeries; + exports.inject = reduce$1; + exports.log = log; + exports.map = map$1; + exports.mapLimit = mapLimit$1; + exports.mapSeries = mapSeries$1; + exports.mapValues = mapValues; + exports.mapValuesLimit = mapValuesLimit$1; + exports.mapValuesSeries = mapValuesSeries; + exports.memoize = memoize; + exports.nextTick = nextTick; + exports.parallel = parallel; + exports.parallelLimit = parallelLimit; + exports.priorityQueue = priorityQueue; + exports.queue = queue; + exports.race = race$1; + exports.reduce = reduce$1; + exports.reduceRight = reduceRight; + exports.reflect = reflect; + exports.reflectAll = reflectAll; + exports.reject = reject$1; + exports.rejectLimit = rejectLimit$1; + exports.rejectSeries = rejectSeries$1; + exports.retry = retry; + exports.retryable = retryable; + exports.select = filter$1; + exports.selectLimit = filterLimit$1; + exports.selectSeries = filterSeries$1; + exports.seq = seq; + exports.series = series; + exports.setImmediate = setImmediate$1; + exports.some = some$1; + exports.someLimit = someLimit$1; + exports.someSeries = someSeries$1; + exports.sortBy = sortBy$1; + exports.timeout = timeout; + exports.times = times; + exports.timesLimit = timesLimit; + exports.timesSeries = timesSeries; + exports.transform = transform; + exports.tryEach = tryEach$1; + exports.unmemoize = unmemoize; + exports.until = until; + exports.waterfall = waterfall$1; + exports.whilst = whilst$1; + exports.wrapSync = asyncify; + + Object.defineProperty(exports, '__esModule', { value: true }); + +})); diff --git a/backend/node_modules/async/dist/async.min.js b/backend/node_modules/async/dist/async.min.js new file mode 100644 index 00000000..f0b7fdf5 --- /dev/null +++ b/backend/node_modules/async/dist/async.min.js @@ -0,0 +1 @@ +(function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):(e="undefined"==typeof globalThis?e||self:globalThis,t(e.async={}))})(this,function(e){"use strict";function t(e,...t){return(...n)=>e(...t,...n)}function n(e){return function(...t){var n=t.pop();return e.call(this,t,n)}}function a(e){setTimeout(e,0)}function i(e){return(t,...n)=>e(()=>t(...n))}function r(e){return d(e)?function(...t){const n=t.pop(),a=e.apply(this,t);return s(a,n)}:n(function(t,n){var a;try{a=e.apply(this,t)}catch(t){return n(t)}return a&&"function"==typeof a.then?s(a,n):void n(null,a)})}function s(e,t){return e.then(e=>{l(t,null,e)},e=>{l(t,e&&(e instanceof Error||e.message)?e:new Error(e))})}function l(e,t,n){try{e(t,n)}catch(e){_e(t=>{throw t},e)}}function d(e){return"AsyncFunction"===e[Symbol.toStringTag]}function u(e){return"AsyncGenerator"===e[Symbol.toStringTag]}function 
p(e){return"function"==typeof e[Symbol.asyncIterator]}function c(e){if("function"!=typeof e)throw new Error("expected a function");return d(e)?r(e):e}function o(e,t){function n(...n){return"function"==typeof n[t-1]?e.apply(this,n):new Promise((a,i)=>{n[t-1]=(e,...t)=>e?i(e):void a(1{c(e).apply(i,n.concat(t))},a)});return i}}function f(e,t,n,a){t=t||[];var i=[],r=0,s=c(n);return e(t,(e,t,n)=>{var a=r++;s(e,(e,t)=>{i[a]=t,n(e)})},e=>{a(e,i)})}function y(e){return e&&"number"==typeof e.length&&0<=e.length&&0==e.length%1}function m(e){function t(...t){if(null!==e){var n=e;e=null,n.apply(this,t)}}return Object.assign(t,e),t}function g(e){return e[Symbol.iterator]&&e[Symbol.iterator]()}function k(e){var t=-1,n=e.length;return function a(){return++t=t||u||l||(u=!0,e.next().then(({value:e,done:t})=>{if(!(d||l))return u=!1,t?(l=!0,void(0>=p&&a(null))):void(p++,n(e,c,r),c++,i())}).catch(s))}function r(e,t){return p-=1,d?void 0:e?s(e):!1===e?(l=!0,void(d=!0)):t===be||l&&0>=p?(l=!0,a(null)):void i()}function s(e){d||(u=!1,l=!0,a(e))}let l=!1,d=!1,u=!1,p=0,c=0;i()}function O(e,t,n){function a(e,t){!1===e&&(l=!0);!0===l||(e?n(e):(++r===s||t===be)&&n(null))}n=m(n);var i=0,r=0,{length:s}=e,l=!1;for(0===s&&n(null);i{t=e,n=a}),e}function A(e,t,n){function a(e,t){k.push(()=>l(e,t))}function i(){if(!f){if(0===k.length&&0===h)return n(null,o);for(;k.length&&he()),i()}function l(e,t){if(!y){var a=L((t,...a)=>{if(h--,!1===t)return void(f=!0);if(2>a.length&&([a]=a),t){var i={};if(Object.keys(o).forEach(e=>{i[e]=o[e]}),i[e]=a,y=!0,g=Object.create(null),f)return;n(t,i)}else o[e]=a,s(e)});h++;var i=c(t[t.length-1]);1{0==--S[e]&&v.push(e)});if(t!==p)throw new Error("async.auto cannot execute tasks due to a recursive dependency")}function u(t){var n=[];return Object.keys(e).forEach(a=>{const i=e[a];Array.isArray(i)&&0<=i.indexOf(t)&&n.push(a)}),n}"number"!=typeof t&&(n=t,t=null),n=m(n||b());var p=Object.keys(e).length;if(!p)return n(null);t||(t=p);var o={},h=0,f=!1,y=!1,g=Object.create(null),k=[],v=[],S={};return Object.keys(e).forEach(t=>{var n=e[t];if(!Array.isArray(n))return a(t,[n]),void v.push(t);var i=n.slice(0,n.length-1),s=i.length;return 0===s?(a(t,n),void v.push(t)):void(S[t]=s,i.forEach(l=>{if(!e[l])throw new Error("async.auto task `"+t+"` has a non-existent dependency `"+l+"` in "+i.join(", "));r(l,()=>{s--,0===s&&a(t,n)})}))}),d(),i(),n[Ce]}function I(e){let t="",n=0,a=e.indexOf("*/");for(;ne.replace(Ne,"").trim())}function j(e,t){var n={};return Object.keys(e).forEach(t=>{function a(e,t){var n=i.map(t=>e[t]);n.push(t),c(r)(...n)}var i,r=e[t],s=d(r),l=!s&&1===r.length||s&&0===r.length;if(Array.isArray(r))i=[...r],r=i.pop(),n[t]=i.concat(0{r(e,n),t(...a)};f[e].push(n)}function r(e,t){return e?t?void(f[e]=f[e].filter(e=>e!==t)):f[e]=[]:Object.keys(f).forEach(e=>f[e]=[])}function s(e,...t){f[e].forEach(e=>e(...t))}function l(e,t,n,a){function i(e,...t){return e?n?s(e):r():1>=t.length?r(t[0]):void r(t)}if(null!=a&&"function"!=typeof a)throw new Error("task callback must be a function");k.started=!0;var r,s,l=k._createTaskItem(e,n?i:a||i);if(t?k._tasks.unshift(l):k._tasks.push(l),y||(y=!0,_e(()=>{y=!1,k.process()})),n||!a)return new Promise((e,t)=>{r=e,s=t})}function d(e){return function(t,...n){o-=1;for(var a=0,r=e.length;as("drain")),!0)}if(null==t)t=1;else if(0===t)throw new RangeError("Concurrency must not be zero");var p=c(e),o=0,h=[];const f={error:[],drain:[],saturated:[],unsaturated:[],empty:[]};var y=!1;const m=e=>t=>t?void(r(e),a(e,t)):new Promise((t,n)=>{i(e,(e,a)=>e?n(e):void t(a))});var 
g=!1,k={_tasks:new Ve,_createTaskItem(e,t){return{data:e,callback:t}},*[Symbol.iterator](){yield*k._tasks[Symbol.iterator]()},concurrency:t,payload:n,buffer:t/4,started:!1,paused:!1,push(e,t){return Array.isArray(e)?u(e)?void 0:e.map(e=>l(e,!1,!1,t)):l(e,!1,!1,t)},pushAsync(e,t){return Array.isArray(e)?u(e)?void 0:e.map(e=>l(e,!1,!0,t)):l(e,!1,!0,t)},kill(){r(),k._tasks.empty()},unshift(e,t){return Array.isArray(e)?u(e)?void 0:e.map(e=>l(e,!0,!1,t)):l(e,!0,!1,t)},unshiftAsync(e,t){return Array.isArray(e)?u(e)?void 0:e.map(e=>l(e,!0,!0,t)):l(e,!0,!0,t)},remove(e){k._tasks.remove(e)},process(){var e=Math.min;if(!g){for(g=!0;!k.paused&&o{t.apply(n,e.concat((e,...t)=>{a(e,t)}))},(e,t)=>a(e,...t)),a[Ce]}}function P(...e){return C(...e.reverse())}function R(...e){return function(...t){var n=t.pop();return n(null,...e)}}function z(e,t){return(n,a,i,r)=>{var s,l=!1;const d=c(i);n(a,(n,a,i)=>{d(n,(a,r)=>a||!1===a?i(a):e(r)&&!s?(l=!0,s=t(!0,n),i(null,be)):void i())},e=>e?r(e):void r(null,l?s:t(!1)))}}function N(e){return(t,...n)=>c(t)(...n,(t,...n)=>{"object"==typeof console&&(t?console.error&&console.error(t):console[e]&&n.forEach(t=>console[e](t)))})}function V(e,t,n){const a=c(t);return Xe(e,(...e)=>{const t=e.pop();a(...e,(e,n)=>t(e,!n))},n)}function Y(e){return(t,n,a)=>e(t,a)}function q(e){return d(e)?e:function(...t){var n=t.pop(),a=!0;t.push((...e)=>{a?_e(()=>n(...e)):n(...e)}),e.apply(this,t),a=!1}}function D(e,t,n,a){var r=Array(t.length);e(t,(e,t,a)=>{n(e,(e,n)=>{r[t]=!!n,a(e)})},e=>{if(e)return a(e);for(var n=[],s=0;s{n(e,(n,r)=>n?a(n):void(r&&i.push({index:t,value:e}),a(n)))},e=>e?a(e):void a(null,i.sort((e,t)=>e.index-t.index).map(e=>e.value)))}function U(e,t,n,a){var i=y(t)?D:Q;return i(e,t,c(n),a)}function G(e,t,n){return dt(e,1/0,t,n)}function W(e,t,n){return dt(e,1,t,n)}function H(e,t,n){return pt(e,1/0,t,n)}function J(e,t,n){return pt(e,1,t,n)}function K(e,t=e=>e){var a=Object.create(null),r=Object.create(null),s=c(e),l=n((e,n)=>{var d=t(...e);d in a?_e(()=>n(null,...a[d])):d in r?r[d].push(n):(r[d]=[n],s(...e,(e,...t)=>{e||(a[d]=t);var n=r[d];delete r[d];for(var s=0,u=n.length;s{n(e[0],t)},t,1)}function ee(e){return(e<<1)+1}function te(e){return(e+1>>1)-1}function ne(e,t){return e.priority===t.priority?e.pushCount({data:e,priority:t})):{data:e,priority:t}}var a=$(e,t),{push:i,pushAsync:r}=a;return a._tasks=new ht,a._createTaskItem=({data:e,priority:t},n)=>({data:e,priority:t,callback:n}),a.push=function(e,t=0,a){return i(n(e,t),a)},a.pushAsync=function(e,t=0,a){return r(n(e,t),a)},delete a.unshift,delete a.unshiftAsync,a}function ie(e,t,n,a){var i=[...e].reverse();return qe(i,t,n,a)}function re(e){var t=c(e);return n(function a(e,n){return e.push((e,...t)=>{let a={};if(e&&(a.error=e),0=t.length&&([i]=t),a.value=i}n(null,a)}),t.apply(this,e)})}function se(e){var t;return Array.isArray(e)?t=e.map(re):(t={},Object.keys(e).forEach(n=>{t[n]=re.call(this,e[n])})),t}function le(e,t,n,a){const i=c(n);return U(e,t,(e,t)=>{i(e,(e,n)=>{t(e,!n)})},a)}function de(e){return function(){return e}}function ue(e,t,n){function a(){r((e,...t)=>{!1===e||(e&&s++arguments.length&&"function"==typeof e?(n=t||b(),t=e):(pe(i,e),n=n||b()),"function"!=typeof t)throw new Error("Invalid arguments for async.retry");var r=c(t),s=1;return a(),n[Ce]}function pe(e,n){if("object"==typeof n)e.times=+n.times||kt,e.intervalFunc="function"==typeof n.interval?n.interval:de(+n.interval||vt),e.errorFilter=n.errorFilter;else if("number"==typeof n||"string"==typeof n)e.times=+n||kt;else throw new Error("Invalid arguments for 
async.retry")}function ce(e,t){t||(t=e,e=null);let a=e&&e.arity||t.length;d(t)&&(a+=1);var i=c(t);return n((t,n)=>{function r(e){i(...t,e)}return(t.length{function s(){var t=e.name||"anonymous",n=new Error("Callback function \""+t+"\" timed out.");n.code="ETIMEDOUT",a&&(n.info=a),d=!0,r(n)}var l,d=!1;n.push((...e)=>{d||(r(...e),clearTimeout(l))}),l=setTimeout(s,t),i(...n)})}function fe(e){for(var t=Array(e);e--;)t[e]=e;return t}function ye(e,t,n,a){var i=c(n);return De(fe(e),t,i,a)}function me(e,t,n){return ye(e,1/0,t,n)}function ge(e,t,n){return ye(e,1,t,n)}function ke(e,t,n,a){3>=arguments.length&&"function"==typeof t&&(a=n,n=t,t=Array.isArray(e)?[]:{}),a=m(a||b());var i=c(n);return Me(e,(e,n,a)=>{i(t,e,n,a)},e=>a(e,t)),a[Ce]}function ve(e){return(...t)=>(e.unmemoized||e)(...t)}function Se(e,t,n){const a=c(e);return _t(e=>a((t,n)=>e(t,!n)),t,n)}var xe,Le="function"==typeof queueMicrotask&&queueMicrotask,Ee="function"==typeof setImmediate&&setImmediate,Oe="object"==typeof process&&"function"==typeof process.nextTick;xe=Le?queueMicrotask:Ee?setImmediate:Oe?process.nextTick:a;var _e=i(xe);const be={};var Ae=e=>(t,n,a)=>{function i(e,t){if(!d)if(c-=1,e)l=!0,a(e);else if(!1===e)l=!0,d=!0;else{if(t===be||l&&0>=c)return l=!0,a(null);o||r()}}function r(){for(o=!0;c=c&&a(null));c+=1,n(t.value,t.key,L(i))}o=!1}if(a=m(a),0>=e)throw new RangeError("concurrency limit cannot be less than 1");if(!t)return a(null);if(u(t))return E(t,e,n,a);if(p(t))return E(t[Symbol.asyncIterator](),e,n,a);var s=x(t),l=!1,d=!1,c=0,o=!1;r()},Ie=o(function i(e,t,n,a){return Ae(t)(e,c(n),a)},4),Me=o(function a(e,t,n){var i=y(e)?O:_;return i(e,c(t),n)},3),je=o(function a(e,t,n){return f(Me,e,t,n)},3),we=h(je),Be=o(function a(e,t,n){return Ie(e,1,t,n)},3),Te=o(function a(e,t,n){return f(Be,e,t,n)},3),Fe=h(Te);const Ce=Symbol("promiseCallback");var Pe=/^(?:async\s)?(?:function)?\s*(?:\w+\s*)?\(([^)]+)\)(?:\s*{)/,Re=/^(?:async\s)?\s*(?:\(\s*)?((?:[^)=\s]\s*)*)(?:\)\s*)?=>/,ze=/,/,Ne=/(=.+)?(\s*)$/;class Ve{constructor(){this.head=this.tail=null,this.length=0}removeLink(e){return e.prev?e.prev.next=e.next:this.head=e.next,e.next?e.next.prev=e.prev:this.tail=e.prev,e.prev=e.next=null,this.length-=1,e}empty(){for(;this.head;)this.shift();return this}insertAfter(e,t){t.prev=e,t.next=e.next,e.next?e.next.prev=t:this.tail=t,e.next=t,this.length+=1}insertBefore(e,t){t.prev=e.prev,t.next=e,e.prev?e.prev.next=t:this.head=t,e.prev=t,this.length+=1}unshift(e){this.head?this.insertBefore(this.head,e):w(this,e)}push(e){this.tail?this.insertAfter(this.tail,e):w(this,e)}shift(){return this.head&&this.removeLink(this.head)}pop(){return this.tail&&this.removeLink(this.tail)}toArray(){return[...this]}*[Symbol.iterator](){for(var e=this.head;e;)yield e.data,e=e.next}remove(e){for(var t=this.head;t;){var{next:n}=t;e(t)&&this.removeLink(t),t=n}return this}}var Ye,qe=o(function i(e,t,n,a){a=m(a);var r=c(n);return Be(e,(e,n,a)=>{r(t,e,(e,n)=>{t=n,a(e)})},e=>a(e,t))},4),De=o(function i(e,t,n,a){return f(Ae(t),e,n,a)},4),Qe=o(function i(e,t,n,a){var r=c(n);return De(e,t,(e,t)=>{r(e,(e,...n)=>e?t(e):t(e,n))},(e,t)=>{for(var n=[],r=0;re,(e,t)=>t)(Me,e,t,n)},3),He=o(function i(e,t,n,a){return z(e=>e,(e,t)=>t)(Ae(t),e,n,a)},4),Je=o(function a(e,t,n){return z(e=>e,(e,t)=>t)(Ae(1),e,t,n)},3),Ke=N("dir"),Xe=o(function a(e,t,n){function i(e,...t){return e?n(e):void(!1===e||(s=t,d(...t,r)))}function r(e,t){return e?n(e):!1===e?void 0:t?void l(i):n(null,...s)}n=L(n);var s,l=c(e),d=c(t);return r(null,!0)},3),Ze=o(function a(e,t,n){return 
Me(e,Y(c(t)),n)},3),$e=o(function i(e,t,n,a){return Ae(t)(e,Y(c(n)),a)},4),et=o(function a(e,t,n){return $e(e,1,t,n)},3),tt=o(function a(e,t,n){return z(e=>!e,e=>!e)(Me,e,t,n)},3),nt=o(function i(e,t,n,a){return z(e=>!e,e=>!e)(Ae(t),e,n,a)},4),at=o(function a(e,t,n){return z(e=>!e,e=>!e)(Be,e,t,n)},3),it=o(function a(e,t,n){return U(Me,e,t,n)},3),rt=o(function i(e,t,n,a){return U(Ae(t),e,n,a)},4),st=o(function a(e,t,n){return U(Be,e,t,n)},3),lt=o(function n(e,t){function a(e){return e?i(e):void(!1===e||r(a))}var i=L(t),r=c(q(e));return a()},2),dt=o(function i(e,t,n,a){var r=c(n);return De(e,t,(e,t)=>{r(e,(n,a)=>n?t(n):t(n,{key:a,val:e}))},(e,t)=>{for(var n={},{hasOwnProperty:r}=Object.prototype,s=0;s{s(e,t,(e,a)=>e?n(e):void(r[t]=a,n(e)))},e=>a(e,r))},4);Ye=Oe?process.nextTick:Ee?setImmediate:a;var ct=i(Ye),ot=o((e,t,n)=>{var a=y(t)?[]:{};e(t,(e,t,n)=>{c(e)((e,...i)=>{2>i.length&&([i]=i),a[t]=i,n(e)})},e=>n(e,a))},3);class ht{constructor(){this.heap=[],this.pushCount=Number.MIN_SAFE_INTEGER}get length(){return this.heap.length}empty(){return this.heap=[],this}percUp(e){for(let n;0e)(Me,e,t,n)},3),xt=o(function i(e,t,n,a){return z(Boolean,e=>e)(Ae(t),e,n,a)},4),Lt=o(function a(e,t,n){return z(Boolean,e=>e)(Be,e,t,n)},3),Et=o(function a(e,t,n){function i(e,t){var n=e.criteria,a=t.criteria;return na?1:0}var r=c(t);return je(e,(e,t)=>{r(e,(n,a)=>n?t(n):void t(n,{value:e,criteria:a}))},(e,t)=>e?n(e):void n(null,t.sort(i).map(e=>e.value)))},3),Ot=o(function n(e,t){var a,i=null;return et(e,(e,t)=>{c(e)((e,...n)=>!1===e?t(e):void(2>n.length?[a]=n:a=n,i=e,t(e?null:{})))},()=>t(i,a))}),_t=o(function a(e,t,n){function i(e,...t){if(e)return n(e);d=t;!1===e||l(r)}function r(e,t){return e?n(e):!1===e?void 0:t?void s(i):n(null,...d)}n=L(n);var s=c(t),l=c(e),d=[];return l(r)},3),bt=o(function n(e,t){function a(t){var n=c(e[r++]);n(...t,L(i))}function i(n,...i){return!1===n?void 0:n||r===e.length?t(n,...i):void a(i)}if(t=m(t),!Array.isArray(e))return t(new Error("First argument to waterfall must be an array of functions"));if(!e.length)return t();var 
r=0;a([])});e.all=tt,e.allLimit=nt,e.allSeries=at,e.any=St,e.anyLimit=xt,e.anySeries=Lt,e.apply=t,e.applyEach=we,e.applyEachSeries=Fe,e.asyncify=r,e.auto=A,e.autoInject=j,e.cargo=T,e.cargoQueue=F,e.compose=P,e.concat=Ue,e.concatLimit=Qe,e.concatSeries=Ge,e.constant=R,e.default={apply:t,applyEach:we,applyEachSeries:Fe,asyncify:r,auto:A,autoInject:j,cargo:T,cargoQueue:F,compose:P,concat:Ue,concatLimit:Qe,concatSeries:Ge,constant:R,detect:We,detectLimit:He,detectSeries:Je,dir:Ke,doUntil:V,doWhilst:Xe,each:Ze,eachLimit:$e,eachOf:Me,eachOfLimit:Ie,eachOfSeries:Be,eachSeries:et,ensureAsync:q,every:tt,everyLimit:nt,everySeries:at,filter:it,filterLimit:rt,filterSeries:st,forever:lt,groupBy:G,groupByLimit:dt,groupBySeries:W,log:ut,map:je,mapLimit:De,mapSeries:Te,mapValues:H,mapValuesLimit:pt,mapValuesSeries:J,memoize:K,nextTick:ct,parallel:X,parallelLimit:Z,priorityQueue:ae,queue:$,race:ft,reduce:qe,reduceRight:ie,reflect:re,reflectAll:se,reject:yt,rejectLimit:mt,rejectSeries:gt,retry:ue,retryable:ce,seq:C,series:oe,setImmediate:_e,some:St,someLimit:xt,someSeries:Lt,sortBy:Et,timeout:he,times:me,timesLimit:ye,timesSeries:ge,transform:ke,tryEach:Ot,unmemoize:ve,until:Se,waterfall:bt,whilst:_t,all:tt,allLimit:nt,allSeries:at,any:St,anyLimit:xt,anySeries:Lt,find:We,findLimit:He,findSeries:Je,flatMap:Ue,flatMapLimit:Qe,flatMapSeries:Ge,forEach:Ze,forEachSeries:et,forEachLimit:$e,forEachOf:Me,forEachOfSeries:Be,forEachOfLimit:Ie,inject:qe,foldl:qe,foldr:ie,select:it,selectLimit:rt,selectSeries:st,wrapSync:r,during:_t,doDuring:Xe},e.detect=We,e.detectLimit=He,e.detectSeries=Je,e.dir=Ke,e.doDuring=Xe,e.doUntil=V,e.doWhilst=Xe,e.during=_t,e.each=Ze,e.eachLimit=$e,e.eachOf=Me,e.eachOfLimit=Ie,e.eachOfSeries=Be,e.eachSeries=et,e.ensureAsync=q,e.every=tt,e.everyLimit=nt,e.everySeries=at,e.filter=it,e.filterLimit=rt,e.filterSeries=st,e.find=We,e.findLimit=He,e.findSeries=Je,e.flatMap=Ue,e.flatMapLimit=Qe,e.flatMapSeries=Ge,e.foldl=qe,e.foldr=ie,e.forEach=Ze,e.forEachLimit=$e,e.forEachOf=Me,e.forEachOfLimit=Ie,e.forEachOfSeries=Be,e.forEachSeries=et,e.forever=lt,e.groupBy=G,e.groupByLimit=dt,e.groupBySeries=W,e.inject=qe,e.log=ut,e.map=je,e.mapLimit=De,e.mapSeries=Te,e.mapValues=H,e.mapValuesLimit=pt,e.mapValuesSeries=J,e.memoize=K,e.nextTick=ct,e.parallel=X,e.parallelLimit=Z,e.priorityQueue=ae,e.queue=$,e.race=ft,e.reduce=qe,e.reduceRight=ie,e.reflect=re,e.reflectAll=se,e.reject=yt,e.rejectLimit=mt,e.rejectSeries=gt,e.retry=ue,e.retryable=ce,e.select=it,e.selectLimit=rt,e.selectSeries=st,e.seq=C,e.series=oe,e.setImmediate=_e,e.some=St,e.someLimit=xt,e.someSeries=Lt,e.sortBy=Et,e.timeout=he,e.times=me,e.timesLimit=ye,e.timesSeries=ge,e.transform=ke,e.tryEach=Ot,e.unmemoize=ve,e.until=Se,e.waterfall=bt,e.whilst=_t,e.wrapSync=r,Object.defineProperty(e,"__esModule",{value:!0})}); \ No newline at end of file diff --git a/backend/node_modules/async/dist/async.mjs b/backend/node_modules/async/dist/async.mjs new file mode 100644 index 00000000..2c599653 --- /dev/null +++ b/backend/node_modules/async/dist/async.mjs @@ -0,0 +1,5948 @@ +/** + * Creates a continuation function with some arguments already applied. + * + * Useful as a shorthand when combined with other control flow functions. Any + * arguments passed to the returned function are added to the arguments + * originally passed to apply. + * + * @name apply + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {Function} fn - The function you want to eventually apply all + * arguments to. Invokes with (arguments...). 
+ * @param {...*} arguments... - Any number of arguments to automatically apply + * when the continuation is called. + * @returns {Function} the partially-applied function + * @example + * + * // using apply + * async.parallel([ + * async.apply(fs.writeFile, 'testfile1', 'test1'), + * async.apply(fs.writeFile, 'testfile2', 'test2') + * ]); + * + * + * // the same process without using apply + * async.parallel([ + * function(callback) { + * fs.writeFile('testfile1', 'test1', callback); + * }, + * function(callback) { + * fs.writeFile('testfile2', 'test2', callback); + * } + * ]); + * + * // It's possible to pass any number of additional arguments when calling the + * // continuation: + * + * node> var fn = async.apply(sys.puts, 'one'); + * node> fn('two', 'three'); + * one + * two + * three + */ +function apply(fn, ...args) { + return (...callArgs) => fn(...args,...callArgs); +} + +function initialParams (fn) { + return function (...args/*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; +} + +/* istanbul ignore file */ + +var hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; +var hasSetImmediate = typeof setImmediate === 'function' && setImmediate; +var hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + +function fallback(fn) { + setTimeout(fn, 0); +} + +function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); +} + +var _defer$1; + +if (hasQueueMicrotask) { + _defer$1 = queueMicrotask; +} else if (hasSetImmediate) { + _defer$1 = setImmediate; +} else if (hasNextTick) { + _defer$1 = process.nextTick; +} else { + _defer$1 = fallback; +} + +var setImmediate$1 = wrap(_defer$1); + +/** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. 
+ * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ +function asyncify(func) { + if (isAsync(func)) { + return function (...args/*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback) + } + } + + return initialParams(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback) + } else { + callback(null, result); + } + }); +} + +function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && (err instanceof Error || err.message) ? err : new Error(err)); + }); +} + +function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + setImmediate$1(e => { throw e }, err); + } +} + +function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; +} + +function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; +} + +function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; +} + +function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function') + return isAsync(asyncFn) ? asyncify(asyncFn) : asyncFn; +} + +// conditionally promisify a function. +// only return a promise if a callback is omitted +function awaitify (asyncFn, arity) { + if (!arity) arity = asyncFn.length; + if (!arity) throw new Error('arity is undefined') + function awaitable (...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args) + } + + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err) + resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }) + } + + return awaitable +} + +function applyEach$1 (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = awaitify(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + wrapAsync(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; +} + +function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = wrapAsync(iteratee); + + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); +} + +function isArrayLike(value) { + return value && + typeof value.length === 'number' && + value.length >= 0 && + value.length % 1 === 0; +} + +// A temporary value used to identify if the loop should be broken. 
+// See #1064, #1293 +const breakLoop = {}; + +function once(fn) { + function wrapper (...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper +} + +function getIterator (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); +} + +function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? {value: coll[i], key: i} : null; + } +} + +function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) + return null; + i++; + return {value: item.value, key: i}; + } +} + +function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? {value: obj[key], key} : null; + }; +} + +function createIterator(coll) { + if (isArrayLike(coll)) { + return createArrayIterator(coll); + } + + var iterator = getIterator(coll); + return iterator ? createES2015Iterator(iterator) : createObjectIterator(coll); +} + +function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; +} + +// for async generators +function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({value, done: iterDone}) => { + //console.log('got value', value) + if (canceled || done) return + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } + + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return + if (err) return handleError(err) + + if (err === false) { + done = true; + canceled = true; + return + } + + if (result === breakLoop || (done && running <= 0)) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } + + function handleError(err) { + if (canceled) return + awaiting = false; + done = true; + callback(err); + } + + replenish(); +} + +var eachOfLimit$2 = (limit) => { + return (obj, iteratee, callback) => { + callback = once(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1') + } + if (!obj) { + return callback(null); + } + if (isAsyncGenerator(obj)) { + return asyncEachOfLimit(obj, limit, iteratee, callback) + } + if (isAsyncIterable(obj)) { + return asyncEachOfLimit(obj[Symbol.asyncIterator](), limit, iteratee, callback) + } + var nextElem = createIterator(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + + function iterateeCallback(err, value) { + if (canceled) return + running -= 1; + if (err) { + done = true; + callback(err); + } + else if (err === false) { + done = true; + canceled = true; + } + else if (value === breakLoop || (done && running <= 0)) { + done = true; + return callback(null); + } + else if (!looping) { + replenish(); + 
} + } + + function replenish () { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, onlyOnce(iterateeCallback)); + } + looping = false; + } + + replenish(); + }; +}; + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfLimit(coll, limit, iteratee, callback) { + return eachOfLimit$2(limit)(coll, wrapAsync(iteratee), callback); +} + +var eachOfLimit$1 = awaitify(eachOfLimit, 4); + +// eachOf implementation optimized for array-likes +function eachOfArrayLike(coll, iteratee, callback) { + callback = once(callback); + var index = 0, + completed = 0, + {length} = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return + if (err) { + callback(err); + } else if ((++completed === length) || value === breakLoop) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, onlyOnce(iteratorCallback)); + } +} + +// a generic version of eachOf which can handle array, object, and iterator cases. +function eachOfGeneric (coll, iteratee, callback) { + return eachOfLimit$1(coll, Infinity, iteratee, callback); +} + +/** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ +function eachOf(coll, iteratee, callback) { + var eachOfImplementation = isArrayLike(coll) ? eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, wrapAsync(iteratee), callback); +} + +var eachOf$1 = awaitify(eachOf, 3); + +/** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callbacks + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. 
However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function map (coll, iteratee, callback) { + return _asyncMap(eachOf$1, coll, iteratee, callback) +} +var map$1 = awaitify(map, 3); + +/** + * Applies the provided arguments to each function in the array, calling + * `callback` after all functions have completed. 
If you only provide the first + * argument, `fns`, then it will return a function which lets you pass in the + * arguments as if it were a single function call. If more arguments are + * provided, `callback` is required while `args` is still optional. The results + * for each of the applied async functions are passed to the final callback + * as an array. + * + * @name applyEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s + * to all call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - Returns a function that takes no args other than + * an optional callback, that is the result of applying the `args` to each + * of the functions. + * @example + * + * const appliedFn = async.applyEach([enableSearch, updateSchema], 'bucket') + * + * appliedFn((err, results) => { + * // results[0] is the results for `enableSearch` + * // results[1] is the results for `updateSchema` + * }); + * + * // partial application example: + * async.each( + * buckets, + * async (bucket) => async.applyEach([enableSearch, updateSchema], bucket)(), + * callback + * ); + */ +var applyEach = applyEach$1(map$1); + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfSeries(coll, iteratee, callback) { + return eachOfLimit$1(coll, 1, iteratee, callback) +} +var eachOfSeries$1 = awaitify(eachOfSeries, 3); + +/** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapSeries (coll, iteratee, callback) { + return _asyncMap(eachOfSeries$1, coll, iteratee, callback) +} +var mapSeries$1 = awaitify(mapSeries, 3); + +/** + * The same as [`applyEach`]{@link module:ControlFlow.applyEach} but runs only a single async operation at a time. 
+ * + * @name applyEachSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.applyEach]{@link module:ControlFlow.applyEach} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} fns - A collection of {@link AsyncFunction}s to all + * call with the same arguments + * @param {...*} [args] - any number of separate arguments to pass to the + * function. + * @param {Function} [callback] - the final argument should be the callback, + * called when all functions have completed processing. + * @returns {AsyncFunction} - A function, that when called, is the result of + * appling the `args` to the list of functions. It takes no args, other than + * a callback. + */ +var applyEachSeries = applyEach$1(mapSeries$1); + +const PROMISE_SYMBOL = Symbol('promiseCallback'); + +function promiseCallback () { + let resolve, reject; + function callback (err, ...args) { + if (err) return reject(err) + resolve(args.length > 1 ? args : args[0]); + } + + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, + reject = rej; + }); + + return callback +} + +/** + * Determines the best order for running the {@link AsyncFunction}s in `tasks`, based on + * their requirements. Each function can optionally depend on other functions + * being completed first, and each function is run as soon as its requirements + * are satisfied. + * + * If any of the {@link AsyncFunction}s pass an error to their callback, the `auto` sequence + * will stop. Further tasks will not execute (so any other functions depending + * on it will not run), and the main `callback` is immediately called with the + * error. + * + * {@link AsyncFunction}s also receive an object containing the results of functions which + * have completed so far as the first argument, if they have dependencies. If a + * task function has no dependencies, it will only be passed a callback. + * + * @name auto + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Object} tasks - An object. Each of its properties is either a + * function or an array of requirements, with the {@link AsyncFunction} itself the last item + * in the array. The object's key of a property serves as the name of the task + * defined by that property, i.e. can be used when specifying requirements for + * other tasks. The function receives one or two arguments: + * * a `results` object, containing the results of the previously executed + * functions, only passed if the task has any dependencies, + * * a `callback(err, result)` function, which must be called when finished, + * passing an `error` (which can be `null`) and the result of the function's + * execution. + * @param {number} [concurrency=Infinity] - An optional `integer` for + * determining the maximum number of tasks that can be run in parallel. By + * default, as many as possible. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback. Results are always returned; however, if an + * error occurs, no further `tasks` will be performed, and the results object + * will only contain partial results. Invoked with (err, results). 
+ * @returns {Promise} a promise, if a callback is not passed + * @example + * + * //Using Callbacks + * async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }, function(err, results) { + * if (err) { + * console.log('err = ', err); + * } + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }); + * + * //Using Promises + * async.auto({ + * get_data: function(callback) { + * console.log('in get_data'); + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * console.log('in make_folder'); + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... + * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }).then(results => { + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * }).catch(err => { + * console.log('err = ', err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.auto({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: ['get_data', 'make_folder', function(results, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(results, callback) { + * // once the file is written let's email a link to it... 
+ * callback(null, {'file':results.write_file, 'email':'user@example.com'}); + * }] + * }); + * console.log('results = ', results); + * // results = { + * // get_data: ['data', 'converted to array'] + * // make_folder; 'folder', + * // write_file: 'filename' + * // email_link: { file: 'filename', email: 'user@example.com' } + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function auto(tasks, concurrency, callback) { + if (typeof concurrency !== 'number') { + // concurrency is optional, shift the args. + callback = concurrency; + concurrency = null; + } + callback = once(callback || promiseCallback()); + var numTasks = Object.keys(tasks).length; + if (!numTasks) { + return callback(null); + } + if (!concurrency) { + concurrency = numTasks; + } + + var results = {}; + var runningTasks = 0; + var canceled = false; + var hasError = false; + + var listeners = Object.create(null); + + var readyTasks = []; + + // for cycle detection: + var readyToCheck = []; // tasks that have been identified as reachable + // without the possibility of returning to an ancestor task + var uncheckedDependencies = {}; + + Object.keys(tasks).forEach(key => { + var task = tasks[key]; + if (!Array.isArray(task)) { + // no dependencies + enqueueTask(key, [task]); + readyToCheck.push(key); + return; + } + + var dependencies = task.slice(0, task.length - 1); + var remainingDependencies = dependencies.length; + if (remainingDependencies === 0) { + enqueueTask(key, task); + readyToCheck.push(key); + return; + } + uncheckedDependencies[key] = remainingDependencies; + + dependencies.forEach(dependencyName => { + if (!tasks[dependencyName]) { + throw new Error('async.auto task `' + key + + '` has a non-existent dependency `' + + dependencyName + '` in ' + + dependencies.join(', ')); + } + addListener(dependencyName, () => { + remainingDependencies--; + if (remainingDependencies === 0) { + enqueueTask(key, task); + } + }); + }); + }); + + checkForDeadlocks(); + processQueue(); + + function enqueueTask(key, task) { + readyTasks.push(() => runTask(key, task)); + } + + function processQueue() { + if (canceled) return + if (readyTasks.length === 0 && runningTasks === 0) { + return callback(null, results); + } + while(readyTasks.length && runningTasks < concurrency) { + var run = readyTasks.shift(); + run(); + } + + } + + function addListener(taskName, fn) { + var taskListeners = listeners[taskName]; + if (!taskListeners) { + taskListeners = listeners[taskName] = []; + } + + taskListeners.push(fn); + } + + function taskComplete(taskName) { + var taskListeners = listeners[taskName] || []; + taskListeners.forEach(fn => fn()); + processQueue(); + } + + + function runTask(key, task) { + if (hasError) return; + + var taskCallback = onlyOnce((err, ...result) => { + runningTasks--; + if (err === false) { + canceled = true; + return + } + if (result.length < 2) { + [result] = result; + } + if (err) { + var safeResults = {}; + Object.keys(results).forEach(rkey => { + safeResults[rkey] = results[rkey]; + }); + safeResults[key] = result; + hasError = true; + listeners = Object.create(null); + if (canceled) return + callback(err, safeResults); + } else { + results[key] = result; + taskComplete(key); + } + }); + + runningTasks++; + var taskFn = wrapAsync(task[task.length - 1]); + if (task.length > 1) { + taskFn(results, taskCallback); + } else { + taskFn(taskCallback); + } + } + + function checkForDeadlocks() { + // Kahn's algorithm + // https://en.wikipedia.org/wiki/Topological_sorting#Kahn.27s_algorithm + // 
http://connalle.blogspot.com/2013/10/topological-sortingkahn-algorithm.html + var currentTask; + var counter = 0; + while (readyToCheck.length) { + currentTask = readyToCheck.pop(); + counter++; + getDependents(currentTask).forEach(dependent => { + if (--uncheckedDependencies[dependent] === 0) { + readyToCheck.push(dependent); + } + }); + } + + if (counter !== numTasks) { + throw new Error( + 'async.auto cannot execute tasks due to a recursive dependency' + ); + } + } + + function getDependents(taskName) { + var result = []; + Object.keys(tasks).forEach(key => { + const task = tasks[key]; + if (Array.isArray(task) && task.indexOf(taskName) >= 0) { + result.push(key); + } + }); + return result; + } + + return callback[PROMISE_SYMBOL] +} + +var FN_ARGS = /^(?:async\s)?(?:function)?\s*(?:\w+\s*)?\(([^)]+)\)(?:\s*{)/; +var ARROW_FN_ARGS = /^(?:async\s)?\s*(?:\(\s*)?((?:[^)=\s]\s*)*)(?:\)\s*)?=>/; +var FN_ARG_SPLIT = /,/; +var FN_ARG = /(=.+)?(\s*)$/; + +function stripComments(string) { + let stripped = ''; + let index = 0; + let endBlockComment = string.indexOf('*/'); + while (index < string.length) { + if (string[index] === '/' && string[index+1] === '/') { + // inline comment + let endIndex = string.indexOf('\n', index); + index = (endIndex === -1) ? string.length : endIndex; + } else if ((endBlockComment !== -1) && (string[index] === '/') && (string[index+1] === '*')) { + // block comment + let endIndex = string.indexOf('*/', index); + if (endIndex !== -1) { + index = endIndex + 2; + endBlockComment = string.indexOf('*/', index); + } else { + stripped += string[index]; + index++; + } + } else { + stripped += string[index]; + index++; + } + } + return stripped; +} + +function parseParams(func) { + const src = stripComments(func.toString()); + let match = src.match(FN_ARGS); + if (!match) { + match = src.match(ARROW_FN_ARGS); + } + if (!match) throw new Error('could not parse args in autoInject\nSource:\n' + src) + let [, args] = match; + return args + .replace(/\s/g, '') + .split(FN_ARG_SPLIT) + .map((arg) => arg.replace(FN_ARG, '').trim()); +} + +/** + * A dependency-injected version of the [async.auto]{@link module:ControlFlow.auto} function. Dependent + * tasks are specified as parameters to the function, after the usual callback + * parameter, with the parameter names matching the names of the tasks it + * depends on. This can provide even more readable task graphs which can be + * easier to maintain. + * + * If a final callback is specified, the task results are similarly injected, + * specified as named parameters after the initial error parameter. + * + * The autoInject function is purely syntactic sugar and its semantics are + * otherwise equivalent to [async.auto]{@link module:ControlFlow.auto}. + * + * @name autoInject + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.auto]{@link module:ControlFlow.auto} + * @category Control Flow + * @param {Object} tasks - An object, each of whose properties is an {@link AsyncFunction} of + * the form 'func([dependencies...], callback). The object's key of a property + * serves as the name of the task defined by that property, i.e. can be used + * when specifying requirements for other tasks. + * * The `callback` parameter is a `callback(err, result)` which must be called + * when finished, passing an `error` (which can be `null`) and the result of + * the function's execution. 
The remaining parameters name other tasks on + * which the task is dependent, and the results from those tasks are the + * arguments of those parameters. + * @param {Function} [callback] - An optional callback which is called when all + * the tasks have been completed. It receives the `err` argument if any `tasks` + * pass an error to their callback, and a `results` object with any completed + * task results, similar to `auto`. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // The example from `auto` can be rewritten as follows: + * async.autoInject({ + * get_data: function(callback) { + * // async code to get some data + * callback(null, 'data', 'converted to array'); + * }, + * make_folder: function(callback) { + * // async code to create a directory to store a file in + * // this is run at the same time as getting the data + * callback(null, 'folder'); + * }, + * write_file: function(get_data, make_folder, callback) { + * // once there is some data and the directory exists, + * // write the data to a file in the directory + * callback(null, 'filename'); + * }, + * email_link: function(write_file, callback) { + * // once the file is written let's email a link to it... + * // write_file contains the filename returned by write_file. + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * } + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + * + * // If you are using a JS minifier that mangles parameter names, `autoInject` + * // will not work with plain functions, since the parameter names will be + * // collapsed to a single letter identifier. To work around this, you can + * // explicitly specify the names of the parameters your task function needs + * // in an array, similar to Angular.js dependency injection. + * + * // This still has an advantage over plain `auto`, since the results a task + * // depends on are still spread into arguments. + * async.autoInject({ + * //... + * write_file: ['get_data', 'make_folder', function(get_data, make_folder, callback) { + * callback(null, 'filename'); + * }], + * email_link: ['write_file', function(write_file, callback) { + * callback(null, {'file':write_file, 'email':'user@example.com'}); + * }] + * //... + * }, function(err, results) { + * console.log('err = ', err); + * console.log('email_link = ', results.email_link); + * }); + */ +function autoInject(tasks, callback) { + var newTasks = {}; + + Object.keys(tasks).forEach(key => { + var taskFn = tasks[key]; + var params; + var fnIsAsync = isAsync(taskFn); + var hasNoDeps = + (!fnIsAsync && taskFn.length === 1) || + (fnIsAsync && taskFn.length === 0); + + if (Array.isArray(taskFn)) { + params = [...taskFn]; + taskFn = params.pop(); + + newTasks[key] = params.concat(params.length > 0 ? 
newTask : taskFn); + } else if (hasNoDeps) { + // no dependencies, use the function as-is + newTasks[key] = taskFn; + } else { + params = parseParams(taskFn); + if ((taskFn.length === 0 && !fnIsAsync) && params.length === 0) { + throw new Error("autoInject task functions require explicit parameters."); + } + + // remove callback param + if (!fnIsAsync) params.pop(); + + newTasks[key] = params.concat(newTask); + } + + function newTask(results, taskCb) { + var newArgs = params.map(name => results[name]); + newArgs.push(taskCb); + wrapAsync(taskFn)(...newArgs); + } + }); + + return auto(newTasks, callback); +} + +// Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation +// used for queues. This implementation assumes that the node provided by the user can be modified +// to adjust the next and last properties. We implement only the minimal functionality +// for queue support. +class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } + + removeLink(node) { + if (node.prev) node.prev.next = node.next; + else this.head = node.next; + if (node.next) node.next.prev = node.prev; + else this.tail = node.prev; + + node.prev = node.next = null; + this.length -= 1; + return node; + } + + empty () { + while(this.head) this.shift(); + return this; + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode; + else this.tail = newNode; + node.next = newNode; + this.length += 1; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode; + else this.head = newNode; + node.prev = newNode; + this.length += 1; + } + + unshift(node) { + if (this.head) this.insertBefore(this.head, node); + else setInitial(this, node); + } + + push(node) { + if (this.tail) this.insertAfter(this.tail, node); + else setInitial(this, node); + } + + shift() { + return this.head && this.removeLink(this.head); + } + + pop() { + return this.tail && this.removeLink(this.tail); + } + + toArray() { + return [...this] + } + + *[Symbol.iterator] () { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove (testFn) { + var curr = this.head; + while(curr) { + var {next} = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } +} + +function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; +} + +function queue$1(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } + else if(concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } + + var _worker = wrapAsync(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; + + function on (event, handler) { + events[event].push(handler); + } + + function once (event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + + function off (event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []) + if (!handler) return events[event] = [] + events[event] = events[event].filter(ev => ev !== handler); + } + + function trigger (event, ...args) { + events[event].forEach(handler => handler(...args)); + } + + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && 
typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + + var res, rej; + function promiseCallback (err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res() + if (args.length <= 1) return res(args[0]) + res(args); + } + + var item = q._createTaskItem( + data, + rejectOnError ? promiseCallback : + (callback || promiseCallback) + ); + + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + + if (!processingScheduled) { + processingScheduled = true; + setImmediate$1(() => { + processingScheduled = false; + q.process(); + }); + } + + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }) + } + } + + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; + + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; + + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } + + task.callback(err, ...args); + + if (err != null) { + trigger('error', err, task.data); + } + } + + if (numRunning <= (q.concurrency - q.buffer) ) { + trigger('unsaturated'); + } + + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } + + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + setImmediate$1(() => trigger('drain')); + return true + } + return false + } + + const eventMethod = (name) => (handler) => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err) + resolve(data); + }); + }) + } + off(name); + on(name, handler); + + }; + + var isProcessing = false; + var q = { + _tasks: new DLL(), + _createTaskItem (data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator] () { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, false, callback)) + } + return _insert(data, false, false, callback); + }, + pushAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, false, true, callback)) + } + return _insert(data, false, true, callback); + }, + kill () { + off(); + q._tasks.empty(); + }, + unshift (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, false, callback)) + } + return _insert(data, true, false, callback); + }, + unshiftAsync (data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return + return data.map(datum => _insert(datum, true, true, callback)) + } + return _insert(data, true, true, callback); + }, + remove (testFn) { + q._tasks.remove(testFn); + }, + process () { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). 
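+            // The isProcessing flag below makes this method non-reentrant: if a worker
+            // callback fires synchronously and re-enters process(), the nested call
+            // returns immediately and the outer while-loop keeps draining the task list.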
+ if (isProcessing) { + return; + } + isProcessing = true; + while(!q.paused && numRunning < q.concurrency && q._tasks.length){ + var tasks = [], data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + + numRunning += 1; + + if (q._tasks.length === 0) { + trigger('empty'); + } + + if (numRunning === q.concurrency) { + trigger('saturated'); + } + + var cb = onlyOnce(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length () { + return q._tasks.length; + }, + running () { + return numRunning; + }, + workersList () { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause () { + q.paused = true; + }, + resume () { + if (q.paused === false) { return; } + q.paused = false; + setImmediate$1(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + }, + }); + return q; +} + +/** + * Creates a `cargo` object with the specified payload. Tasks added to the + * cargo will be processed altogether (up to the `payload` limit). If the + * `worker` is in progress, the task is queued until it becomes available. Once + * the `worker` has completed some tasks, each callback of those tasks is + * called. Check out [these](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) [animations](https://camo.githubusercontent.com/f4810e00e1c5f5f8addbe3e9f49064fd5d102699/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130312f38346339323036362d356632392d313165322d383134662d3964336430323431336266642e676966) + * for how `cargo` and `queue` work. + * + * While [`queue`]{@link module:ControlFlow.queue} passes only one task to one of a group of workers + * at a time, cargo passes an array of tasks to a single worker, repeating + * when the worker is finished. + * + * @name cargo + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An asynchronous function for processing an array + * of queued tasks. Invoked with `(tasks, callback)`. + * @param {number} [payload=Infinity] - An optional `integer` for determining + * how many tasks should be processed per round; if omitted, the default is + * unlimited. + * @returns {module:ControlFlow.QueueObject} A cargo object to manage the tasks. Callbacks can + * attached as certain properties to listen for specific events during the + * lifecycle of the cargo and inner queue. 
+ * @example + * + * // create a cargo object with payload 2 + * var cargo = async.cargo(function(tasks, callback) { + * for (var i=0; i { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = once(callback); + var _iteratee = wrapAsync(iteratee); + return eachOfSeries$1(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +var reduce$1 = awaitify(reduce, 4); + +/** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ +function seq(...functions) { + var _functions = functions.map(wrapAsync); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = promiseCallback(); + } + + reduce$1(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, + (err, results) => cb(err, ...results)); + + return cb[PROMISE_SYMBOL] + }; +} + +/** + * Creates a function which is a composition of the passed asynchronous + * functions. Each function consumes the return value of the function that + * follows. 
Composing functions `f()`, `g()`, and `h()` would produce the result + * of `f(g(h()))`, only this version uses callbacks to obtain the return values. + * + * If the last argument to the composed function is not a function, a promise + * is returned when you call it. + * + * Each function is executed with the `this` binding of the composed function. + * + * @name compose + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} an asynchronous function that is the composed + * asynchronous `functions` + * @example + * + * function add1(n, callback) { + * setTimeout(function () { + * callback(null, n + 1); + * }, 10); + * } + * + * function mul3(n, callback) { + * setTimeout(function () { + * callback(null, n * 3); + * }, 10); + * } + * + * var add1mul3 = async.compose(mul3, add1); + * add1mul3(4, function (err, result) { + * // result now equals 15 + * }); + */ +function compose(...args) { + return seq(...args.reverse()); +} + +/** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapLimit (coll, limit, iteratee, callback) { + return _asyncMap(eachOfLimit$2(limit), coll, iteratee, callback) +} +var mapLimit$1 = awaitify(mapLimit, 4); + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
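+ *
+ * A brief illustration (an editor's sketch, not part of the upstream async docs),
+ * reading the same dir1/dir2/dir3 fixture used in the `concat` example below with
+ * at most two readdir calls in flight at a time:
+ *
+ *     async.concatLimit(['dir1', 'dir2', 'dir3'], 2, fs.readdir, (err, files) => {
+ *         if (err) return console.error(err);
+ *         console.log(files);
+ *         // one flattened array containing the entries of all three directories
+ *     });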
+ * @returns A Promise, if no callback is passed + */ +function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); +} +var concatLimit$1 = awaitify(concatLimit, 4); + +/** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * + * // Using callbacks + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.concat(directoryList, fs.readdir) + * .then(results => { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ +function concat(coll, iteratee, 
callback) { + return concatLimit$1(coll, Infinity, iteratee, callback) +} +var concat$1 = awaitify(concat, 3); + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatSeries(coll, iteratee, callback) { + return concatLimit$1(coll, 1, iteratee, callback) +} +var concatSeries$1 = awaitify(concatSeries, 3); + +/** + * Returns a function that when called, calls-back with the values provided. + * Useful as the first function in a [`waterfall`]{@link module:ControlFlow.waterfall}, or for plugging values in to + * [`auto`]{@link module:ControlFlow.auto}. + * + * @name constant + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {...*} arguments... - Any number of arguments to automatically invoke + * callback with. + * @returns {AsyncFunction} Returns a function that when invoked, automatically + * invokes the callback with the previous given arguments. + * @example + * + * async.waterfall([ + * async.constant(42), + * function (value, next) { + * // value === 42 + * }, + * //... + * ], callback); + * + * async.waterfall([ + * async.constant(filename, "utf8"), + * fs.readFile, + * function (fileData, next) { + * //... + * } + * //... + * ], callback); + * + * async.auto({ + * hostname: async.constant("https://server.net/"), + * port: findFreePort, + * launchServer: ["hostname", "port", function (options, cb) { + * startServer(options, cb); + * }], + * //... + * }, callback); + */ +function constant$1(...args) { + return function (...ignoredArgs/*, callback*/) { + var callback = ignoredArgs.pop(); + return callback(null, ...args); + }; +} + +function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = wrapAsync(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, breakLoop); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; +} + +/** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. + + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. 
+ * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function detect(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOf$1, coll, iteratee, callback) +} +var detect$1 = awaitify(detect, 3); + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. + * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectLimit(coll, limit, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit$2(limit), coll, iteratee, callback) +} +var detectLimit$1 = awaitify(detectLimit, 4); + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. 
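+ *
+ * A short sketch (an editor's addition, not from the upstream docs), reusing the
+ * `fileExists` helper shown in the `detect` example above:
+ *
+ *     async.detectSeries(['file3.txt', 'file2.txt', 'dir1/file1.txt'], fileExists,
+ *         (err, result) => {
+ *             // the files are tested one at a time, so `result` is the first
+ *             // existing file in list order (or undefined if none exist)
+ *         }
+ *     );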
+ * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectSeries(coll, iteratee, callback) { + return _createTester(bool => bool, (res, item) => item)(eachOfLimit$2(1), coll, iteratee, callback) +} + +var detectSeries$1 = awaitify(detectSeries, 3); + +function consoleFunc(name) { + return (fn, ...args) => wrapAsync(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }) +} + +/** + * Logs the result of an [`async` function]{@link AsyncFunction} to the + * `console` using `console.dir` to display the properties of the resulting object. + * Only works in Node.js or in browsers that support `console.dir` and + * `console.error` (such as FF and Chrome). + * If multiple arguments are returned from the async function, + * `console.dir` is called on each argument in order. + * + * @name dir + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, {hello: name}); + * }, 1000); + * }; + * + * // in the node repl + * node> async.dir(hello, 'world'); + * {hello: 'world'} + */ +var dir = consoleFunc('dir'); + +/** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doWhilst(iteratee, test, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); +} + +var doWhilst$1 = awaitify(doWhilst, 3); + +/** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doUntil(iteratee, test, callback) { + const _test = wrapAsync(test); + return doWhilst$1(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb (err, !truth)); + }, callback); +} + +function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); +} + +/** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted
+ * @example
+ *
+ * // dir1 is a directory that contains file1.txt, file2.txt
+ * // dir2 is a directory that contains file3.txt, file4.txt
+ * // dir3 is a directory that contains file5.txt
+ * // dir4 does not exist
+ *
+ * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir3/file5.txt'];
+ * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt'];
+ *
+ * // asynchronous function that deletes a file
+ * const deleteFile = function(file, callback) {
+ *     fs.unlink(file, callback);
+ * };
+ *
+ * // Using callbacks
+ * async.each(fileList, deleteFile, function(err) {
+ *     if( err ) {
+ *         console.log(err);
+ *     } else {
+ *         console.log('All files have been deleted successfully');
+ *     }
+ * });
+ *
+ * // Error Handling
+ * async.each(withMissingFileList, deleteFile, function(err){
+ *     console.log(err);
+ *     // [ Error: ENOENT: no such file or directory ]
+ *     // since dir4/file2.txt does not exist
+ *     // dir1/file1.txt could have been deleted
+ * });
+ *
+ * // Using Promises
+ * async.each(fileList, deleteFile)
+ * .then( () => {
+ *     console.log('All files have been deleted successfully');
+ * }).catch( err => {
+ *     console.log(err);
+ * });
+ *
+ * // Error Handling
+ * async.each(withMissingFileList, deleteFile)
+ * .then( () => {
+ *     console.log('All files have been deleted successfully');
+ * }).catch( err => {
+ *     console.log(err);
+ *     // [ Error: ENOENT: no such file or directory ]
+ *     // since dir4/file2.txt does not exist
+ *     // dir1/file1.txt could have been deleted
+ * });
+ *
+ * // Using async/await
+ * async () => {
+ *     try {
+ *         await async.each(fileList, deleteFile);
+ *     }
+ *     catch (err) {
+ *         console.log(err);
+ *     }
+ * }
+ *
+ * // Error Handling
+ * async () => {
+ *     try {
+ *         await async.each(withMissingFileList, deleteFile);
+ *     }
+ *     catch (err) {
+ *         console.log(err);
+ *         // [ Error: ENOENT: no such file or directory ]
+ *         // since dir4/file2.txt does not exist
+ *         // dir1/file1.txt could have been deleted
+ *     }
+ * }
+ *
+ */
+function eachLimit$2(coll, iteratee, callback) {
+    return eachOf$1(coll, _withoutIndex(wrapAsync(iteratee)), callback);
+}
+
+var each = awaitify(eachLimit$2, 3);
+
+/**
+ * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time.
+ *
+ * @name eachLimit
+ * @static
+ * @memberOf module:Collections
+ * @method
+ * @see [async.each]{@link module:Collections.each}
+ * @alias forEachLimit
+ * @category Collection
+ * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
+ * @param {number} limit - The maximum number of async operations at a time.
+ * @param {AsyncFunction} iteratee - An async function to apply to each item in
+ * `coll`.
+ * The array index is not passed to the iteratee.
+ * If you need the index, use `eachOfLimit`.
+ * Invoked with (item, callback).
+ * @param {Function} [callback] - A callback which is called when all
+ * `iteratee` functions have finished, or an error occurs. Invoked with (err).
+ * @returns {Promise} a promise, if a callback is omitted
+ */
+function eachLimit(coll, limit, iteratee, callback) {
+    return eachOfLimit$2(limit)(coll, _withoutIndex(wrapAsync(iteratee)), callback);
+}
+var eachLimit$1 = awaitify(eachLimit, 4);
+
+/**
+ * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time.
+ * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachSeries(coll, iteratee, callback) { + return eachLimit$1(coll, 1, iteratee, callback) +} +var eachSeries$1 = awaitify(eachSeries, 3); + +/** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. + * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ +function ensureAsync(fn) { + if (isAsync(fn)) return fn; + return function (...args/*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + setImmediate$1(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; +} + +/** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function every(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOf$1, coll, iteratee, callback) +} +var every$1 = awaitify(every, 3); + +/** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everyLimit(coll, limit, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfLimit$2(limit), coll, iteratee, callback) +} +var everyLimit$1 = awaitify(everyLimit, 4); + +/** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. 
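+ *
+ * A minimal sketch (an editor's addition, not from the upstream docs), reusing the
+ * `fileList` and `fileExists` fixtures from the `every` example above:
+ *
+ *     async.everySeries(fileList, fileExists, (err, allExist) => {
+ *         // the checks run one at a time; allExist is true only if every
+ *         // file passed the fileExists test
+ *     });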
+ * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everySeries(coll, iteratee, callback) { + return _createTester(bool => !bool, res => !res)(eachOfSeries$1, coll, iteratee, callback) +} +var everySeries$1 = awaitify(everySeries, 3); + +function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); +} + +function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({index, value: x}); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results + .sort((a, b) => a.index - b.index) + .map(v => v.value)); + }); +} + +function _filter(eachfn, coll, iteratee, callback) { + var filter = isArrayLike(coll) ? filterArray : filterGeneric; + return filter(eachfn, coll, wrapAsync(iteratee), callback); +} + +/** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function filter (coll, iteratee, callback) { + return _filter(eachOf$1, coll, iteratee, callback) +} +var filter$1 = awaitify(filter, 3); + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ +function filterLimit (coll, limit, iteratee, callback) { + return _filter(eachOfLimit$2(limit), coll, iteratee, callback) +} +var filterLimit$1 = awaitify(filterLimit, 4); + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. 
Invoked with (err, results).
+ * @returns {Promise} a promise, if no callback provided
+ */
+function filterSeries (coll, iteratee, callback) {
+    return _filter(eachOfSeries$1, coll, iteratee, callback)
+}
+var filterSeries$1 = awaitify(filterSeries, 3);
+
+/**
+ * Calls the asynchronous function `fn` with a callback parameter that allows it
+ * to call itself again, in series, indefinitely.
+
+ * If an error is passed to the callback then `errback` is called with the
+ * error, and execution stops, otherwise it will never be called.
+ *
+ * @name forever
+ * @static
+ * @memberOf module:ControlFlow
+ * @method
+ * @category Control Flow
+ * @param {AsyncFunction} fn - an async function to call repeatedly.
+ * Invoked with (next).
+ * @param {Function} [errback] - when `fn` passes an error to its callback,
+ * this function will be called, and execution stops. Invoked with (err).
+ * @returns {Promise} a promise that rejects if an error occurs and an errback
+ * is not passed
+ * @example
+ *
+ * async.forever(
+ *     function(next) {
+ *         // next is suitable for passing to things that need a callback(err [, whatever]);
+ *         // it will result in this function being called again.
+ *     },
+ *     function(err) {
+ *         // if next is called with a value in its first parameter, it will appear
+ *         // in here as 'err', and execution will stop.
+ *     }
+ * );
+ */
+function forever(fn, errback) {
+    var done = onlyOnce(errback);
+    var task = wrapAsync(ensureAsync(fn));
+
+    function next(err) {
+        if (err) return done(err);
+        if (err === false) return;
+        task(next);
+    }
+    return next();
+}
+var forever$1 = awaitify(forever, 2);
+
+/**
+ * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time.
+ *
+ * @name groupByLimit
+ * @static
+ * @memberOf module:Collections
+ * @method
+ * @see [async.groupBy]{@link module:Collections.groupBy}
+ * @category Collection
+ * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over.
+ * @param {number} limit - The maximum number of async operations at a time.
+ * @param {AsyncFunction} iteratee - An async function to apply to each item in
+ * `coll`.
+ * The iteratee should complete with a `key` to group the value under.
+ * Invoked with (value, callback).
+ * @param {Function} [callback] - A callback which is called when all `iteratee`
+ * functions have finished, or an error occurs. Result is an `Object` whose
+ * properties are arrays of values which returned the corresponding key.
+ * @returns {Promise} a promise, if no callback is passed
+ */
+function groupByLimit(coll, limit, iteratee, callback) {
+    var _iteratee = wrapAsync(iteratee);
+    return mapLimit$1(coll, limit, (val, iterCb) => {
+        _iteratee(val, (err, key) => {
+            if (err) return iterCb(err);
+            return iterCb(err, {key, val});
+        });
+    }, (err, mapResults) => {
+        var result = {};
+        // from MDN, handle object having an `hasOwnProperty` prop
+        var {hasOwnProperty} = Object.prototype;
+
+        for (var i = 0; i < mapResults.length; i++) {
+            if (mapResults[i]) {
+                var {key} = mapResults[i];
+                var {val} = mapResults[i];
+
+                if (hasOwnProperty.call(result, key)) {
+                    result[key].push(val);
+                } else {
+                    result[key] = [val];
+                }
+            }
+        }
+
+        return callback(err, result);
+    });
+}
+
+var groupByLimit$1 = awaitify(groupByLimit, 4);
+
+/**
+ * Returns a new object, where each value corresponds to an array of items, from
+ * `coll`, that returned the corresponding key.
That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const files = ['dir1/file1.txt','dir2','dir4'] + * + * // asynchronous function that detects file type as none, file, or directory + * function detectFile(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(null, 'none'); + * } + * callback(null, stat.isDirectory() ? 'directory' : 'file'); + * }); + * } + * + * //Using callbacks + * async.groupBy(files, detectFile, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * }); + * + * // Using Promises + * async.groupBy(files, detectFile) + * .then( result => { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.groupBy(files, detectFile); + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function groupBy (coll, iteratee, callback) { + return groupByLimit$1(coll, Infinity, iteratee, callback) +} + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). 
+ * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + */ +function groupBySeries (coll, iteratee, callback) { + return groupByLimit$1(coll, 1, iteratee, callback) +} + +/** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. + * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ +var log = consoleFunc('log'); + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesLimit(obj, limit, iteratee, callback) { + callback = once(callback); + var newObj = {}; + var _iteratee = wrapAsync(iteratee); + return eachOfLimit$2(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); +} + +var mapValuesLimit$1 = awaitify(mapValuesLimit, 4); + +/** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file3.txt' + * }; + * + * const withMissingFileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file4.txt' + * }; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, key, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * }); + * + * // Error handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.mapValues(fileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * }).catch (err => { + * console.log(err); + * }); + * + * // Error Handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch (err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.mapValues(fileMap, getFileSizeInBytes); + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function mapValues(obj, iteratee, callback) { + return mapValuesLimit$1(obj, Infinity, iteratee, callback) +} + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. 
+ * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesSeries(obj, iteratee, callback) { + return mapValuesLimit$1(obj, 1, iteratee, callback) +} + +/** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. + * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ +function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = wrapAsync(fn); + var memoized = initialParams((args, callback) => { + var key = hasher(...args); + if (key in memo) { + setImmediate$1(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; +} + +/* istanbul ignore file */ + +/** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... 
- any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ +var _defer; + +if (hasNextTick) { + _defer = process.nextTick; +} else if (hasSetImmediate) { + _defer = setImmediate; +} else { + _defer = fallback; +} + +var nextTick = wrap(_defer); + +var _parallel = awaitify((eachfn, tasks, callback) => { + var results = isArrayLike(tasks) ? [] : {}; + + eachfn(tasks, (task, key, taskCb) => { + wrapAsync(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); +}, 3); + +/** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * + * //Using Callbacks + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
+ * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function parallel(tasks, callback) { + return _parallel(eachOf$1, tasks, callback); +} + +/** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + */ +function parallelLimit(tasks, limit, callback) { + return _parallel(eachOfLimit$2(limit), tasks, callback); +} + +/** + * A queue of tasks for the worker function to complete. 
+ * @typedef {Iterable} QueueObject + * @memberOf module:ControlFlow + * @property {Function} length - a function returning the number of items + * waiting to be processed. Invoke with `queue.length()`. + * @property {boolean} started - a boolean indicating whether or not any + * items have been pushed and processed by the queue. + * @property {Function} running - a function returning the number of items + * currently being processed. Invoke with `queue.running()`. + * @property {Function} workersList - a function returning the array of items + * currently being processed. Invoke with `queue.workersList()`. + * @property {Function} idle - a function returning false if there are items + * waiting or being processed, or true if not. Invoke with `queue.idle()`. + * @property {number} concurrency - an integer for determining how many `worker` + * functions should be run in parallel. This property can be changed after a + * `queue` is created to alter the concurrency on-the-fly. + * @property {number} payload - an integer that specifies how many items are + * passed to the worker function at a time. only applies if this is a + * [cargo]{@link module:ControlFlow.cargo} object + * @property {AsyncFunction} push - add a new task to the `queue`. Calls `callback` + * once the `worker` has finished processing the task. Instead of a single task, + * a `tasks` array can be submitted. The respective callback is used for every + * task in the list. Invoke with `queue.push(task, [callback])`, + * @property {AsyncFunction} unshift - add a new task to the front of the `queue`. + * Invoke with `queue.unshift(task, [callback])`. + * @property {AsyncFunction} pushAsync - the same as `q.push`, except this returns + * a promise that rejects if an error occurs. + * @property {AsyncFunction} unshiftAsync - the same as `q.unshift`, except this returns + * a promise that rejects if an error occurs. + * @property {Function} remove - remove items from the queue that match a test + * function. The test function will be passed an object with a `data` property, + * and a `priority` property, if this is a + * [priorityQueue]{@link module:ControlFlow.priorityQueue} object. + * Invoked with `queue.remove(testFn)`, where `testFn` is of the form + * `function ({data, priority}) {}` and returns a Boolean. + * @property {Function} saturated - a function that sets a callback that is + * called when the number of running workers hits the `concurrency` limit, and + * further tasks will be queued. If the callback is omitted, `q.saturated()` + * returns a promise for the next occurrence. + * @property {Function} unsaturated - a function that sets a callback that is + * called when the number of running workers is less than the `concurrency` & + * `buffer` limits, and further tasks will not be queued. If the callback is + * omitted, `q.unsaturated()` returns a promise for the next occurrence. + * @property {number} buffer - A minimum threshold buffer in order to say that + * the `queue` is `unsaturated`. + * @property {Function} empty - a function that sets a callback that is called + * when the last item from the `queue` is given to a `worker`. If the callback + * is omitted, `q.empty()` returns a promise for the next occurrence. + * @property {Function} drain - a function that sets a callback that is called + * when the last item from the `queue` has returned from the `worker`. If the + * callback is omitted, `q.drain()` returns a promise for the next occurrence. 
+ * @property {Function} error - a function that sets a callback that is called + * when a task errors. Has the signature `function(error, task)`. If the + * callback is omitted, `error()` returns a promise that rejects on the next + * error. + * @property {boolean} paused - a boolean for determining whether the queue is + * in a paused state. + * @property {Function} pause - a function that pauses the processing of tasks + * until `resume()` is called. Invoke with `queue.pause()`. + * @property {Function} resume - a function that resumes the processing of + * queued tasks when the queue is paused. Invoke with `queue.resume()`. + * @property {Function} kill - a function that removes the `drain` callback and + * empties remaining tasks from the queue forcing it to go idle. No more tasks + * should be pushed to the queue after calling this function. Invoke with `queue.kill()`. + * + * @example + * const q = async.queue(worker, 2) + * q.push(item1) + * q.push(item2) + * q.push(item3) + * // queues are iterable, spread into an array to inspect + * const items = [...q] // [item1, item2, item3] + * // or use for of + * for (let item of q) { + * console.log(item) + * } + * + * q.drain(() => { + * console.log('all done') + * }) + * // or + * await q.drain() + */ + +/** + * Creates a `queue` object with the specified `concurrency`. Tasks added to the + * `queue` are processed in parallel (up to the `concurrency` limit). If all + * `worker`s are in progress, the task is queued until one becomes available. + * Once a `worker` completes a `task`, that `task`'s callback is called. + * + * @name queue + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. Invoked with (task, callback). + * @param {number} [concurrency=1] - An `integer` for determining how many + * `worker` functions should be run in parallel. If omitted, the concurrency + * defaults to `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A queue object to manage the tasks. Callbacks can be + * attached as certain properties to listen for specific events during the + * lifecycle of the queue. + * @example + * + * // create a queue object with concurrency 2 + * var q = async.queue(function(task, callback) { + * console.log('hello ' + task.name); + * callback(); + * }, 2); + * + * // assign a callback + * q.drain(function() { + * console.log('all items have been processed'); + * }); + * // or await the end + * await q.drain() + * + * // assign an error callback + * q.error(function(err, task) { + * console.error('task experienced an error'); + * }); + * + * // add some items to the queue + * q.push({name: 'foo'}, function(err) { + * console.log('finished processing foo'); + * }); + * // callback is optional + * q.push({name: 'bar'}); + * + * // add some items to the queue (batch-wise) + * q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function(err) { + * console.log('finished processing item'); + * }); + * + * // add some items to the front of the queue + * q.unshift({name: 'bar'}, function (err) { + * console.log('finished processing bar'); + * }); + */ +function queue (worker, concurrency) { + var _worker = wrapAsync(worker); + return queue$1((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); +} + +// Binary min-heap implementation used for priority queue. 
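+// The heap is kept as a flat array: for a node at index i, parent(i) is
+// ((i+1)>>1)-1 and leftChi(i) is (i<<1)+1; percUp/percDown swap a node with
+// its parent (or smaller child) until the min-heap order is restored.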
+// Implementation is stable, i.e. push time is considered for equal priorities +class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } + + get length() { + return this.heap.length; + } + + empty () { + this.heap = []; + return this; + } + + percUp(index) { + let p; + + while (index > 0 && smaller(this.heap[index], this.heap[p=parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; + + index = p; + } + } + + percDown(index) { + let l; + + while ((l=leftChi(index)) < this.heap.length) { + if (l+1 < this.heap.length && smaller(this.heap[l+1], this.heap[l])) { + l = l+1; + } + + if (smaller(this.heap[index], this.heap[l])) { + break; + } + + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + + index = l; + } + } + + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length-1); + } + + unshift(node) { + return this.heap.push(node); + } + + shift() { + let [top] = this.heap; + + this.heap[0] = this.heap[this.heap.length-1]; + this.heap.pop(); + this.percDown(0); + + return top; + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator] () { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + + remove (testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } + + this.heap.splice(j); + + for (let i = parent(this.heap.length-1); i >= 0; i--) { + this.percDown(i); + } + + return this; + } +} + +function leftChi(i) { + return (i<<1)+1; +} + +function parent(i) { + return ((i+1)>>1)-1; +} + +function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } + else { + return x.pushCount < y.pushCount; + } +} + +/** + * The same as [async.queue]{@link module:ControlFlow.queue} only tasks are assigned a priority and + * completed in ascending priority order. + * + * @name priorityQueue + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.queue]{@link module:ControlFlow.queue} + * @category Control Flow + * @param {AsyncFunction} worker - An async function for processing a queued task. + * If you want to handle errors from an individual task, pass a callback to + * `q.push()`. + * Invoked with (task, callback). + * @param {number} concurrency - An `integer` for determining how many `worker` + * functions should be run in parallel. If omitted, the concurrency defaults to + * `1`. If the concurrency is `0`, an error is thrown. + * @returns {module:ControlFlow.QueueObject} A priorityQueue object to manage the tasks. There are three + * differences between `queue` and `priorityQueue` objects: + * * `push(task, priority, [callback])` - `priority` should be a number. If an + * array of `tasks` is given, all tasks will be assigned the same priority. + * * `pushAsync(task, priority, [callback])` - the same as `priorityQueue.push`, + * except this returns a promise that rejects if an error occurs. + * * The `unshift` and `unshiftAsync` methods were removed. 
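+ * @example
+ *
+ * // a minimal illustrative sketch: tasks with a lower `priority` number
+ * // are completed first
+ * var pq = async.priorityQueue(function(task, callback) {
+ *     console.log('processing ' + task.name);
+ *     callback();
+ * }, 1);
+ *
+ * pq.push({name: 'routine cleanup'}, 2);
+ * pq.push({name: 'urgent report'}, 1, function(err) {
+ *     // runs before 'routine cleanup' because priority 1 < 2
+ * });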
+ */ +function priorityQueue(worker, concurrency) { + // Start with a normal queue + var q = queue(worker, concurrency); + + var { + push, + pushAsync + } = q; + + q._tasks = new Heap(); + q._createTaskItem = ({data, priority}, callback) => { + return { + data, + priority, + callback + }; + }; + + function createDataItems(tasks, priority) { + if (!Array.isArray(tasks)) { + return {data: tasks, priority}; + } + return tasks.map(data => { return {data, priority}; }); + } + + // Override push to accept second parameter representing priority + q.push = function(data, priority = 0, callback) { + return push(createDataItems(data, priority), callback); + }; + + q.pushAsync = function(data, priority = 0, callback) { + return pushAsync(createDataItems(data, priority), callback); + }; + + // Remove unshift functions + delete q.unshift; + delete q.unshiftAsync; + + return q; +} + +/** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ +function race(tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + wrapAsync(tasks[i])(callback); + } +} + +var race$1 = awaitify(race, 2); + +/** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. + * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
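+ * @example
+ *
+ * // a minimal illustrative sketch: the array is folded from the right, so
+ * // the memo sees 'c' first, then 'b', then 'a'
+ * async.reduceRight(['a', 'b', 'c'], '', function(memo, item, callback) {
+ *     callback(null, memo + item);
+ * }, function(err, result) {
+ *     console.log(result);
+ *     // result is 'cba'
+ * });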
+ * @returns {Promise} a promise, if no callback is passed + */ +function reduceRight (array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return reduce$1(reversed, memo, iteratee, callback); +} + +/** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... + * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ +function reflect(fn) { + var _fn = wrapAsync(fn); + return initialParams(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0){ + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + + return _fn.apply(this, args); + }); +} + +/** + * A helper function that wraps an array or an object of functions with `reflect`. + * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... 
+ * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ +function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(reflect); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = reflect.call(this, tasks[key]); + }); + } + return results; +} + +function reject$2(eachfn, arr, _iteratee, callback) { + const iteratee = wrapAsync(_iteratee); + return _filter(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); + }); + }, callback); +} + +/** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }); + * + * // Using Promises + * async.reject(fileList, fileExists) + * .then( results => { + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.reject(fileList, fileExists); + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function reject (coll, iteratee, callback) { + return reject$2(eachOf$1, coll, iteratee, callback) +} +var reject$1 = awaitify(reject, 3); + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function rejectLimit (coll, limit, iteratee, callback) { + return reject$2(eachOfLimit$2(limit), coll, iteratee, callback) +} +var rejectLimit$1 = awaitify(rejectLimit, 4); + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
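+ * @example
+ *
+ * // a minimal illustrative sketch; same inputs as the reject example above,
+ * // but the checks run one at a time, in order
+ *
+ * // asynchronous function that checks if a file exists
+ * function fileExists(file, callback) {
+ *     fs.access(file, fs.constants.F_OK, (err) => {
+ *         callback(null, !err);
+ *     });
+ * }
+ *
+ * async.rejectSeries(['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt'], fileExists,
+ *     function(err, results) {
+ *         console.log(results);
+ *         // [ 'dir3/file6.txt' ]
+ *         // results now equals an array of the non-existing files
+ *     }
+ * );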
+ * @returns {Promise} a promise, if no callback is passed + */ +function rejectSeries (coll, iteratee, callback) { + return reject$2(eachOfSeries$1, coll, iteratee, callback) +} +var rejectSeries$1 = awaitify(rejectSeries, 3); + +function constant(value) { + return function () { + return value; + } +} + +/** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. + * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). + * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... 
milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ +const DEFAULT_TIMES = 5; +const DEFAULT_INTERVAL = 0; + +function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant(DEFAULT_INTERVAL) + }; + + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || promiseCallback(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || promiseCallback(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = wrapAsync(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return + if (err && attempt++ < options.times && + (typeof options.errorFilter != 'function' || + options.errorFilter(err))) { + setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); + } + + retryAttempt(); + return callback[PROMISE_SYMBOL] +} + +function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? + t.interval : + constant(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } +} + +/** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. 
+ * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ +function retryable (opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = (opts && opts.arity) || task.length; + if (isAsync(task)) { + arity += 1; + } + var _task = wrapAsync(task); + return initialParams((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = promiseCallback(); + } + function taskFn(cb) { + _task(...args, cb); + } + + if (opts) retry(opts, taskFn, callback); + else retry(taskFn, callback); + + return callback[PROMISE_SYMBOL] + }); +} + +/** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. + * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). 
+ * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function series(tasks, callback) { + return _parallel(eachOfSeries$1, tasks, callback); +} + +/** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). 
+ * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function some(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOf$1, coll, iteratee, callback) +} +var some$1 = awaitify(some, 3); + +/** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
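+ * @example
+ *
+ * // a minimal illustrative sketch; same inputs as the some example above,
+ * // but at most 2 checks are in flight at any one time
+ *
+ * // asynchronous function that checks if a file exists
+ * function fileExists(file, callback) {
+ *     fs.access(file, fs.constants.F_OK, (err) => {
+ *         callback(null, !err);
+ *     });
+ * }
+ *
+ * async.someLimit(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], 2, fileExists,
+ *     function(err, result) {
+ *         console.log(result);
+ *         // true
+ *         // result is true since dir3/file5.txt exists
+ *     }
+ * );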
+ * @returns {Promise} a promise, if no callback provided + */ +function someLimit(coll, limit, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfLimit$2(limit), coll, iteratee, callback) +} +var someLimit$1 = awaitify(someLimit, 4); + +/** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someSeries(coll, iteratee, callback) { + return _createTester(Boolean, res => res)(eachOfSeries$1, coll, iteratee, callback) +} +var someSeries$1 = awaitify(someSeries, 3); + +/** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). + * @returns {Promise} a promise, if no callback passed + * @example + * + * // bigfile.txt is a file that is 251100 bytes in size + * // mediumfile.txt is a file that is 11000 bytes in size + * // smallfile.txt is a file that is 121 bytes in size + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) return callback(getFileSizeErr); + * callback(null, fileSize); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // descending order + * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) { + * return callback(getFileSizeErr); + * } + * callback(null, fileSize * -1); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * } + * ); + * + * // Using Promises + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * (async () => { + * try { + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * // Error handling + * async () => { + * try { + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function sortBy (coll, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return map$1(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, {value: x, criteria}); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); + + function comparator(left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + } +} +var sortBy$1 = awaitify(sortBy, 3); + +/** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... + * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ +function timeout(asyncFn, milliseconds, info) { + var fn = wrapAsync(asyncFn); + + return initialParams((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); +} + +function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; +} + +/** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ +function timesLimit(count, limit, iteratee, callback) { + var _iteratee = wrapAsync(iteratee); + return mapLimit$1(range(count), limit, _iteratee, callback); +} + +/** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. 
+ * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ +function times (n, iteratee, callback) { + return timesLimit(n, Infinity, iteratee, callback) +} + +/** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ +function timesSeries (n, iteratee, callback) { + return timesLimit(n, 1, iteratee, callback) +} + +/** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * }); + * + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * (async () => { + * try { + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. + * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileMap, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * }); + * + * // Using Promises + * async.transform(fileMap, transformFileSize) + * .then(result => { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.transform(fileMap, transformFileSize); + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function transform (coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? [] : {}; + } + callback = once(callback || promiseCallback()); + var _iteratee = wrapAsync(iteratee); + + eachOf$1(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[PROMISE_SYMBOL] +} + +/** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. 
+ * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ +function tryEach(tasks, callback) { + var error = null; + var result; + return eachSeries$1(tasks, (task, taskCb) => { + wrapAsync(task)((err, ...args) => { + if (err === false) return taskCb(err); + + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); +} + +var tryEach$1 = awaitify(tryEach); + +/** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. + * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ +function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; +} + +/** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ +function whilst(test, iteratee, callback) { + callback = onlyOnce(callback); + var _fn = wrapAsync(iteratee); + var _test = wrapAsync(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); +} +var whilst$1 = awaitify(whilst, 3); + +/** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ +function until(test, iteratee, callback) { + const _test = wrapAsync(test); + return whilst$1((cb) => _test((err, truth) => cb (err, !truth)), iteratee, callback); +} + +/** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ +function waterfall (tasks, callback) { + callback = once(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + + function nextTask(args) { + var task = wrapAsync(tasks[taskIndex++]); + task(...args, onlyOnce(next)); + } + + function next(err, ...args) { + if (err === false) return + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } + + nextTask([]); +} + +var waterfall$1 = awaitify(waterfall); + +/** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. The callback should be called with a + * Error as its first argument to signal that an error occurred. + * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. 
Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. + * + * @typedef {Function} AsyncFunction + * @static + */ + + +var index = { + apply, + applyEach, + applyEachSeries, + asyncify, + auto, + autoInject, + cargo: cargo$1, + cargoQueue: cargo, + compose, + concat: concat$1, + concatLimit: concatLimit$1, + concatSeries: concatSeries$1, + constant: constant$1, + detect: detect$1, + detectLimit: detectLimit$1, + detectSeries: detectSeries$1, + dir, + doUntil, + doWhilst: doWhilst$1, + each, + eachLimit: eachLimit$1, + eachOf: eachOf$1, + eachOfLimit: eachOfLimit$1, + eachOfSeries: eachOfSeries$1, + eachSeries: eachSeries$1, + ensureAsync, + every: every$1, + everyLimit: everyLimit$1, + everySeries: everySeries$1, + filter: filter$1, + filterLimit: filterLimit$1, + filterSeries: filterSeries$1, + forever: forever$1, + groupBy, + groupByLimit: groupByLimit$1, + groupBySeries, + log, + map: map$1, + mapLimit: mapLimit$1, + mapSeries: mapSeries$1, + mapValues, + mapValuesLimit: mapValuesLimit$1, + mapValuesSeries, + memoize, + nextTick, + parallel, + parallelLimit, + priorityQueue, + queue, + race: race$1, + reduce: reduce$1, + reduceRight, + reflect, + reflectAll, + reject: reject$1, + rejectLimit: rejectLimit$1, + rejectSeries: rejectSeries$1, + retry, + retryable, + seq, + series, + setImmediate: setImmediate$1, + some: some$1, + someLimit: someLimit$1, + someSeries: someSeries$1, + sortBy: sortBy$1, + timeout, + times, + timesLimit, + timesSeries, + transform, + tryEach: tryEach$1, + unmemoize, + until, + waterfall: waterfall$1, + whilst: whilst$1, + + // aliases + all: every$1, + allLimit: everyLimit$1, + allSeries: everySeries$1, + any: some$1, + anyLimit: someLimit$1, + anySeries: someSeries$1, + find: detect$1, + findLimit: detectLimit$1, + findSeries: detectSeries$1, + flatMap: concat$1, + flatMapLimit: concatLimit$1, + flatMapSeries: concatSeries$1, + forEach: each, + forEachSeries: eachSeries$1, + forEachLimit: eachLimit$1, + forEachOf: eachOf$1, + forEachOfSeries: eachOfSeries$1, + forEachOfLimit: eachOfLimit$1, + inject: reduce$1, + foldl: reduce$1, + foldr: reduceRight, + select: filter$1, + selectLimit: filterLimit$1, + selectSeries: filterSeries$1, + wrapSync: asyncify, + during: whilst$1, + doDuring: doWhilst$1 +}; + +export { every$1 as all, everyLimit$1 as allLimit, everySeries$1 as allSeries, some$1 as any, someLimit$1 as anyLimit, someSeries$1 as anySeries, apply, applyEach, applyEachSeries, asyncify, auto, autoInject, cargo$1 as cargo, cargo as cargoQueue, compose, concat$1 as concat, concatLimit$1 as concatLimit, concatSeries$1 as concatSeries, constant$1 as constant, index as default, detect$1 as detect, detectLimit$1 as detectLimit, detectSeries$1 as detectSeries, dir, doWhilst$1 as doDuring, doUntil, doWhilst$1 as doWhilst, whilst$1 as during, each, eachLimit$1 as eachLimit, eachOf$1 as eachOf, eachOfLimit$1 as eachOfLimit, eachOfSeries$1 as eachOfSeries, eachSeries$1 as eachSeries, ensureAsync, every$1 as every, everyLimit$1 as everyLimit, everySeries$1 as everySeries, filter$1 as filter, filterLimit$1 as filterLimit, filterSeries$1 as filterSeries, detect$1 as find, detectLimit$1 as findLimit, detectSeries$1 as findSeries, concat$1 as flatMap, concatLimit$1 as flatMapLimit, concatSeries$1 as flatMapSeries, reduce$1 as foldl, reduceRight as foldr, each as forEach, eachLimit$1 as forEachLimit, eachOf$1 as forEachOf, eachOfLimit$1 as 
forEachOfLimit, eachOfSeries$1 as forEachOfSeries, eachSeries$1 as forEachSeries, forever$1 as forever, groupBy, groupByLimit$1 as groupByLimit, groupBySeries, reduce$1 as inject, log, map$1 as map, mapLimit$1 as mapLimit, mapSeries$1 as mapSeries, mapValues, mapValuesLimit$1 as mapValuesLimit, mapValuesSeries, memoize, nextTick, parallel, parallelLimit, priorityQueue, queue, race$1 as race, reduce$1 as reduce, reduceRight, reflect, reflectAll, reject$1 as reject, rejectLimit$1 as rejectLimit, rejectSeries$1 as rejectSeries, retry, retryable, filter$1 as select, filterLimit$1 as selectLimit, filterSeries$1 as selectSeries, seq, series, setImmediate$1 as setImmediate, some$1 as some, someLimit$1 as someLimit, someSeries$1 as someSeries, sortBy$1 as sortBy, timeout, times, timesLimit, timesSeries, transform, tryEach$1 as tryEach, unmemoize, until, waterfall$1 as waterfall, whilst$1 as whilst, asyncify as wrapSync }; diff --git a/backend/node_modules/async/doDuring.js b/backend/node_modules/async/doDuring.js new file mode 100644 index 00000000..c72766d0 --- /dev/null +++ b/backend/node_modules/async/doDuring.js @@ -0,0 +1,68 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doWhilst(iteratee, test, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); +} + +exports.default = (0, _awaitify2.default)(doWhilst, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/doUntil.js b/backend/node_modules/async/doUntil.js new file mode 100644 index 00000000..519900ee --- /dev/null +++ b/backend/node_modules/async/doUntil.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = doUntil; + +var _doWhilst = require('./doWhilst.js'); + +var _doWhilst2 = _interopRequireDefault(_doWhilst); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Like ['doWhilst']{@link module:ControlFlow.doWhilst}, except the `test` is inverted. Note the + * argument ordering differs from `until`. + * + * @name doUntil + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.doWhilst]{@link module:ControlFlow.doWhilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee` + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doUntil(iteratee, test, callback) { + const _test = (0, _wrapAsync2.default)(test); + return (0, _doWhilst2.default)(iteratee, (...args) => { + const cb = args.pop(); + _test(...args, (err, truth) => cb(err, !truth)); + }, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/doWhilst.js b/backend/node_modules/async/doWhilst.js new file mode 100644 index 00000000..c72766d0 --- /dev/null +++ b/backend/node_modules/async/doWhilst.js @@ -0,0 +1,68 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The post-check version of [`whilst`]{@link module:ControlFlow.whilst}. 
To reflect the difference in + * the order of operations, the arguments `test` and `iteratee` are switched. + * + * `doWhilst` is to `whilst` as `do while` is to `while` in plain JavaScript. + * + * @name doWhilst + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} iteratee - A function which is called each time `test` + * passes. Invoked with (callback). + * @param {AsyncFunction} test - asynchronous truth test to perform after each + * execution of `iteratee`. Invoked with (...args, callback), where `...args` are the + * non-error args from the previous callback of `iteratee`. + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. + * `callback` will be passed an error and any arguments passed to the final + * `iteratee`'s callback. Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + */ +function doWhilst(iteratee, test, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results; + + function next(err, ...args) { + if (err) return callback(err); + if (err === false) return; + results = args; + _test(...args, check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return check(null, true); +} + +exports.default = (0, _awaitify2.default)(doWhilst, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/during.js b/backend/node_modules/async/during.js new file mode 100644 index 00000000..4165543f --- /dev/null +++ b/backend/node_modules/async/during.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ +function whilst(test, iteratee, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); +} +exports.default = (0, _awaitify2.default)(whilst, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/each.js b/backend/node_modules/async/each.js new file mode 100644 index 00000000..fdfcbd88 --- /dev/null +++ b/backend/node_modules/async/each.js @@ -0,0 +1,129 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * } + * } + * + */ +function eachLimit(coll, iteratee, callback) { + return (0, _eachOf2.default)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} + +exports.default = (0, _awaitify2.default)(eachLimit, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/eachLimit.js b/backend/node_modules/async/eachLimit.js new file mode 100644 index 00000000..7f5928c6 --- /dev/null +++ b/backend/node_modules/async/eachLimit.js @@ -0,0 +1,50 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. 
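+ *
+ * // Reviewer's sketch, not part of the upstream async documentation: delete
+ * // files with at most 2 unlink operations in flight, reusing the
+ * // `deleteFile` helper and `fileList` from the `each` example above.
+ * //
+ * //   async.eachLimit(fileList, 2, deleteFile, function(err) {
+ * //       if (err) {
+ * //           console.log(err);
+ * //       } else {
+ * //           console.log('All files have been deleted successfully');
+ * //       }
+ * //   });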
+ * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit2.default)(limit)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} +exports.default = (0, _awaitify2.default)(eachLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/eachOf.js b/backend/node_modules/async/eachOf.js new file mode 100644 index 00000000..9ed20f66 --- /dev/null +++ b/backend/node_modules/async/eachOf.js @@ -0,0 +1,185 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _isArrayLike = require('./internal/isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _breakLoop = require('./internal/breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// eachOf implementation optimized for array-likes +function eachOfArrayLike(coll, iteratee, callback) { + callback = (0, _once2.default)(callback); + var index = 0, + completed = 0, + { length } = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return; + if (err) { + callback(err); + } else if (++completed === length || value === _breakLoop2.default) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, (0, _onlyOnce2.default)(iteratorCallback)); + } +} + +// a generic version of eachOf which can handle array, object, and iterator cases. +function eachOfGeneric(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, Infinity, iteratee, callback); +} + +/** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // dev.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return calback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * //Error handing + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * //Error handing + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ +function eachOf(coll, iteratee, callback) { + var eachOfImplementation = (0, _isArrayLike2.default)(coll) ? 
eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOf, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/eachOfLimit.js b/backend/node_modules/async/eachOfLimit.js new file mode 100644 index 00000000..a596e5ac --- /dev/null +++ b/backend/node_modules/async/eachOfLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit2 = require('./internal/eachOfLimit.js'); + +var _eachOfLimit3 = _interopRequireDefault(_eachOfLimit2); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit3.default)(limit)(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOfLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/eachOfSeries.js b/backend/node_modules/async/eachOfSeries.js new file mode 100644 index 00000000..04243ada --- /dev/null +++ b/backend/node_modules/async/eachOfSeries.js @@ -0,0 +1,39 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). 
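+ *
+ * // Reviewer's sketch, not part of the upstream async documentation: the
+ * // eachOf config example above, but parsing one file at a time, in order.
+ * //
+ * //   async.eachOfSeries(validConfigFileMap, parseFile, function (err) {
+ * //       if (err) {
+ * //           console.error(err);
+ * //       } else {
+ * //           console.log(configs);
+ * //       }
+ * //   });
+ *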
+ * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfSeries(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachOfSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/eachSeries.js b/backend/node_modules/async/eachSeries.js new file mode 100644 index 00000000..b04896e3 --- /dev/null +++ b/backend/node_modules/async/eachSeries.js @@ -0,0 +1,44 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachLimit = require('./eachLimit.js'); + +var _eachLimit2 = _interopRequireDefault(_eachLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachSeries(coll, iteratee, callback) { + return (0, _eachLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/ensureAsync.js b/backend/node_modules/async/ensureAsync.js new file mode 100644 index 00000000..749c5dac --- /dev/null +++ b/backend/node_modules/async/ensureAsync.js @@ -0,0 +1,67 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = ensureAsync; + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Wrap an async function and ensure it calls its callback on a later tick of + * the event loop. If the function already calls its callback on a next tick, + * no extra deferral is added. This is useful for preventing stack overflows + * (`RangeError: Maximum call stack size exceeded`) and generally keeping + * [Zalgo](http://blog.izs.me/post/59142742143/designing-apis-for-asynchrony) + * contained. ES2017 `async` functions are returned as-is -- they are immune + * to Zalgo's corrupting influences, as they always resolve on a later tick. 
+ * + * @name ensureAsync + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - an async function, one that expects a node-style + * callback as its last argument. + * @returns {AsyncFunction} Returns a wrapped function with the exact same call + * signature as the function passed in. + * @example + * + * function sometimesAsync(arg, callback) { + * if (cache[arg]) { + * return callback(null, cache[arg]); // this would be synchronous!! + * } else { + * doSomeIO(arg, callback); // this IO would be asynchronous + * } + * } + * + * // this has a risk of stack overflows if many results are cached in a row + * async.mapSeries(args, sometimesAsync, done); + * + * // this will defer sometimesAsync's callback if necessary, + * // preventing stack overflows + * async.mapSeries(args, async.ensureAsync(sometimesAsync), done); + */ +function ensureAsync(fn) { + if ((0, _wrapAsync.isAsync)(fn)) return fn; + return function (...args /*, callback*/) { + var callback = args.pop(); + var sync = true; + args.push((...innerArgs) => { + if (sync) { + (0, _setImmediate2.default)(() => callback(...innerArgs)); + } else { + callback(...innerArgs); + } + }); + fn.apply(this, args); + sync = false; + }; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/every.js b/backend/node_modules/async/every.js new file mode 100644 index 00000000..622b3019 --- /dev/null +++ b/backend/node_modules/async/every.js @@ -0,0 +1,119 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if every element in `coll` satisfies an async test. If any + * iteratee call returns `false`, the main `callback` is immediately called. + * + * @name every + * @static + * @memberOf module:Collections + * @method + * @alias all + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file5.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.every(fileList, fileExists, function(err, result) { + * console.log(result); + * // true + * // result is true since every file exists + * }); + * + * async.every(withMissingFileList, fileExists, function(err, result) { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }); + * + * // Using Promises + * async.every(fileList, fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.every(withMissingFileList, fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since NOT every file exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.every(fileList, fileExists); + * console.log(result); + * // true + * // result is true since every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.every(withMissingFileList, fileExists); + * console.log(result); + * // false + * // result is false since NOT every file exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function every(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(every, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/everyLimit.js b/backend/node_modules/async/everyLimit.js new file mode 100644 index 00000000..375e1260 --- /dev/null +++ b/backend/node_modules/async/everyLimit.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs a maximum of `limit` async operations at a time. + * + * @name everyLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in parallel. 
+ * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everyLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everyLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/everySeries.js b/backend/node_modules/async/everySeries.js new file mode 100644 index 00000000..9a6bf7d4 --- /dev/null +++ b/backend/node_modules/async/everySeries.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`every`]{@link module:Collections.every} but runs only a single async operation at a time. + * + * @name everySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.every]{@link module:Collections.every} + * @alias allSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collection in series. + * The iteratee must complete with a boolean result value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result will be either `true` or `false` + * depending on the values of the async tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function everySeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => !bool, res => !res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(everySeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/filter.js b/backend/node_modules/async/filter.js new file mode 100644 index 00000000..2c9a63de --- /dev/null +++ b/backend/node_modules/async/filter.js @@ -0,0 +1,93 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. 
+ * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function filter(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filter, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/filterLimit.js b/backend/node_modules/async/filterLimit.js new file mode 100644 index 00000000..d3b3f50c --- /dev/null +++ b/backend/node_modules/async/filterLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. 
+ * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback provided + */ +function filterLimit(coll, limit, iteratee, callback) { + return (0, _filter3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/filterSeries.js b/backend/node_modules/async/filterSeries.js new file mode 100644 index 00000000..019a2d0a --- /dev/null +++ b/backend/node_modules/async/filterSeries.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ +function filterSeries(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/find.js b/backend/node_modules/async/find.js new file mode 100644 index 00000000..d5896ef6 --- /dev/null +++ b/backend/node_modules/async/find.js @@ -0,0 +1,96 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns the first value in `coll` that passes an async truth test. The + * `iteratee` is applied in parallel, meaning the first iteratee to return + * `true` will fire the detect `callback` with that result. That means the + * result might not be the first item in the original `coll` (in terms of order) + * that passes the test. 
+ + * If order within the original `coll` is important, then look at + * [`detectSeries`]{@link module:Collections.detectSeries}. + * + * @name detect + * @static + * @memberOf module:Collections + * @method + * @alias find + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * } + *); + * + * // Using Promises + * async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists) + * .then(result => { + * console.log(result); + * // dir1/file1.txt + * // result now equals the first file in the list that exists + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.detect(['file3.txt','file2.txt','dir1/file1.txt'], fileExists); + * console.log(result); + * // dir1/file1.txt + * // result now equals the file in the list that exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function detect(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detect, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/findLimit.js b/backend/node_modules/async/findLimit.js new file mode 100644 index 00000000..c59843b6 --- /dev/null +++ b/backend/node_modules/async/findLimit.js @@ -0,0 +1,48 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs a maximum of `limit` async operations at a + * time. 
+ * + * @name detectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findLimit + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). + * @returns {Promise} a promise, if a callback is omitted + */ +function detectLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(detectLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/findSeries.js b/backend/node_modules/async/findSeries.js new file mode 100644 index 00000000..b4868996 --- /dev/null +++ b/backend/node_modules/async/findSeries.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`detect`]{@link module:Collections.detect} but runs only a single async operation at a time. + * + * @name detectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.detect]{@link module:Collections.detect} + * @alias findSeries + * @category Collections + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A truth test to apply to each item in `coll`. + * The iteratee must complete with a boolean value as its result. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the `iteratee` functions have finished. + * Result will be the first item in the array that passes the truth test + * (iteratee) or the value `undefined` if none passed. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if a callback is omitted + */ +function detectSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(bool => bool, (res, item) => item)((0, _eachOfLimit2.default)(1), coll, iteratee, callback); +} + +exports.default = (0, _awaitify2.default)(detectSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/flatMap.js b/backend/node_modules/async/flatMap.js new file mode 100644 index 00000000..4540a79c --- /dev/null +++ b/backend/node_modules/async/flatMap.js @@ -0,0 +1,115 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _concatLimit = require('./concatLimit.js'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies `iteratee` to each item in `coll`, concatenating the results. Returns + * the concatenated list. The `iteratee`s are called in parallel, and the + * results are concatenated as they return. The results array will be returned in + * the original order of `coll` passed to the `iteratee` function. + * + * @name concat + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @alias flatMap + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). 
+ * @returns A Promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * let directoryList = ['dir1','dir2','dir3']; + * let withMissingDirectoryList = ['dir1','dir2','dir3', 'dir4']; + * + * // Using callbacks + * async.concat(directoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.concat(directoryList, fs.readdir) + * .then(results => { + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Error Handling + * async.concat(withMissingDirectoryList, fs.readdir) + * .then(results => { + * console.log(results); + * }).catch(err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.concat(directoryList, fs.readdir); + * console.log(results); + * // [ 'file1.txt', 'file2.txt', 'file3.txt', 'file4.txt', file5.txt ] + * } catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.concat(withMissingDirectoryList, fs.readdir); + * console.log(results); + * } catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4 does not exist + * } + * } + * + */ +function concat(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, Infinity, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concat, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/flatMapLimit.js b/backend/node_modules/async/flatMapLimit.js new file mode 100644 index 00000000..a27cc7d4 --- /dev/null +++ b/backend/node_modules/async/flatMapLimit.js @@ -0,0 +1,60 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs a maximum of `limit` async operations at a time. + * + * @name concatLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapLimit + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. 
+ * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`, + * which should use an array as its result. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results). + * @returns A Promise, if no callback is passed + */ +function concatLimit(coll, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)(coll, limit, (val, iterCb) => { + _iteratee(val, (err, ...args) => { + if (err) return iterCb(err); + return iterCb(err, args); + }); + }, (err, mapResults) => { + var result = []; + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + result = result.concat(...mapResults[i]); + } + } + + return callback(err, result); + }); +} +exports.default = (0, _awaitify2.default)(concatLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/flatMapSeries.js b/backend/node_modules/async/flatMapSeries.js new file mode 100644 index 00000000..332de3f3 --- /dev/null +++ b/backend/node_modules/async/flatMapSeries.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _concatLimit = require('./concatLimit.js'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`concat`]{@link module:Collections.concat} but runs only a single async operation at a time. + * + * @name concatSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.concat]{@link module:Collections.concat} + * @category Collection + * @alias flatMapSeries + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each item in `coll`. + * The iteratee should complete with an array of results. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is an array + * containing the concatenated results of the `iteratee` function. Invoked with + * (err, results).
+ * @returns A Promise, if no callback is passed + */ +function concatSeries(coll, iteratee, callback) { + return (0, _concatLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(concatSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/foldl.js b/backend/node_modules/async/foldl.js new file mode 100644 index 00000000..8a69548a --- /dev/null +++ b/backend/node_modules/async/foldl.js @@ -0,0 +1,153 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Reduces `coll` into a single value using an async `iteratee` to return each + * successive step. `memo` is the initial state of the reduction. This function + * only operates in series. + * + * For performance reasons, it may make sense to split a call to this function + * into a parallel map, and then use the normal `Array.prototype.reduce` on the + * results. This function is for situations where each step in the reduction + * needs to be async; if you can get the data before reducing it, then it's + * probably a good idea to do so. + * + * @name reduce + * @static + * @memberOf module:Collections + * @method + * @alias inject + * @alias foldl + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; + * + * // asynchronous function that computes the file size in bytes + * // file size is added to the memoized value, then returned + * function getFileSizeInBytes(memo, file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, memo + stat.size); + * }); + * } + * + * // Using callbacks + * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.reduce(fileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = (0, _once2.default)(callback); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfSeries2.default)(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +exports.default = (0, _awaitify2.default)(reduce, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/foldr.js b/backend/node_modules/async/foldr.js new file mode 100644 index 00000000..5be1b68d --- /dev/null +++ b/backend/node_modules/async/foldr.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reduceRight; + +var _reduce = require('./reduce.js'); + +var _reduce2 = _interopRequireDefault(_reduce); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. 
+ * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function reduceRight(array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return (0, _reduce2.default)(reversed, memo, iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/forEach.js b/backend/node_modules/async/forEach.js new file mode 100644 index 00000000..fdfcbd88 --- /dev/null +++ b/backend/node_modules/async/forEach.js @@ -0,0 +1,129 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Applies the function `iteratee` to each item in `coll`, in parallel. + * The `iteratee` is called with an item from the list, and a callback for when + * it has finished. If the `iteratee` passes an error to its `callback`, the + * main `callback` (for the `each` function) is immediately called with the + * error. + * + * Note, that since this function applies `iteratee` to each item in parallel, + * there is no guarantee that the iteratee functions will complete in order. + * + * @name each + * @static + * @memberOf module:Collections + * @method + * @alias forEach + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to + * each item in `coll`. Invoked with (item, callback). + * The array index is not passed to the iteratee. + * If you need the index, use `eachOf`. + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const fileList = [ 'dir1/file2.txt', 'dir2/file3.txt', 'dir/file5.txt']; + * const withMissingFileList = ['dir1/file1.txt', 'dir4/file2.txt']; + * + * // asynchronous function that deletes a file + * const deleteFile = function(file, callback) { + * fs.unlink(file, callback); + * }; + * + * // Using callbacks + * async.each(fileList, deleteFile, function(err) { + * if( err ) { + * console.log(err); + * } else { + * console.log('All files have been deleted successfully'); + * } + * }); + * + * // Error Handling + * async.each(withMissingFileList, deleteFile, function(err){ + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using Promises + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.each(fileList, deleteFile) + * .then( () => { + * console.log('All files have been deleted successfully'); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * }); + * + * // Using async/await + * async () => { + * try { + * await async.each(files, deleteFile); + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * await async.each(withMissingFileList, deleteFile); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * // since dir4/file2.txt does not exist + * // dir1/file1.txt could have been deleted + * } + * } + * + */ +function eachLimit(coll, iteratee, callback) { + return (0, _eachOf2.default)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} + +exports.default = (0, _awaitify2.default)(eachLimit, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/forEachLimit.js b/backend/node_modules/async/forEachLimit.js new file mode 100644 index 00000000..7f5928c6 --- /dev/null +++ b/backend/node_modules/async/forEachLimit.js @@ -0,0 +1,50 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _withoutIndex = require('./internal/withoutIndex.js'); + +var _withoutIndex2 = _interopRequireDefault(_withoutIndex); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs a maximum of `limit` async operations at a time. 
+ * + * @name eachLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfLimit`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit2.default)(limit)(coll, (0, _withoutIndex2.default)((0, _wrapAsync2.default)(iteratee)), callback); +} +exports.default = (0, _awaitify2.default)(eachLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/forEachOf.js b/backend/node_modules/async/forEachOf.js new file mode 100644 index 00000000..9ed20f66 --- /dev/null +++ b/backend/node_modules/async/forEachOf.js @@ -0,0 +1,185 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _isArrayLike = require('./internal/isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _breakLoop = require('./internal/breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// eachOf implementation optimized for array-likes +function eachOfArrayLike(coll, iteratee, callback) { + callback = (0, _once2.default)(callback); + var index = 0, + completed = 0, + { length } = coll, + canceled = false; + if (length === 0) { + callback(null); + } + + function iteratorCallback(err, value) { + if (err === false) { + canceled = true; + } + if (canceled === true) return; + if (err) { + callback(err); + } else if (++completed === length || value === _breakLoop2.default) { + callback(null); + } + } + + for (; index < length; index++) { + iteratee(coll[index], index, (0, _onlyOnce2.default)(iteratorCallback)); + } +} + +// a generic version of eachOf which can handle array, object, and iterator cases. +function eachOfGeneric(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, Infinity, iteratee, callback); +} + +/** + * Like [`each`]{@link module:Collections.each}, except that it passes the key (or index) as the second argument + * to the iteratee. + * + * @name eachOf + * @static + * @memberOf module:Collections + * @method + * @alias forEachOf + * @category Collection + * @see [async.each]{@link module:Collections.each} + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {AsyncFunction} iteratee - A function to apply to each + * item in `coll`. + * The `key` is the item's key, or index in the case of an array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * // dev.json is a file containing a valid json object config for dev environment + * // test.json is a file containing a valid json object config for test environment + * // prod.json is a file containing a valid json object config for prod environment + * // invalid.json is a file with a malformed json object + * + * let configs = {}; //global variable + * let validConfigFileMap = {dev: 'dev.json', test: 'test.json', prod: 'prod.json'}; + * let invalidConfigFileMap = {dev: 'dev.json', test: 'test.json', invalid: 'invalid.json'}; + * + * // asynchronous function that reads a json file and parses the contents as json object + * function parseFile(file, key, callback) { + * fs.readFile(file, "utf8", function(err, data) { + * if (err) return callback(err); + * try { + * configs[key] = JSON.parse(data); + * } catch (e) { + * return callback(e); + * } + * callback(); + * }); + * } + * + * // Using callbacks + * async.forEachOf(validConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * } else { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * }); + * + * // Error Handling + * async.forEachOf(invalidConfigFileMap, parseFile, function (err) { + * if (err) { + * console.error(err); + * // JSON parse error exception + * } else { + * console.log(configs); + * } + * }); + * + * // Using Promises + * async.forEachOf(validConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * }).catch( err => { + * console.error(err); + * }); + * + * // Error Handling + * async.forEachOf(invalidConfigFileMap, parseFile) + * .then( () => { + * console.log(configs); + * }).catch( err => { + * console.error(err); + * // JSON parse error exception + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.forEachOf(validConfigFileMap, parseFile); + * console.log(configs); + * // configs is now a map of JSON data, e.g. + * // { dev: //parsed dev.json, test: //parsed test.json, prod: //parsed prod.json} + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.forEachOf(invalidConfigFileMap, parseFile); + * console.log(configs); + * } + * catch (err) { + * console.log(err); + * // JSON parse error exception + * } + * } + * + */ +function eachOf(coll, iteratee, callback) { + var eachOfImplementation = (0, _isArrayLike2.default)(coll) ?
eachOfArrayLike : eachOfGeneric; + return eachOfImplementation(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOf, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/forEachOfLimit.js b/backend/node_modules/async/forEachOfLimit.js new file mode 100644 index 00000000..a596e5ac --- /dev/null +++ b/backend/node_modules/async/forEachOfLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit2 = require('./internal/eachOfLimit.js'); + +var _eachOfLimit3 = _interopRequireDefault(_eachOfLimit2); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs a maximum of `limit` async operations at a + * time. + * + * @name eachOfLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. The `key` is the item's key, or index in the case of an + * array. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfLimit(coll, limit, iteratee, callback) { + return (0, _eachOfLimit3.default)(limit)(coll, (0, _wrapAsync2.default)(iteratee), callback); +} + +exports.default = (0, _awaitify2.default)(eachOfLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/forEachOfSeries.js b/backend/node_modules/async/forEachOfSeries.js new file mode 100644 index 00000000..04243ada --- /dev/null +++ b/backend/node_modules/async/forEachOfSeries.js @@ -0,0 +1,39 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`eachOf`]{@link module:Collections.eachOf} but runs only a single async operation at a time. + * + * @name eachOfSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.eachOf]{@link module:Collections.eachOf} + * @alias forEachOfSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * Invoked with (item, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Invoked with (err). 
+ * @returns {Promise} a promise, if a callback is omitted + */ +function eachOfSeries(coll, iteratee, callback) { + return (0, _eachOfLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachOfSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/forEachSeries.js b/backend/node_modules/async/forEachSeries.js new file mode 100644 index 00000000..b04896e3 --- /dev/null +++ b/backend/node_modules/async/forEachSeries.js @@ -0,0 +1,44 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachLimit = require('./eachLimit.js'); + +var _eachLimit2 = _interopRequireDefault(_eachLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`each`]{@link module:Collections.each} but runs only a single async operation at a time. + * + * Note, that unlike [`each`]{@link module:Collections.each}, this function applies iteratee to each item + * in series and therefore the iteratee functions will complete in order. + + * @name eachSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.each]{@link module:Collections.each} + * @alias forEachSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each + * item in `coll`. + * The array index is not passed to the iteratee. + * If you need the index, use `eachOfSeries`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all + * `iteratee` functions have finished, or an error occurs. Invoked with (err). + * @returns {Promise} a promise, if a callback is omitted + */ +function eachSeries(coll, iteratee, callback) { + return (0, _eachLimit2.default)(coll, 1, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(eachSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/forever.js b/backend/node_modules/async/forever.js new file mode 100644 index 00000000..22925185 --- /dev/null +++ b/backend/node_modules/async/forever.js @@ -0,0 +1,68 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _ensureAsync = require('./ensureAsync.js'); + +var _ensureAsync2 = _interopRequireDefault(_ensureAsync); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Calls the asynchronous function `fn` with a callback parameter that allows it + * to call itself again, in series, indefinitely. + + * If an error is passed to the callback then `errback` is called with the + * error, and execution stops, otherwise it will never be called. + * + * @name forever + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} fn - an async function to call repeatedly. + * Invoked with (next). 
+ * @param {Function} [errback] - when `fn` passes an error to it's callback, + * this function will be called, and execution stops. Invoked with (err). + * @returns {Promise} a promise that rejects if an error occurs and an errback + * is not passed + * @example + * + * async.forever( + * function(next) { + * // next is suitable for passing to things that need a callback(err [, whatever]); + * // it will result in this function being called again. + * }, + * function(err) { + * // if next is called with a value in its first parameter, it will appear + * // in here as 'err', and execution will stop. + * } + * ); + */ +function forever(fn, errback) { + var done = (0, _onlyOnce2.default)(errback); + var task = (0, _wrapAsync2.default)((0, _ensureAsync2.default)(fn)); + + function next(err) { + if (err) return done(err); + if (err === false) return; + task(next); + } + return next(); +} +exports.default = (0, _awaitify2.default)(forever, 2); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/groupBy.js b/backend/node_modules/async/groupBy.js new file mode 100644 index 00000000..f2957630 --- /dev/null +++ b/backend/node_modules/async/groupBy.js @@ -0,0 +1,108 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = groupBy; + +var _groupByLimit = require('./groupByLimit.js'); + +var _groupByLimit2 = _interopRequireDefault(_groupByLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns a new object, where each value corresponds to an array of items, from + * `coll`, that returned the corresponding key. That is, the keys of the object + * correspond to the values passed to the `iteratee` callback. + * + * Note: Since this function applies the `iteratee` to each item in parallel, + * there is no guarantee that the `iteratee` functions will complete in order. + * However, the values for each key in the `result` will be in the same order as + * the original `coll`. For Objects, the values will roughly be in the order of + * the original Objects' keys (but this can vary across JavaScript engines). + * + * @name groupBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * const files = ['dir1/file1.txt','dir2','dir4'] + * + * // asynchronous function that detects file type as none, file, or directory + * function detectFile(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(null, 'none'); + * } + * callback(null, stat.isDirectory() ? 
'directory' : 'file'); + * }); + * } + * + * //Using callbacks + * async.groupBy(files, detectFile, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * }); + * + * // Using Promises + * async.groupBy(files, detectFile) + * .then( result => { + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.groupBy(files, detectFile); + * console.log(result); + * // { + * // file: [ 'dir1/file1.txt' ], + * // none: [ 'dir4' ], + * // directory: [ 'dir2'] + * // } + * // result is object containing the files grouped by type + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function groupBy(coll, iteratee, callback) { + return (0, _groupByLimit2.default)(coll, Infinity, iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/groupByLimit.js b/backend/node_modules/async/groupByLimit.js new file mode 100644 index 00000000..30fd290a --- /dev/null +++ b/backend/node_modules/async/groupByLimit.js @@ -0,0 +1,71 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs a maximum of `limit` async operations at a time. + * + * @name groupByLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whoses + * properties are arrays of values which returned the corresponding key. 
+ * @returns {Promise} a promise, if no callback is passed + */ +function groupByLimit(coll, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)(coll, limit, (val, iterCb) => { + _iteratee(val, (err, key) => { + if (err) return iterCb(err); + return iterCb(err, { key, val }); + }); + }, (err, mapResults) => { + var result = {}; + // from MDN, handle object having an `hasOwnProperty` prop + var { hasOwnProperty } = Object.prototype; + + for (var i = 0; i < mapResults.length; i++) { + if (mapResults[i]) { + var { key } = mapResults[i]; + var { val } = mapResults[i]; + + if (hasOwnProperty.call(result, key)) { + result[key].push(val); + } else { + result[key] = [val]; + } + } + } + + return callback(err, result); + }); +} + +exports.default = (0, _awaitify2.default)(groupByLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/groupBySeries.js b/backend/node_modules/async/groupBySeries.js new file mode 100644 index 00000000..e2a52874 --- /dev/null +++ b/backend/node_modules/async/groupBySeries.js @@ -0,0 +1,36 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = groupBySeries; + +var _groupByLimit = require('./groupByLimit.js'); + +var _groupByLimit2 = _interopRequireDefault(_groupByLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`groupBy`]{@link module:Collections.groupBy} but runs only a single async operation at a time. + * + * @name groupBySeries + * @static + * @memberOf module:Collections + * @method + * @see [async.groupBy]{@link module:Collections.groupBy} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a `key` to group the value under. + * Invoked with (value, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Result is an `Object` whose + * properties are arrays of values which returned the corresponding key. 
+ * @returns {Promise} a promise, if no callback is passed + */ +function groupBySeries(coll, iteratee, callback) { + return (0, _groupByLimit2.default)(coll, 1, iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/index.js b/backend/node_modules/async/index.js new file mode 100644 index 00000000..6154647d --- /dev/null +++ b/backend/node_modules/async/index.js @@ -0,0 +1,588 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.doDuring = exports.during = exports.wrapSync = undefined; +exports.selectSeries = exports.selectLimit = exports.select = exports.foldr = exports.foldl = exports.inject = exports.forEachOfLimit = exports.forEachOfSeries = exports.forEachOf = exports.forEachLimit = exports.forEachSeries = exports.forEach = exports.flatMapSeries = exports.flatMapLimit = exports.flatMap = exports.findSeries = exports.findLimit = exports.find = exports.anySeries = exports.anyLimit = exports.any = exports.allSeries = exports.allLimit = exports.all = exports.whilst = exports.waterfall = exports.until = exports.unmemoize = exports.tryEach = exports.transform = exports.timesSeries = exports.timesLimit = exports.times = exports.timeout = exports.sortBy = exports.someSeries = exports.someLimit = exports.some = exports.setImmediate = exports.series = exports.seq = exports.retryable = exports.retry = exports.rejectSeries = exports.rejectLimit = exports.reject = exports.reflectAll = exports.reflect = exports.reduceRight = exports.reduce = exports.race = exports.queue = exports.priorityQueue = exports.parallelLimit = exports.parallel = exports.nextTick = exports.memoize = exports.mapValuesSeries = exports.mapValuesLimit = exports.mapValues = exports.mapSeries = exports.mapLimit = exports.map = exports.log = exports.groupBySeries = exports.groupByLimit = exports.groupBy = exports.forever = exports.filterSeries = exports.filterLimit = exports.filter = exports.everySeries = exports.everyLimit = exports.every = exports.ensureAsync = exports.eachSeries = exports.eachOfSeries = exports.eachOfLimit = exports.eachOf = exports.eachLimit = exports.each = exports.doWhilst = exports.doUntil = exports.dir = exports.detectSeries = exports.detectLimit = exports.detect = exports.constant = exports.concatSeries = exports.concatLimit = exports.concat = exports.compose = exports.cargoQueue = exports.cargo = exports.autoInject = exports.auto = exports.asyncify = exports.applyEachSeries = exports.applyEach = exports.apply = undefined; + +var _apply = require('./apply'); + +var _apply2 = _interopRequireDefault(_apply); + +var _applyEach = require('./applyEach'); + +var _applyEach2 = _interopRequireDefault(_applyEach); + +var _applyEachSeries = require('./applyEachSeries'); + +var _applyEachSeries2 = _interopRequireDefault(_applyEachSeries); + +var _asyncify = require('./asyncify'); + +var _asyncify2 = _interopRequireDefault(_asyncify); + +var _auto = require('./auto'); + +var _auto2 = _interopRequireDefault(_auto); + +var _autoInject = require('./autoInject'); + +var _autoInject2 = _interopRequireDefault(_autoInject); + +var _cargo = require('./cargo'); + +var _cargo2 = _interopRequireDefault(_cargo); + +var _cargoQueue = require('./cargoQueue'); + +var _cargoQueue2 = _interopRequireDefault(_cargoQueue); + +var _compose = require('./compose'); + +var _compose2 = _interopRequireDefault(_compose); + +var _concat = require('./concat'); + +var _concat2 = _interopRequireDefault(_concat); + +var _concatLimit = 
require('./concatLimit'); + +var _concatLimit2 = _interopRequireDefault(_concatLimit); + +var _concatSeries = require('./concatSeries'); + +var _concatSeries2 = _interopRequireDefault(_concatSeries); + +var _constant = require('./constant'); + +var _constant2 = _interopRequireDefault(_constant); + +var _detect = require('./detect'); + +var _detect2 = _interopRequireDefault(_detect); + +var _detectLimit = require('./detectLimit'); + +var _detectLimit2 = _interopRequireDefault(_detectLimit); + +var _detectSeries = require('./detectSeries'); + +var _detectSeries2 = _interopRequireDefault(_detectSeries); + +var _dir = require('./dir'); + +var _dir2 = _interopRequireDefault(_dir); + +var _doUntil = require('./doUntil'); + +var _doUntil2 = _interopRequireDefault(_doUntil); + +var _doWhilst = require('./doWhilst'); + +var _doWhilst2 = _interopRequireDefault(_doWhilst); + +var _each = require('./each'); + +var _each2 = _interopRequireDefault(_each); + +var _eachLimit = require('./eachLimit'); + +var _eachLimit2 = _interopRequireDefault(_eachLimit); + +var _eachOf = require('./eachOf'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _eachOfLimit = require('./eachOfLimit'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _eachOfSeries = require('./eachOfSeries'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _eachSeries = require('./eachSeries'); + +var _eachSeries2 = _interopRequireDefault(_eachSeries); + +var _ensureAsync = require('./ensureAsync'); + +var _ensureAsync2 = _interopRequireDefault(_ensureAsync); + +var _every = require('./every'); + +var _every2 = _interopRequireDefault(_every); + +var _everyLimit = require('./everyLimit'); + +var _everyLimit2 = _interopRequireDefault(_everyLimit); + +var _everySeries = require('./everySeries'); + +var _everySeries2 = _interopRequireDefault(_everySeries); + +var _filter = require('./filter'); + +var _filter2 = _interopRequireDefault(_filter); + +var _filterLimit = require('./filterLimit'); + +var _filterLimit2 = _interopRequireDefault(_filterLimit); + +var _filterSeries = require('./filterSeries'); + +var _filterSeries2 = _interopRequireDefault(_filterSeries); + +var _forever = require('./forever'); + +var _forever2 = _interopRequireDefault(_forever); + +var _groupBy = require('./groupBy'); + +var _groupBy2 = _interopRequireDefault(_groupBy); + +var _groupByLimit = require('./groupByLimit'); + +var _groupByLimit2 = _interopRequireDefault(_groupByLimit); + +var _groupBySeries = require('./groupBySeries'); + +var _groupBySeries2 = _interopRequireDefault(_groupBySeries); + +var _log = require('./log'); + +var _log2 = _interopRequireDefault(_log); + +var _map = require('./map'); + +var _map2 = _interopRequireDefault(_map); + +var _mapLimit = require('./mapLimit'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _mapSeries = require('./mapSeries'); + +var _mapSeries2 = _interopRequireDefault(_mapSeries); + +var _mapValues = require('./mapValues'); + +var _mapValues2 = _interopRequireDefault(_mapValues); + +var _mapValuesLimit = require('./mapValuesLimit'); + +var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); + +var _mapValuesSeries = require('./mapValuesSeries'); + +var _mapValuesSeries2 = _interopRequireDefault(_mapValuesSeries); + +var _memoize = require('./memoize'); + +var _memoize2 = _interopRequireDefault(_memoize); + +var _nextTick = require('./nextTick'); + +var _nextTick2 = _interopRequireDefault(_nextTick); + +var _parallel = require('./parallel'); + +var 
_parallel2 = _interopRequireDefault(_parallel); + +var _parallelLimit = require('./parallelLimit'); + +var _parallelLimit2 = _interopRequireDefault(_parallelLimit); + +var _priorityQueue = require('./priorityQueue'); + +var _priorityQueue2 = _interopRequireDefault(_priorityQueue); + +var _queue = require('./queue'); + +var _queue2 = _interopRequireDefault(_queue); + +var _race = require('./race'); + +var _race2 = _interopRequireDefault(_race); + +var _reduce = require('./reduce'); + +var _reduce2 = _interopRequireDefault(_reduce); + +var _reduceRight = require('./reduceRight'); + +var _reduceRight2 = _interopRequireDefault(_reduceRight); + +var _reflect = require('./reflect'); + +var _reflect2 = _interopRequireDefault(_reflect); + +var _reflectAll = require('./reflectAll'); + +var _reflectAll2 = _interopRequireDefault(_reflectAll); + +var _reject = require('./reject'); + +var _reject2 = _interopRequireDefault(_reject); + +var _rejectLimit = require('./rejectLimit'); + +var _rejectLimit2 = _interopRequireDefault(_rejectLimit); + +var _rejectSeries = require('./rejectSeries'); + +var _rejectSeries2 = _interopRequireDefault(_rejectSeries); + +var _retry = require('./retry'); + +var _retry2 = _interopRequireDefault(_retry); + +var _retryable = require('./retryable'); + +var _retryable2 = _interopRequireDefault(_retryable); + +var _seq = require('./seq'); + +var _seq2 = _interopRequireDefault(_seq); + +var _series = require('./series'); + +var _series2 = _interopRequireDefault(_series); + +var _setImmediate = require('./setImmediate'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _some = require('./some'); + +var _some2 = _interopRequireDefault(_some); + +var _someLimit = require('./someLimit'); + +var _someLimit2 = _interopRequireDefault(_someLimit); + +var _someSeries = require('./someSeries'); + +var _someSeries2 = _interopRequireDefault(_someSeries); + +var _sortBy = require('./sortBy'); + +var _sortBy2 = _interopRequireDefault(_sortBy); + +var _timeout = require('./timeout'); + +var _timeout2 = _interopRequireDefault(_timeout); + +var _times = require('./times'); + +var _times2 = _interopRequireDefault(_times); + +var _timesLimit = require('./timesLimit'); + +var _timesLimit2 = _interopRequireDefault(_timesLimit); + +var _timesSeries = require('./timesSeries'); + +var _timesSeries2 = _interopRequireDefault(_timesSeries); + +var _transform = require('./transform'); + +var _transform2 = _interopRequireDefault(_transform); + +var _tryEach = require('./tryEach'); + +var _tryEach2 = _interopRequireDefault(_tryEach); + +var _unmemoize = require('./unmemoize'); + +var _unmemoize2 = _interopRequireDefault(_unmemoize); + +var _until = require('./until'); + +var _until2 = _interopRequireDefault(_until); + +var _waterfall = require('./waterfall'); + +var _waterfall2 = _interopRequireDefault(_waterfall); + +var _whilst = require('./whilst'); + +var _whilst2 = _interopRequireDefault(_whilst); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * An "async function" in the context of Async is an asynchronous function with + * a variable number of parameters, with the final parameter being a callback. + * (`function (arg1, arg2, ..., callback) {}`) + * The final callback is of the form `callback(err, results...)`, which must be + * called once the function is completed. The callback should be called with a + * Error as its first argument to signal that an error occurred. 
+ * Otherwise, if no error occurred, it should be called with `null` as the first + * argument, and any additional `result` arguments that may apply, to signal + * successful completion. + * The callback must be called exactly once, ideally on a later tick of the + * JavaScript event loop. + * + * This type of function is also referred to as a "Node-style async function", + * or a "continuation passing-style function" (CPS). Most of the methods of this + * library are themselves CPS/Node-style async functions, or functions that + * return CPS/Node-style async functions. + * + * Wherever we accept a Node-style async function, we also directly accept an + * [ES2017 `async` function]{@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function}. + * In this case, the `async` function will not be passed a final callback + * argument, and any thrown error will be used as the `err` argument of the + * implicit callback, and the return value will be used as the `result` value. + * (i.e. a `rejected` of the returned Promise becomes the `err` callback + * argument, and a `resolved` value becomes the `result`.) + * + * Note, due to JavaScript limitations, we can only detect native `async` + * functions and not transpilied implementations. + * Your environment must have `async`/`await` support for this to work. + * (e.g. Node > v7.6, or a recent version of a modern browser). + * If you are using `async` functions through a transpiler (e.g. Babel), you + * must still wrap the function with [asyncify]{@link module:Utils.asyncify}, + * because the `async function` will be compiled to an ordinary function that + * returns a promise. + * + * @typedef {Function} AsyncFunction + * @static + */ + +/** + * Async is a utility module which provides straight-forward, powerful functions + * for working with asynchronous JavaScript. Although originally designed for + * use with [Node.js](http://nodejs.org) and installable via + * `npm install --save async`, it can also be used directly in the browser. + * @module async + * @see AsyncFunction + */ + +/** + * A collection of `async` functions for manipulating collections, such as + * arrays and objects. + * @module Collections + */ + +/** + * A collection of `async` functions for controlling the flow through a script. + * @module ControlFlow + */ + +/** + * A collection of `async` utility functions. 
+ * @module Utils + */ + +exports.default = { + apply: _apply2.default, + applyEach: _applyEach2.default, + applyEachSeries: _applyEachSeries2.default, + asyncify: _asyncify2.default, + auto: _auto2.default, + autoInject: _autoInject2.default, + cargo: _cargo2.default, + cargoQueue: _cargoQueue2.default, + compose: _compose2.default, + concat: _concat2.default, + concatLimit: _concatLimit2.default, + concatSeries: _concatSeries2.default, + constant: _constant2.default, + detect: _detect2.default, + detectLimit: _detectLimit2.default, + detectSeries: _detectSeries2.default, + dir: _dir2.default, + doUntil: _doUntil2.default, + doWhilst: _doWhilst2.default, + each: _each2.default, + eachLimit: _eachLimit2.default, + eachOf: _eachOf2.default, + eachOfLimit: _eachOfLimit2.default, + eachOfSeries: _eachOfSeries2.default, + eachSeries: _eachSeries2.default, + ensureAsync: _ensureAsync2.default, + every: _every2.default, + everyLimit: _everyLimit2.default, + everySeries: _everySeries2.default, + filter: _filter2.default, + filterLimit: _filterLimit2.default, + filterSeries: _filterSeries2.default, + forever: _forever2.default, + groupBy: _groupBy2.default, + groupByLimit: _groupByLimit2.default, + groupBySeries: _groupBySeries2.default, + log: _log2.default, + map: _map2.default, + mapLimit: _mapLimit2.default, + mapSeries: _mapSeries2.default, + mapValues: _mapValues2.default, + mapValuesLimit: _mapValuesLimit2.default, + mapValuesSeries: _mapValuesSeries2.default, + memoize: _memoize2.default, + nextTick: _nextTick2.default, + parallel: _parallel2.default, + parallelLimit: _parallelLimit2.default, + priorityQueue: _priorityQueue2.default, + queue: _queue2.default, + race: _race2.default, + reduce: _reduce2.default, + reduceRight: _reduceRight2.default, + reflect: _reflect2.default, + reflectAll: _reflectAll2.default, + reject: _reject2.default, + rejectLimit: _rejectLimit2.default, + rejectSeries: _rejectSeries2.default, + retry: _retry2.default, + retryable: _retryable2.default, + seq: _seq2.default, + series: _series2.default, + setImmediate: _setImmediate2.default, + some: _some2.default, + someLimit: _someLimit2.default, + someSeries: _someSeries2.default, + sortBy: _sortBy2.default, + timeout: _timeout2.default, + times: _times2.default, + timesLimit: _timesLimit2.default, + timesSeries: _timesSeries2.default, + transform: _transform2.default, + tryEach: _tryEach2.default, + unmemoize: _unmemoize2.default, + until: _until2.default, + waterfall: _waterfall2.default, + whilst: _whilst2.default, + + // aliases + all: _every2.default, + allLimit: _everyLimit2.default, + allSeries: _everySeries2.default, + any: _some2.default, + anyLimit: _someLimit2.default, + anySeries: _someSeries2.default, + find: _detect2.default, + findLimit: _detectLimit2.default, + findSeries: _detectSeries2.default, + flatMap: _concat2.default, + flatMapLimit: _concatLimit2.default, + flatMapSeries: _concatSeries2.default, + forEach: _each2.default, + forEachSeries: _eachSeries2.default, + forEachLimit: _eachLimit2.default, + forEachOf: _eachOf2.default, + forEachOfSeries: _eachOfSeries2.default, + forEachOfLimit: _eachOfLimit2.default, + inject: _reduce2.default, + foldl: _reduce2.default, + foldr: _reduceRight2.default, + select: _filter2.default, + selectLimit: _filterLimit2.default, + selectSeries: _filterSeries2.default, + wrapSync: _asyncify2.default, + during: _whilst2.default, + doDuring: _doWhilst2.default +}; +exports.apply = _apply2.default; +exports.applyEach = _applyEach2.default; +exports.applyEachSeries 
= _applyEachSeries2.default; +exports.asyncify = _asyncify2.default; +exports.auto = _auto2.default; +exports.autoInject = _autoInject2.default; +exports.cargo = _cargo2.default; +exports.cargoQueue = _cargoQueue2.default; +exports.compose = _compose2.default; +exports.concat = _concat2.default; +exports.concatLimit = _concatLimit2.default; +exports.concatSeries = _concatSeries2.default; +exports.constant = _constant2.default; +exports.detect = _detect2.default; +exports.detectLimit = _detectLimit2.default; +exports.detectSeries = _detectSeries2.default; +exports.dir = _dir2.default; +exports.doUntil = _doUntil2.default; +exports.doWhilst = _doWhilst2.default; +exports.each = _each2.default; +exports.eachLimit = _eachLimit2.default; +exports.eachOf = _eachOf2.default; +exports.eachOfLimit = _eachOfLimit2.default; +exports.eachOfSeries = _eachOfSeries2.default; +exports.eachSeries = _eachSeries2.default; +exports.ensureAsync = _ensureAsync2.default; +exports.every = _every2.default; +exports.everyLimit = _everyLimit2.default; +exports.everySeries = _everySeries2.default; +exports.filter = _filter2.default; +exports.filterLimit = _filterLimit2.default; +exports.filterSeries = _filterSeries2.default; +exports.forever = _forever2.default; +exports.groupBy = _groupBy2.default; +exports.groupByLimit = _groupByLimit2.default; +exports.groupBySeries = _groupBySeries2.default; +exports.log = _log2.default; +exports.map = _map2.default; +exports.mapLimit = _mapLimit2.default; +exports.mapSeries = _mapSeries2.default; +exports.mapValues = _mapValues2.default; +exports.mapValuesLimit = _mapValuesLimit2.default; +exports.mapValuesSeries = _mapValuesSeries2.default; +exports.memoize = _memoize2.default; +exports.nextTick = _nextTick2.default; +exports.parallel = _parallel2.default; +exports.parallelLimit = _parallelLimit2.default; +exports.priorityQueue = _priorityQueue2.default; +exports.queue = _queue2.default; +exports.race = _race2.default; +exports.reduce = _reduce2.default; +exports.reduceRight = _reduceRight2.default; +exports.reflect = _reflect2.default; +exports.reflectAll = _reflectAll2.default; +exports.reject = _reject2.default; +exports.rejectLimit = _rejectLimit2.default; +exports.rejectSeries = _rejectSeries2.default; +exports.retry = _retry2.default; +exports.retryable = _retryable2.default; +exports.seq = _seq2.default; +exports.series = _series2.default; +exports.setImmediate = _setImmediate2.default; +exports.some = _some2.default; +exports.someLimit = _someLimit2.default; +exports.someSeries = _someSeries2.default; +exports.sortBy = _sortBy2.default; +exports.timeout = _timeout2.default; +exports.times = _times2.default; +exports.timesLimit = _timesLimit2.default; +exports.timesSeries = _timesSeries2.default; +exports.transform = _transform2.default; +exports.tryEach = _tryEach2.default; +exports.unmemoize = _unmemoize2.default; +exports.until = _until2.default; +exports.waterfall = _waterfall2.default; +exports.whilst = _whilst2.default; +exports.all = _every2.default; +exports.allLimit = _everyLimit2.default; +exports.allSeries = _everySeries2.default; +exports.any = _some2.default; +exports.anyLimit = _someLimit2.default; +exports.anySeries = _someSeries2.default; +exports.find = _detect2.default; +exports.findLimit = _detectLimit2.default; +exports.findSeries = _detectSeries2.default; +exports.flatMap = _concat2.default; +exports.flatMapLimit = _concatLimit2.default; +exports.flatMapSeries = _concatSeries2.default; +exports.forEach = _each2.default; +exports.forEachSeries = 
_eachSeries2.default; +exports.forEachLimit = _eachLimit2.default; +exports.forEachOf = _eachOf2.default; +exports.forEachOfSeries = _eachOfSeries2.default; +exports.forEachOfLimit = _eachOfLimit2.default; +exports.inject = _reduce2.default; +exports.foldl = _reduce2.default; +exports.foldr = _reduceRight2.default; +exports.select = _filter2.default; +exports.selectLimit = _filterLimit2.default; +exports.selectSeries = _filterSeries2.default; +exports.wrapSync = _asyncify2.default; +exports.during = _whilst2.default; +exports.doDuring = _doWhilst2.default; \ No newline at end of file diff --git a/backend/node_modules/async/inject.js b/backend/node_modules/async/inject.js new file mode 100644 index 00000000..8a69548a --- /dev/null +++ b/backend/node_modules/async/inject.js @@ -0,0 +1,153 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Reduces `coll` into a single value using an async `iteratee` to return each + * successive step. `memo` is the initial state of the reduction. This function + * only operates in series. + * + * For performance reasons, it may make sense to split a call to this function + * into a parallel map, and then use the normal `Array.prototype.reduce` on the + * results. This function is for situations where each step in the reduction + * needs to be async; if you can get the data before reducing it, then it's + * probably a good idea to do so. + * + * @name reduce + * @static + * @memberOf module:Collections + * @method + * @alias inject + * @alias foldl + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; + * + * // asynchronous function that computes the file size in bytes + * // file size is added to the memoized value, then returned + * function getFileSizeInBytes(memo, file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, memo + stat.size); + * }); + * } + * + * // Using callbacks + * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.reduce(fileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = (0, _once2.default)(callback); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfSeries2.default)(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +exports.default = (0, _awaitify2.default)(reduce, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/DoublyLinkedList.js b/backend/node_modules/async/internal/DoublyLinkedList.js new file mode 100644 index 00000000..419ce44b --- /dev/null +++ b/backend/node_modules/async/internal/DoublyLinkedList.js @@ -0,0 +1,92 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +// Simple doubly linked list (https://en.wikipedia.org/wiki/Doubly_linked_list) implementation +// used for queues. This implementation assumes that the node provided by the user can be modified +// to adjust the next and last properties. We implement only the minimal functionality +// for queue support. 
+class DLL { + constructor() { + this.head = this.tail = null; + this.length = 0; + } + + removeLink(node) { + if (node.prev) node.prev.next = node.next;else this.head = node.next; + if (node.next) node.next.prev = node.prev;else this.tail = node.prev; + + node.prev = node.next = null; + this.length -= 1; + return node; + } + + empty() { + while (this.head) this.shift(); + return this; + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next) node.next.prev = newNode;else this.tail = newNode; + node.next = newNode; + this.length += 1; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) node.prev.next = newNode;else this.head = newNode; + node.prev = newNode; + this.length += 1; + } + + unshift(node) { + if (this.head) this.insertBefore(this.head, node);else setInitial(this, node); + } + + push(node) { + if (this.tail) this.insertAfter(this.tail, node);else setInitial(this, node); + } + + shift() { + return this.head && this.removeLink(this.head); + } + + pop() { + return this.tail && this.removeLink(this.tail); + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator]() { + var cur = this.head; + while (cur) { + yield cur.data; + cur = cur.next; + } + } + + remove(testFn) { + var curr = this.head; + while (curr) { + var { next } = curr; + if (testFn(curr)) { + this.removeLink(curr); + } + curr = next; + } + return this; + } +} + +exports.default = DLL; +function setInitial(dll, node) { + dll.length = 1; + dll.head = dll.tail = node; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/Heap.js b/backend/node_modules/async/internal/Heap.js new file mode 100644 index 00000000..7867c928 --- /dev/null +++ b/backend/node_modules/async/internal/Heap.js @@ -0,0 +1,120 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +// Binary min-heap implementation used for priority queue. +// Implementation is stable, i.e. 
push time is considered for equal priorities +class Heap { + constructor() { + this.heap = []; + this.pushCount = Number.MIN_SAFE_INTEGER; + } + + get length() { + return this.heap.length; + } + + empty() { + this.heap = []; + return this; + } + + percUp(index) { + let p; + + while (index > 0 && smaller(this.heap[index], this.heap[p = parent(index)])) { + let t = this.heap[index]; + this.heap[index] = this.heap[p]; + this.heap[p] = t; + + index = p; + } + } + + percDown(index) { + let l; + + while ((l = leftChi(index)) < this.heap.length) { + if (l + 1 < this.heap.length && smaller(this.heap[l + 1], this.heap[l])) { + l = l + 1; + } + + if (smaller(this.heap[index], this.heap[l])) { + break; + } + + let t = this.heap[index]; + this.heap[index] = this.heap[l]; + this.heap[l] = t; + + index = l; + } + } + + push(node) { + node.pushCount = ++this.pushCount; + this.heap.push(node); + this.percUp(this.heap.length - 1); + } + + unshift(node) { + return this.heap.push(node); + } + + shift() { + let [top] = this.heap; + + this.heap[0] = this.heap[this.heap.length - 1]; + this.heap.pop(); + this.percDown(0); + + return top; + } + + toArray() { + return [...this]; + } + + *[Symbol.iterator]() { + for (let i = 0; i < this.heap.length; i++) { + yield this.heap[i].data; + } + } + + remove(testFn) { + let j = 0; + for (let i = 0; i < this.heap.length; i++) { + if (!testFn(this.heap[i])) { + this.heap[j] = this.heap[i]; + j++; + } + } + + this.heap.splice(j); + + for (let i = parent(this.heap.length - 1); i >= 0; i--) { + this.percDown(i); + } + + return this; + } +} + +exports.default = Heap; +function leftChi(i) { + return (i << 1) + 1; +} + +function parent(i) { + return (i + 1 >> 1) - 1; +} + +function smaller(x, y) { + if (x.priority !== y.priority) { + return x.priority < y.priority; + } else { + return x.pushCount < y.pushCount; + } +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/applyEach.js b/backend/node_modules/async/internal/applyEach.js new file mode 100644 index 00000000..54449126 --- /dev/null +++ b/backend/node_modules/async/internal/applyEach.js @@ -0,0 +1,29 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (eachfn) { + return function applyEach(fns, ...callArgs) { + const go = (0, _awaitify2.default)(function (callback) { + var that = this; + return eachfn(fns, (fn, cb) => { + (0, _wrapAsync2.default)(fn).apply(that, callArgs.concat(cb)); + }, callback); + }); + return go; + }; +}; + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/asyncEachOfLimit.js b/backend/node_modules/async/internal/asyncEachOfLimit.js new file mode 100644 index 00000000..34dd82bf --- /dev/null +++ b/backend/node_modules/async/internal/asyncEachOfLimit.js @@ -0,0 +1,75 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = asyncEachOfLimit; + +var _breakLoop = require('./breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +// for async generators +function asyncEachOfLimit(generator, limit, iteratee, callback) { + let done = false; + let canceled = false; + let awaiting = false; + let running = 0; + let idx = 0; + + function replenish() { + //console.log('replenish') + if (running >= limit || awaiting || done) return; + //console.log('replenish awaiting') + awaiting = true; + generator.next().then(({ value, done: iterDone }) => { + //console.log('got value', value) + if (canceled || done) return; + awaiting = false; + if (iterDone) { + done = true; + if (running <= 0) { + //console.log('done nextCb') + callback(null); + } + return; + } + running++; + iteratee(value, idx, iterateeCallback); + idx++; + replenish(); + }).catch(handleError); + } + + function iterateeCallback(err, result) { + //console.log('iterateeCallback') + running -= 1; + if (canceled) return; + if (err) return handleError(err); + + if (err === false) { + done = true; + canceled = true; + return; + } + + if (result === _breakLoop2.default || done && running <= 0) { + done = true; + //console.log('done iterCb') + return callback(null); + } + replenish(); + } + + function handleError(err) { + if (canceled) return; + awaiting = false; + done = true; + callback(err); + } + + replenish(); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/awaitify.js b/backend/node_modules/async/internal/awaitify.js new file mode 100644 index 00000000..bb7a609a --- /dev/null +++ b/backend/node_modules/async/internal/awaitify.js @@ -0,0 +1,28 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = awaitify; +// conditionally promisify a function. +// only return a promise if a callback is omitted +function awaitify(asyncFn, arity) { + if (!arity) arity = asyncFn.length; + if (!arity) throw new Error('arity is undefined'); + function awaitable(...args) { + if (typeof args[arity - 1] === 'function') { + return asyncFn.apply(this, args); + } + + return new Promise((resolve, reject) => { + args[arity - 1] = (err, ...cbArgs) => { + if (err) return reject(err); + resolve(cbArgs.length > 1 ? cbArgs : cbArgs[0]); + }; + asyncFn.apply(this, args); + }); + } + + return awaitable; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/breakLoop.js b/backend/node_modules/async/internal/breakLoop.js new file mode 100644 index 00000000..87413dd0 --- /dev/null +++ b/backend/node_modules/async/internal/breakLoop.js @@ -0,0 +1,10 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +// A temporary value used to identify if the loop should be broken. +// See #1064, #1293 +const breakLoop = {}; +exports.default = breakLoop; +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/consoleFunc.js b/backend/node_modules/async/internal/consoleFunc.js new file mode 100644 index 00000000..748d54b7 --- /dev/null +++ b/backend/node_modules/async/internal/consoleFunc.js @@ -0,0 +1,31 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = consoleFunc; + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function consoleFunc(name) { + return (fn, ...args) => (0, _wrapAsync2.default)(fn)(...args, (err, ...resultArgs) => { + /* istanbul ignore else */ + if (typeof console === 'object') { + /* istanbul ignore else */ + if (err) { + /* istanbul ignore else */ + if (console.error) { + console.error(err); + } + } else if (console[name]) { + /* istanbul ignore else */ + resultArgs.forEach(x => console[name](x)); + } + } + }); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/createTester.js b/backend/node_modules/async/internal/createTester.js new file mode 100644 index 00000000..cafdf62e --- /dev/null +++ b/backend/node_modules/async/internal/createTester.js @@ -0,0 +1,40 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _createTester; + +var _breakLoop = require('./breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _createTester(check, getResult) { + return (eachfn, arr, _iteratee, cb) => { + var testPassed = false; + var testResult; + const iteratee = (0, _wrapAsync2.default)(_iteratee); + eachfn(arr, (value, _, callback) => { + iteratee(value, (err, result) => { + if (err || err === false) return callback(err); + + if (check(result) && !testResult) { + testPassed = true; + testResult = getResult(true, value); + return callback(null, _breakLoop2.default); + } + callback(); + }); + }, err => { + if (err) return cb(err); + cb(null, testPassed ? testResult : getResult(false)); + }); + }; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/eachOfLimit.js b/backend/node_modules/async/internal/eachOfLimit.js new file mode 100644 index 00000000..ceed60f4 --- /dev/null +++ b/backend/node_modules/async/internal/eachOfLimit.js @@ -0,0 +1,90 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _once = require('./once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _iterator = require('./iterator.js'); + +var _iterator2 = _interopRequireDefault(_iterator); + +var _onlyOnce = require('./onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./wrapAsync.js'); + +var _asyncEachOfLimit = require('./asyncEachOfLimit.js'); + +var _asyncEachOfLimit2 = _interopRequireDefault(_asyncEachOfLimit); + +var _breakLoop = require('./breakLoop.js'); + +var _breakLoop2 = _interopRequireDefault(_breakLoop); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +exports.default = limit => { + return (obj, iteratee, callback) => { + callback = (0, _once2.default)(callback); + if (limit <= 0) { + throw new RangeError('concurrency limit cannot be less than 1'); + } + if (!obj) { + return callback(null); + } + if ((0, _wrapAsync.isAsyncGenerator)(obj)) { + return (0, _asyncEachOfLimit2.default)(obj, limit, iteratee, callback); + } + if ((0, _wrapAsync.isAsyncIterable)(obj)) { + return (0, _asyncEachOfLimit2.default)(obj[Symbol.asyncIterator](), limit, iteratee, callback); + } + var nextElem = (0, _iterator2.default)(obj); + var done = false; + var canceled = false; + var running = 0; + var looping = false; + + function iterateeCallback(err, value) { + if (canceled) return; + running -= 1; + if (err) { + done = true; + callback(err); + } else if (err === false) { + done = true; + canceled = true; + } else if (value === _breakLoop2.default || done && running <= 0) { + done = true; + return callback(null); + } else if (!looping) { + replenish(); + } + } + + function replenish() { + looping = true; + while (running < limit && !done) { + var elem = nextElem(); + if (elem === null) { + done = true; + if (running <= 0) { + callback(null); + } + return; + } + running += 1; + iteratee(elem.value, elem.key, (0, _onlyOnce2.default)(iterateeCallback)); + } + looping = false; + } + + replenish(); + }; +}; + +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/filter.js b/backend/node_modules/async/internal/filter.js new file mode 100644 index 00000000..065c211e --- /dev/null +++ b/backend/node_modules/async/internal/filter.js @@ -0,0 +1,55 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _filter; + +var _isArrayLike = require('./isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function filterArray(eachfn, arr, iteratee, callback) { + var truthValues = new Array(arr.length); + eachfn(arr, (x, index, iterCb) => { + iteratee(x, (err, v) => { + truthValues[index] = !!v; + iterCb(err); + }); + }, err => { + if (err) return callback(err); + var results = []; + for (var i = 0; i < arr.length; i++) { + if (truthValues[i]) results.push(arr[i]); + } + callback(null, results); + }); +} + +function filterGeneric(eachfn, coll, iteratee, callback) { + var results = []; + eachfn(coll, (x, index, iterCb) => { + iteratee(x, (err, v) => { + if (err) return iterCb(err); + if (v) { + results.push({ index, value: x }); + } + iterCb(err); + }); + }, err => { + if (err) return callback(err); + callback(null, results.sort((a, b) => a.index - b.index).map(v => v.value)); + }); +} + +function _filter(eachfn, coll, iteratee, callback) { + var filter = (0, _isArrayLike2.default)(coll) ? 
filterArray : filterGeneric; + return filter(eachfn, coll, (0, _wrapAsync2.default)(iteratee), callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/getIterator.js b/backend/node_modules/async/internal/getIterator.js new file mode 100644 index 00000000..f518fce8 --- /dev/null +++ b/backend/node_modules/async/internal/getIterator.js @@ -0,0 +1,11 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (coll) { + return coll[Symbol.iterator] && coll[Symbol.iterator](); +}; + +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/initialParams.js b/backend/node_modules/async/internal/initialParams.js new file mode 100644 index 00000000..04c0eff7 --- /dev/null +++ b/backend/node_modules/async/internal/initialParams.js @@ -0,0 +1,14 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (fn) { + return function (...args /*, callback*/) { + var callback = args.pop(); + return fn.call(this, args, callback); + }; +}; + +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/isArrayLike.js b/backend/node_modules/async/internal/isArrayLike.js new file mode 100644 index 00000000..a4c4c8a5 --- /dev/null +++ b/backend/node_modules/async/internal/isArrayLike.js @@ -0,0 +1,10 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = isArrayLike; +function isArrayLike(value) { + return value && typeof value.length === 'number' && value.length >= 0 && value.length % 1 === 0; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/iterator.js b/backend/node_modules/async/internal/iterator.js new file mode 100644 index 00000000..5778b1e8 --- /dev/null +++ b/backend/node_modules/async/internal/iterator.js @@ -0,0 +1,57 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = createIterator; + +var _isArrayLike = require('./isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _getIterator = require('./getIterator.js'); + +var _getIterator2 = _interopRequireDefault(_getIterator); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function createArrayIterator(coll) { + var i = -1; + var len = coll.length; + return function next() { + return ++i < len ? { value: coll[i], key: i } : null; + }; +} + +function createES2015Iterator(iterator) { + var i = -1; + return function next() { + var item = iterator.next(); + if (item.done) return null; + i++; + return { value: item.value, key: i }; + }; +} + +function createObjectIterator(obj) { + var okeys = obj ? Object.keys(obj) : []; + var i = -1; + var len = okeys.length; + return function next() { + var key = okeys[++i]; + if (key === '__proto__') { + return next(); + } + return i < len ? { value: obj[key], key } : null; + }; +} + +function createIterator(coll) { + if ((0, _isArrayLike2.default)(coll)) { + return createArrayIterator(coll); + } + + var iterator = (0, _getIterator2.default)(coll); + return iterator ? 
createES2015Iterator(iterator) : createObjectIterator(coll); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/map.js b/backend/node_modules/async/internal/map.js new file mode 100644 index 00000000..acab1e72 --- /dev/null +++ b/backend/node_modules/async/internal/map.js @@ -0,0 +1,30 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _asyncMap; + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function _asyncMap(eachfn, arr, iteratee, callback) { + arr = arr || []; + var results = []; + var counter = 0; + var _iteratee = (0, _wrapAsync2.default)(iteratee); + + return eachfn(arr, (value, _, iterCb) => { + var index = counter++; + _iteratee(value, (err, v) => { + results[index] = v; + iterCb(err); + }); + }, err => { + callback(err, results); + }); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/once.js b/backend/node_modules/async/internal/once.js new file mode 100644 index 00000000..a8b57926 --- /dev/null +++ b/backend/node_modules/async/internal/once.js @@ -0,0 +1,17 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = once; +function once(fn) { + function wrapper(...args) { + if (fn === null) return; + var callFn = fn; + fn = null; + callFn.apply(this, args); + } + Object.assign(wrapper, fn); + return wrapper; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/onlyOnce.js b/backend/node_modules/async/internal/onlyOnce.js new file mode 100644 index 00000000..c95a92d4 --- /dev/null +++ b/backend/node_modules/async/internal/onlyOnce.js @@ -0,0 +1,15 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = onlyOnce; +function onlyOnce(fn) { + return function (...args) { + if (fn === null) throw new Error("Callback was already called."); + var callFn = fn; + fn = null; + callFn.apply(this, args); + }; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/parallel.js b/backend/node_modules/async/internal/parallel.js new file mode 100644 index 00000000..57fbd0d0 --- /dev/null +++ b/backend/node_modules/async/internal/parallel.js @@ -0,0 +1,34 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _isArrayLike = require('./isArrayLike.js'); + +var _isArrayLike2 = _interopRequireDefault(_isArrayLike); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +exports.default = (0, _awaitify2.default)((eachfn, tasks, callback) => { + var results = (0, _isArrayLike2.default)(tasks) ? 
[] : {}; + + eachfn(tasks, (task, key, taskCb) => { + (0, _wrapAsync2.default)(task)((err, ...result) => { + if (result.length < 2) { + [result] = result; + } + results[key] = result; + taskCb(err); + }); + }, err => callback(err, results)); +}, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/promiseCallback.js b/backend/node_modules/async/internal/promiseCallback.js new file mode 100644 index 00000000..17a83016 --- /dev/null +++ b/backend/node_modules/async/internal/promiseCallback.js @@ -0,0 +1,23 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +const PROMISE_SYMBOL = Symbol('promiseCallback'); + +function promiseCallback() { + let resolve, reject; + function callback(err, ...args) { + if (err) return reject(err); + resolve(args.length > 1 ? args : args[0]); + } + + callback[PROMISE_SYMBOL] = new Promise((res, rej) => { + resolve = res, reject = rej; + }); + + return callback; +} + +exports.promiseCallback = promiseCallback; +exports.PROMISE_SYMBOL = PROMISE_SYMBOL; \ No newline at end of file diff --git a/backend/node_modules/async/internal/queue.js b/backend/node_modules/async/internal/queue.js new file mode 100644 index 00000000..7414e03b --- /dev/null +++ b/backend/node_modules/async/internal/queue.js @@ -0,0 +1,294 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = queue; + +var _onlyOnce = require('./onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _setImmediate = require('./setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _DoublyLinkedList = require('./DoublyLinkedList.js'); + +var _DoublyLinkedList2 = _interopRequireDefault(_DoublyLinkedList); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function queue(worker, concurrency, payload) { + if (concurrency == null) { + concurrency = 1; + } else if (concurrency === 0) { + throw new RangeError('Concurrency must not be zero'); + } + + var _worker = (0, _wrapAsync2.default)(worker); + var numRunning = 0; + var workersList = []; + const events = { + error: [], + drain: [], + saturated: [], + unsaturated: [], + empty: [] + }; + + function on(event, handler) { + events[event].push(handler); + } + + function once(event, handler) { + const handleAndRemove = (...args) => { + off(event, handleAndRemove); + handler(...args); + }; + events[event].push(handleAndRemove); + } + + function off(event, handler) { + if (!event) return Object.keys(events).forEach(ev => events[ev] = []); + if (!handler) return events[event] = []; + events[event] = events[event].filter(ev => ev !== handler); + } + + function trigger(event, ...args) { + events[event].forEach(handler => handler(...args)); + } + + var processingScheduled = false; + function _insert(data, insertAtFront, rejectOnError, callback) { + if (callback != null && typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + + var res, rej; + function promiseCallback(err, ...args) { + // we don't care about the error, let the global error handler + // deal with it + if (err) return rejectOnError ? rej(err) : res(); + if (args.length <= 1) return res(args[0]); + res(args); + } + + var item = q._createTaskItem(data, rejectOnError ? 
promiseCallback : callback || promiseCallback); + + if (insertAtFront) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + + if (!processingScheduled) { + processingScheduled = true; + (0, _setImmediate2.default)(() => { + processingScheduled = false; + q.process(); + }); + } + + if (rejectOnError || !callback) { + return new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }); + } + } + + function _createCB(tasks) { + return function (err, ...args) { + numRunning -= 1; + + for (var i = 0, l = tasks.length; i < l; i++) { + var task = tasks[i]; + + var index = workersList.indexOf(task); + if (index === 0) { + workersList.shift(); + } else if (index > 0) { + workersList.splice(index, 1); + } + + task.callback(err, ...args); + + if (err != null) { + trigger('error', err, task.data); + } + } + + if (numRunning <= q.concurrency - q.buffer) { + trigger('unsaturated'); + } + + if (q.idle()) { + trigger('drain'); + } + q.process(); + }; + } + + function _maybeDrain(data) { + if (data.length === 0 && q.idle()) { + // call drain immediately if there are no tasks + (0, _setImmediate2.default)(() => trigger('drain')); + return true; + } + return false; + } + + const eventMethod = name => handler => { + if (!handler) { + return new Promise((resolve, reject) => { + once(name, (err, data) => { + if (err) return reject(err); + resolve(data); + }); + }); + } + off(name); + on(name, handler); + }; + + var isProcessing = false; + var q = { + _tasks: new _DoublyLinkedList2.default(), + _createTaskItem(data, callback) { + return { + data, + callback + }; + }, + *[Symbol.iterator]() { + yield* q._tasks[Symbol.iterator](); + }, + concurrency, + payload, + buffer: concurrency / 4, + started: false, + paused: false, + push(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, false, false, callback)); + } + return _insert(data, false, false, callback); + }, + pushAsync(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, false, true, callback)); + } + return _insert(data, false, true, callback); + }, + kill() { + off(); + q._tasks.empty(); + }, + unshift(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, true, false, callback)); + } + return _insert(data, true, false, callback); + }, + unshiftAsync(data, callback) { + if (Array.isArray(data)) { + if (_maybeDrain(data)) return; + return data.map(datum => _insert(datum, true, true, callback)); + } + return _insert(data, true, true, callback); + }, + remove(testFn) { + q._tasks.remove(testFn); + }, + process() { + // Avoid trying to start too many processing operations. This can occur + // when callbacks resolve synchronously (#1267). 
+ if (isProcessing) { + return; + } + isProcessing = true; + while (!q.paused && numRunning < q.concurrency && q._tasks.length) { + var tasks = [], + data = []; + var l = q._tasks.length; + if (q.payload) l = Math.min(l, q.payload); + for (var i = 0; i < l; i++) { + var node = q._tasks.shift(); + tasks.push(node); + workersList.push(node); + data.push(node.data); + } + + numRunning += 1; + + if (q._tasks.length === 0) { + trigger('empty'); + } + + if (numRunning === q.concurrency) { + trigger('saturated'); + } + + var cb = (0, _onlyOnce2.default)(_createCB(tasks)); + _worker(data, cb); + } + isProcessing = false; + }, + length() { + return q._tasks.length; + }, + running() { + return numRunning; + }, + workersList() { + return workersList; + }, + idle() { + return q._tasks.length + numRunning === 0; + }, + pause() { + q.paused = true; + }, + resume() { + if (q.paused === false) { + return; + } + q.paused = false; + (0, _setImmediate2.default)(q.process); + } + }; + // define these as fixed properties, so people get useful errors when updating + Object.defineProperties(q, { + saturated: { + writable: false, + value: eventMethod('saturated') + }, + unsaturated: { + writable: false, + value: eventMethod('unsaturated') + }, + empty: { + writable: false, + value: eventMethod('empty') + }, + drain: { + writable: false, + value: eventMethod('drain') + }, + error: { + writable: false, + value: eventMethod('error') + } + }); + return q; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/range.js b/backend/node_modules/async/internal/range.js new file mode 100644 index 00000000..cc7b3a9c --- /dev/null +++ b/backend/node_modules/async/internal/range.js @@ -0,0 +1,14 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = range; +function range(size) { + var result = Array(size); + while (size--) { + result[size] = size; + } + return result; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/reject.js b/backend/node_modules/async/internal/reject.js new file mode 100644 index 00000000..9d9bc80b --- /dev/null +++ b/backend/node_modules/async/internal/reject.js @@ -0,0 +1,26 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reject; + +var _filter = require('./filter.js'); + +var _filter2 = _interopRequireDefault(_filter); + +var _wrapAsync = require('./wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function reject(eachfn, arr, _iteratee, callback) { + const iteratee = (0, _wrapAsync2.default)(_iteratee); + return (0, _filter2.default)(eachfn, arr, (value, cb) => { + iteratee(value, (err, v) => { + cb(err, !v); + }); + }, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/setImmediate.js b/backend/node_modules/async/internal/setImmediate.js new file mode 100644 index 00000000..513efd13 --- /dev/null +++ b/backend/node_modules/async/internal/setImmediate.js @@ -0,0 +1,34 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.fallback = fallback; +exports.wrap = wrap; +/* istanbul ignore file */ + +var hasQueueMicrotask = exports.hasQueueMicrotask = typeof queueMicrotask === 'function' && queueMicrotask; +var hasSetImmediate = exports.hasSetImmediate = typeof setImmediate === 'function' && setImmediate; +var hasNextTick = exports.hasNextTick = typeof process === 'object' && typeof process.nextTick === 'function'; + +function fallback(fn) { + setTimeout(fn, 0); +} + +function wrap(defer) { + return (fn, ...args) => defer(() => fn(...args)); +} + +var _defer; + +if (hasQueueMicrotask) { + _defer = queueMicrotask; +} else if (hasSetImmediate) { + _defer = setImmediate; +} else if (hasNextTick) { + _defer = process.nextTick; +} else { + _defer = fallback; +} + +exports.default = wrap(_defer); \ No newline at end of file diff --git a/backend/node_modules/async/internal/withoutIndex.js b/backend/node_modules/async/internal/withoutIndex.js new file mode 100644 index 00000000..fa91c2d5 --- /dev/null +++ b/backend/node_modules/async/internal/withoutIndex.js @@ -0,0 +1,10 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = _withoutIndex; +function _withoutIndex(iteratee) { + return (value, index, callback) => iteratee(value, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/internal/wrapAsync.js b/backend/node_modules/async/internal/wrapAsync.js new file mode 100644 index 00000000..ad4d6198 --- /dev/null +++ b/backend/node_modules/async/internal/wrapAsync.js @@ -0,0 +1,34 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.isAsyncIterable = exports.isAsyncGenerator = exports.isAsync = undefined; + +var _asyncify = require('../asyncify.js'); + +var _asyncify2 = _interopRequireDefault(_asyncify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function isAsync(fn) { + return fn[Symbol.toStringTag] === 'AsyncFunction'; +} + +function isAsyncGenerator(fn) { + return fn[Symbol.toStringTag] === 'AsyncGenerator'; +} + +function isAsyncIterable(obj) { + return typeof obj[Symbol.asyncIterator] === 'function'; +} + +function wrapAsync(asyncFn) { + if (typeof asyncFn !== 'function') throw new Error('expected a function'); + return isAsync(asyncFn) ? 
(0, _asyncify2.default)(asyncFn) : asyncFn; +} + +exports.default = wrapAsync; +exports.isAsync = isAsync; +exports.isAsyncGenerator = isAsyncGenerator; +exports.isAsyncIterable = isAsyncIterable; \ No newline at end of file diff --git a/backend/node_modules/async/log.js b/backend/node_modules/async/log.js new file mode 100644 index 00000000..332b9da7 --- /dev/null +++ b/backend/node_modules/async/log.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _consoleFunc = require('./internal/consoleFunc.js'); + +var _consoleFunc2 = _interopRequireDefault(_consoleFunc); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Logs the result of an `async` function to the `console`. Only works in + * Node.js or in browsers that support `console.log` and `console.error` (such + * as FF and Chrome). If multiple arguments are returned from the async + * function, `console.log` is called on each argument in order. + * + * @name log + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} function - The function you want to eventually apply + * all arguments to. + * @param {...*} arguments... - Any number of arguments to apply to the function. + * @example + * + * // in a module + * var hello = function(name, callback) { + * setTimeout(function() { + * callback(null, 'hello ' + name); + * }, 1000); + * }; + * + * // in the node repl + * node> async.log(hello, 'world'); + * 'hello world' + */ +exports.default = (0, _consoleFunc2.default)('log'); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/map.js b/backend/node_modules/async/map.js new file mode 100644 index 00000000..c4b7a5ac --- /dev/null +++ b/backend/node_modules/async/map.js @@ -0,0 +1,142 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map2 = require('./internal/map.js'); + +var _map3 = _interopRequireDefault(_map2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Produces a new collection of values by mapping each value in `coll` through + * the `iteratee` function. The `iteratee` is called with an item from `coll` + * and a callback for when it has finished processing. Each of these callbacks + * takes 2 arguments: an `error`, and the transformed item from `coll`. If + * `iteratee` passes an error to its callback, the main `callback` (for the + * `map` function) is immediately called with the error. + * + * Note, that since this function applies the `iteratee` to each item in + * parallel, there is no guarantee that the `iteratee` functions will complete + * in order. However, the results array will be in the same order as the + * original `coll`. + * + * If `map` is passed an Object, the results will be an Array. The results + * will roughly be in the order of the original Objects' keys (but this can + * vary across JavaScript engines). + * + * @name map + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. 
+ * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an Array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file4.txt']; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.map(fileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes, function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * }); + * + * // Using Promises + * async.map(fileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.map(withMissingFileList, getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.map(fileList, getFileSizeInBytes); + * console.log(results); + * // results is now an array of the file size in bytes for each file, e.g. + * // [ 1000, 2000, 3000] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let results = await async.map(withMissingFileList, getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function map(coll, iteratee, callback) { + return (0, _map3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(map, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/mapLimit.js b/backend/node_modules/async/mapLimit.js new file mode 100644 index 00000000..6ad35724 --- /dev/null +++ b/backend/node_modules/async/mapLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map2 = require('./internal/map.js'); + +var _map3 = _interopRequireDefault(_map2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [`map`]{@link module:Collections.map} but runs a maximum of `limit` async operations at a time. + * + * @name mapLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapLimit(coll, limit, iteratee, callback) { + return (0, _map3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(mapLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/mapSeries.js b/backend/node_modules/async/mapSeries.js new file mode 100644 index 00000000..8dfdd8aa --- /dev/null +++ b/backend/node_modules/async/mapSeries.js @@ -0,0 +1,44 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map2 = require('./internal/map.js'); + +var _map3 = _interopRequireDefault(_map2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`map`]{@link module:Collections.map} but runs only a single async operation at a time. + * + * @name mapSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.map]{@link module:Collections.map} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with the transformed item. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. Results is an array of the + * transformed items from the `coll`. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function mapSeries(coll, iteratee, callback) { + return (0, _map3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(mapSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/mapValues.js b/backend/node_modules/async/mapValues.js new file mode 100644 index 00000000..3d0470e3 --- /dev/null +++ b/backend/node_modules/async/mapValues.js @@ -0,0 +1,152 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = mapValues; + +var _mapValuesLimit = require('./mapValuesLimit.js'); + +var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * A relative of [`map`]{@link module:Collections.map}, designed for use with objects. + * + * Produces a new Object by mapping each value of `obj` through the `iteratee` + * function. The `iteratee` is called each `value` and `key` from `obj` and a + * callback for when it has finished processing. Each of these callbacks takes + * two arguments: an `error`, and the transformed item from `obj`. If `iteratee` + * passes an error to its callback, the main `callback` (for the `mapValues` + * function) is immediately called with the error. + * + * Note, the order of the keys in the result is not guaranteed. The keys will + * be roughly in the order they complete, (but this is very engine-specific) + * + * @name mapValues + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file3.txt' + * }; + * + * const withMissingFileMap = { + * f1: 'file1.txt', + * f2: 'file2.txt', + * f3: 'file4.txt' + * }; + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, key, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.mapValues(fileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * }); + * + * // Error handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.mapValues(fileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. + * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * }).catch (err => { + * console.log(err); + * }); + * + * // Error Handling + * async.mapValues(withMissingFileMap, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch (err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.mapValues(fileMap, getFileSizeInBytes); + * console.log(result); + * // result is now a map of file size in bytes for each file, e.g. 
+ * // { + * // f1: 1000, + * // f2: 2000, + * // f3: 3000 + * // } + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.mapValues(withMissingFileMap, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function mapValues(obj, iteratee, callback) { + return (0, _mapValuesLimit2.default)(obj, Infinity, iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/mapValuesLimit.js b/backend/node_modules/async/mapValuesLimit.js new file mode 100644 index 00000000..f59e36f2 --- /dev/null +++ b/backend/node_modules/async/mapValuesLimit.js @@ -0,0 +1,61 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs a maximum of `limit` async operations at a + * time. + * + * @name mapValuesLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesLimit(obj, limit, iteratee, callback) { + callback = (0, _once2.default)(callback); + var newObj = {}; + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfLimit2.default)(limit)(obj, (val, key, next) => { + _iteratee(val, key, (err, result) => { + if (err) return next(err); + newObj[key] = result; + next(err); + }); + }, err => callback(err, newObj)); +} + +exports.default = (0, _awaitify2.default)(mapValuesLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/mapValuesSeries.js b/backend/node_modules/async/mapValuesSeries.js new file mode 100644 index 00000000..5f05bf21 --- /dev/null +++ b/backend/node_modules/async/mapValuesSeries.js @@ -0,0 +1,37 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = mapValuesSeries; + +var _mapValuesLimit = require('./mapValuesLimit.js'); + +var _mapValuesLimit2 = _interopRequireDefault(_mapValuesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [`mapValues`]{@link module:Collections.mapValues} but runs only a single async operation at a time. + * + * @name mapValuesSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.mapValues]{@link module:Collections.mapValues} + * @category Collection + * @param {Object} obj - A collection to iterate over. + * @param {AsyncFunction} iteratee - A function to apply to each value and key + * in `coll`. + * The iteratee should complete with the transformed value as its result. + * Invoked with (value, key, callback). + * @param {Function} [callback] - A callback which is called when all `iteratee` + * functions have finished, or an error occurs. `result` is a new object consisting + * of each key from `obj`, with each transformed value on the right-hand side. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function mapValuesSeries(obj, iteratee, callback) { + return (0, _mapValuesLimit2.default)(obj, 1, iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/memoize.js b/backend/node_modules/async/memoize.js new file mode 100644 index 00000000..6535d4e4 --- /dev/null +++ b/backend/node_modules/async/memoize.js @@ -0,0 +1,91 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = memoize; + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Caches the results of an async function. When creating a hash to store + * function results against, the callback is omitted from the hash and an + * optional hash function can be used. + * + * **Note: if the async function errs, the result will not be cached and + * subsequent calls will call the wrapped function.** + * + * If no hash function is specified, the first argument is used as a hash key, + * which may work reasonably if it is a string or a data type that converts to a + * distinct string. Note that objects and arrays will not behave reasonably. + * Neither will cases where the other arguments are significant. In such cases, + * specify your own hash function. + * + * The cache of results is exposed as the `memo` property of the function + * returned by `memoize`. + * + * @name memoize + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function to proxy and cache results from. + * @param {Function} hasher - An optional function for generating a custom hash + * for storing results. It has all the arguments applied to it apart from the + * callback, and must be synchronous. 
+ * @returns {AsyncFunction} a memoized version of `fn` + * @example + * + * var slow_fn = function(name, callback) { + * // do something + * callback(null, result); + * }; + * var fn = async.memoize(slow_fn); + * + * // fn can now be used as if it were slow_fn + * fn('some name', function() { + * // callback + * }); + */ +function memoize(fn, hasher = v => v) { + var memo = Object.create(null); + var queues = Object.create(null); + var _fn = (0, _wrapAsync2.default)(fn); + var memoized = (0, _initialParams2.default)((args, callback) => { + var key = hasher(...args); + if (key in memo) { + (0, _setImmediate2.default)(() => callback(null, ...memo[key])); + } else if (key in queues) { + queues[key].push(callback); + } else { + queues[key] = [callback]; + _fn(...args, (err, ...resultArgs) => { + // #1465 don't memoize if an error occurred + if (!err) { + memo[key] = resultArgs; + } + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i](err, ...resultArgs); + } + }); + } + }); + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/nextTick.js b/backend/node_modules/async/nextTick.js new file mode 100644 index 00000000..8ebfda9e --- /dev/null +++ b/backend/node_modules/async/nextTick.js @@ -0,0 +1,52 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _setImmediate = require('./internal/setImmediate.js'); + +/** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `process.nextTick`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name nextTick + * @static + * @memberOf module:Utils + * @method + * @see [async.setImmediate]{@link module:Utils.setImmediate} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. 
+ * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ +var _defer; /* istanbul ignore file */ + + +if (_setImmediate.hasNextTick) { + _defer = process.nextTick; +} else if (_setImmediate.hasSetImmediate) { + _defer = setImmediate; +} else { + _defer = _setImmediate.fallback; +} + +exports.default = (0, _setImmediate.wrap)(_defer); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/package.json b/backend/node_modules/async/package.json new file mode 100644 index 00000000..bcf4c41b --- /dev/null +++ b/backend/node_modules/async/package.json @@ -0,0 +1,75 @@ +{ + "name": "async", + "description": "Higher-order functions and common patterns for asynchronous code", + "version": "3.2.6", + "main": "dist/async.js", + "author": "Caolan McMahon", + "homepage": "https://caolan.github.io/async/", + "repository": { + "type": "git", + "url": "https://github.com/caolan/async.git" + }, + "bugs": { + "url": "https://github.com/caolan/async/issues" + }, + "keywords": [ + "async", + "callback", + "module", + "utility" + ], + "devDependencies": { + "@babel/eslint-parser": "^7.16.5", + "@babel/core": "7.25.2", + "babel-minify": "^0.5.0", + "babel-plugin-add-module-exports": "^1.0.4", + "babel-plugin-istanbul": "^7.0.0", + "babel-plugin-syntax-async-generators": "^6.13.0", + "babel-plugin-transform-es2015-modules-commonjs": "^6.26.2", + "babel-preset-es2015": "^6.3.13", + "babel-preset-es2017": "^6.22.0", + "babel-register": "^6.26.0", + "babelify": "^10.0.0", + "benchmark": "^2.1.1", + "bluebird": "^3.4.6", + "browserify": "^17.0.0", + "chai": "^4.2.0", + "cheerio": "^0.22.0", + "es6-promise": "^4.2.8", + "eslint": "^8.6.0", + "eslint-plugin-prefer-arrow": "^1.2.3", + "fs-extra": "^11.1.1", + "jsdoc": "^4.0.3", + "karma": "^6.3.12", + "karma-browserify": "^8.1.0", + "karma-firefox-launcher": "^2.1.2", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.0", + "karma-safari-launcher": "^1.0.0", + "mocha": "^6.1.4", + "native-promise-only": "^0.8.0-a", + "nyc": "^17.0.0", + "rollup": "^4.2.0", + "rollup-plugin-node-resolve": "^5.2.0", + "rollup-plugin-npm": "^2.0.0", + "rsvp": "^4.8.5", + "semver": "^7.3.5", + "yargs": "^17.3.1" + }, + "scripts": { + "coverage": "nyc npm run mocha-node-test -- --grep @nycinvalid --invert", + "jsdoc": "jsdoc -c ./support/jsdoc/jsdoc.json && node support/jsdoc/jsdoc-fix-html.js", + "lint": "eslint --fix .", + "mocha-browser-test": "karma start", + "mocha-node-test": "mocha", + "mocha-test": "npm run mocha-node-test && npm run mocha-browser-test", + "test": "npm run lint && npm run mocha-node-test" + }, + "license": "MIT", + "nyc": { + "exclude": [ + "test" + ] + }, + "module": "dist/async.mjs" +} \ No newline at end of file diff --git a/backend/node_modules/async/parallel.js b/backend/node_modules/async/parallel.js new file mode 100644 index 00000000..2c7976fe --- /dev/null +++ b/backend/node_modules/async/parallel.js @@ -0,0 +1,180 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = parallel; + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _parallel2 = require('./internal/parallel.js'); + +var _parallel3 = _interopRequireDefault(_parallel2); + +function _interopRequireDefault(obj) { return 
obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Run the `tasks` collection of functions in parallel, without waiting until + * the previous function has completed. If any of the functions pass an error to + * its callback, the main `callback` is immediately called with the value of the + * error. Once the `tasks` have completed, the results are passed to the final + * `callback` as an array. + * + * **Note:** `parallel` is about kicking-off I/O tasks in parallel, not about + * parallel execution of code. If your tasks do not use any timers or perform + * any I/O, they will actually be executed in series. Any synchronous setup + * sections for each task will happen one after the other. JavaScript remains + * single-threaded. + * + * **Hint:** Use [`reflect`]{@link module:Utils.reflect} to continue the + * execution of other tasks when a task fails. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.parallel}. + * + * @name parallel + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * + * //Using Callbacks + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. 
+ * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.parallel([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] even though + * // the second function had a shorter timeout. + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function parallel(tasks, callback) { + return (0, _parallel3.default)(_eachOf2.default, tasks, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/parallelLimit.js b/backend/node_modules/async/parallelLimit.js new file mode 100644 index 00000000..43379572 --- /dev/null +++ b/backend/node_modules/async/parallelLimit.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = parallelLimit; + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _parallel = require('./internal/parallel.js'); + +var _parallel2 = _interopRequireDefault(_parallel); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`parallel`]{@link module:ControlFlow.parallel} but runs a maximum of `limit` async operations at a + * time. + * + * @name parallelLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.parallel]{@link module:ControlFlow.parallel} + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection of + * [async functions]{@link AsyncFunction} to run. + * Each async function can complete with any number of optional `result` values. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed successfully. This function gets a results array + * (or object) containing all the result arguments passed to the task callbacks. + * Invoked with (err, results). 
+ * @returns {Promise} a promise, if a callback is not passed + */ +function parallelLimit(tasks, limit, callback) { + return (0, _parallel2.default)((0, _eachOfLimit2.default)(limit), tasks, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/priorityQueue.js b/backend/node_modules/async/priorityQueue.js new file mode 100644 index 00000000..16c4daa0 --- /dev/null +++ b/backend/node_modules/async/priorityQueue.js @@ -0,0 +1,60 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (worker, concurrency) { + // Start with a normal queue + var q = (0, _queue2.default)(worker, concurrency); + + var { + push, + pushAsync + } = q; + + q._tasks = new _Heap2.default(); + q._createTaskItem = ({ data, priority }, callback) => { + return { + data, + priority, + callback + }; + }; + + function createDataItems(tasks, priority) { + if (!Array.isArray(tasks)) { + return { data: tasks, priority }; + } + return tasks.map(data => { + return { data, priority }; + }); + } + + // Override push to accept second parameter representing priority + q.push = function (data, priority = 0, callback) { + return push(createDataItems(data, priority), callback); + }; + + q.pushAsync = function (data, priority = 0, callback) { + return pushAsync(createDataItems(data, priority), callback); + }; + + // Remove unshift functions + delete q.unshift; + delete q.unshiftAsync; + + return q; +}; + +var _queue = require('./queue.js'); + +var _queue2 = _interopRequireDefault(_queue); + +var _Heap = require('./internal/Heap.js'); + +var _Heap2 = _interopRequireDefault(_Heap); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/queue.js b/backend/node_modules/async/queue.js new file mode 100644 index 00000000..c01340dc --- /dev/null +++ b/backend/node_modules/async/queue.js @@ -0,0 +1,24 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +exports.default = function (worker, concurrency) { + var _worker = (0, _wrapAsync2.default)(worker); + return (0, _queue2.default)((items, cb) => { + _worker(items[0], cb); + }, concurrency, 1); +}; + +var _queue = require('./internal/queue.js'); + +var _queue2 = _interopRequireDefault(_queue); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/race.js b/backend/node_modules/async/race.js new file mode 100644 index 00000000..aa167be3 --- /dev/null +++ b/backend/node_modules/async/race.js @@ -0,0 +1,67 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Runs the `tasks` array of functions in parallel, without waiting until the + * previous function has completed. 
Once any of the `tasks` complete or pass an + * error to its callback, the main `callback` is immediately called. It's + * equivalent to `Promise.race()`. + * + * @name race + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array containing [async functions]{@link AsyncFunction} + * to run. Each function can complete with an optional `result` value. + * @param {Function} callback - A callback to run once any of the functions have + * completed. This function gets an error or result from the first function that + * completed. Invoked with (err, result). + * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.race([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ], + * // main callback + * function(err, result) { + * // the result will be equal to 'two' as it finishes earlier + * }); + */ +function race(tasks, callback) { + callback = (0, _once2.default)(callback); + if (!Array.isArray(tasks)) return callback(new TypeError('First argument to race must be an array of functions')); + if (!tasks.length) return callback(); + for (var i = 0, l = tasks.length; i < l; i++) { + (0, _wrapAsync2.default)(tasks[i])(callback); + } +} + +exports.default = (0, _awaitify2.default)(race, 2); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/reduce.js b/backend/node_modules/async/reduce.js new file mode 100644 index 00000000..8a69548a --- /dev/null +++ b/backend/node_modules/async/reduce.js @@ -0,0 +1,153 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Reduces `coll` into a single value using an async `iteratee` to return each + * successive step. `memo` is the initial state of the reduction. This function + * only operates in series. + * + * For performance reasons, it may make sense to split a call to this function + * into a parallel map, and then use the normal `Array.prototype.reduce` on the + * results. This function is for situations where each step in the reduction + * needs to be async; if you can get the data before reducing it, then it's + * probably a good idea to do so. + * + * @name reduce + * @static + * @memberOf module:Collections + * @method + * @alias inject + * @alias foldl + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). 
+ * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * // file4.txt does not exist + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * const withMissingFileList = ['file1.txt','file2.txt','file3.txt', 'file4.txt']; + * + * // asynchronous function that computes the file size in bytes + * // file size is added to the memoized value, then returned + * function getFileSizeInBytes(memo, file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, memo + stat.size); + * }); + * } + * + * // Using callbacks + * async.reduce(fileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * } else { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes, function(err, result) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(result); + * } + * }); + * + * // Using Promises + * async.reduce(fileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * }).catch( err => { + * console.log(err); + * }); + * + * // Error Handling + * async.reduce(withMissingFileList, 0, getFileSizeInBytes) + * .then( result => { + * console.log(result); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.reduce(fileList, 0, getFileSizeInBytes); + * console.log(result); + * // 6000 + * // which is the sum of the file sizes of the three files + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // Error Handling + * async () => { + * try { + * let result = await async.reduce(withMissingFileList, 0, getFileSizeInBytes); + * console.log(result); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function reduce(coll, memo, iteratee, callback) { + callback = (0, _once2.default)(callback); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _eachOfSeries2.default)(coll, (x, i, iterCb) => { + _iteratee(memo, x, (err, v) => { + memo = v; + iterCb(err); + }); + }, err => callback(err, memo)); +} +exports.default = (0, _awaitify2.default)(reduce, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/reduceRight.js b/backend/node_modules/async/reduceRight.js new file mode 100644 index 00000000..5be1b68d --- /dev/null +++ b/backend/node_modules/async/reduceRight.js @@ -0,0 +1,41 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reduceRight; + +var _reduce = require('./reduce.js'); + +var _reduce2 = _interopRequireDefault(_reduce); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Same as [`reduce`]{@link module:Collections.reduce}, only operates on `array` in reverse order. + * + * @name reduceRight + * @static + * @memberOf module:Collections + * @method + * @see [async.reduce]{@link module:Collections.reduce} + * @alias foldr + * @category Collection + * @param {Array} array - A collection to iterate over. + * @param {*} memo - The initial state of the reduction. + * @param {AsyncFunction} iteratee - A function applied to each item in the + * array to produce the next step in the reduction. + * The `iteratee` should complete with the next state of the reduction. + * If the iteratee completes with an error, the reduction is stopped and the + * main `callback` is immediately called with the error. + * Invoked with (memo, item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the reduced value. Invoked with + * (err, result). + * @returns {Promise} a promise, if no callback is passed + */ +function reduceRight(array, memo, iteratee, callback) { + var reversed = [...array].reverse(); + return (0, _reduce2.default)(reversed, memo, iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/reflect.js b/backend/node_modules/async/reflect.js new file mode 100644 index 00000000..39544956 --- /dev/null +++ b/backend/node_modules/async/reflect.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reflect; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Wraps the async function in another function that always completes with a + * result object, even when it errors. + * + * The result object has either the property `error` or `value`. + * + * @name reflect + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} fn - The async function you want to wrap + * @returns {Function} - A function that always passes null to it's callback as + * the error. The second argument to the callback will be an `object` with + * either an `error` or a `value` property. + * @example + * + * async.parallel([ + * async.reflect(function(callback) { + * // do some stuff ... + * callback(null, 'one'); + * }), + * async.reflect(function(callback) { + * // do some more stuff but error ... + * callback('bad stuff happened'); + * }), + * async.reflect(function(callback) { + * // do some more stuff ... 
+ * callback(null, 'two'); + * }) + * ], + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = 'bad stuff happened' + * // results[2].value = 'two' + * }); + */ +function reflect(fn) { + var _fn = (0, _wrapAsync2.default)(fn); + return (0, _initialParams2.default)(function reflectOn(args, reflectCallback) { + args.push((error, ...cbArgs) => { + let retVal = {}; + if (error) { + retVal.error = error; + } + if (cbArgs.length > 0) { + var value = cbArgs; + if (cbArgs.length <= 1) { + [value] = cbArgs; + } + retVal.value = value; + } + reflectCallback(null, retVal); + }); + + return _fn.apply(this, args); + }); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/reflectAll.js b/backend/node_modules/async/reflectAll.js new file mode 100644 index 00000000..b78d598b --- /dev/null +++ b/backend/node_modules/async/reflectAll.js @@ -0,0 +1,93 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = reflectAll; + +var _reflect = require('./reflect.js'); + +var _reflect2 = _interopRequireDefault(_reflect); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A helper function that wraps an array or an object of functions with `reflect`. + * + * @name reflectAll + * @static + * @memberOf module:Utils + * @method + * @see [async.reflect]{@link module:Utils.reflect} + * @category Util + * @param {Array|Object|Iterable} tasks - The collection of + * [async functions]{@link AsyncFunction} to wrap in `async.reflect`. + * @returns {Array} Returns an array of async functions, each wrapped in + * `async.reflect` + * @example + * + * let tasks = [ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * // do some more stuff but error ... 
+ * callback(new Error('bad stuff happened')); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results[0].value = 'one' + * // results[1].error = Error('bad stuff happened') + * // results[2].value = 'two' + * }); + * + * // an example using an object instead of an array + * let tasks = { + * one: function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * two: function(callback) { + * callback('two'); + * }, + * three: function(callback) { + * setTimeout(function() { + * callback(null, 'three'); + * }, 100); + * } + * }; + * + * async.parallel(async.reflectAll(tasks), + * // optional callback + * function(err, results) { + * // values + * // results.one.value = 'one' + * // results.two.error = 'two' + * // results.three.value = 'three' + * }); + */ +function reflectAll(tasks) { + var results; + if (Array.isArray(tasks)) { + results = tasks.map(_reflect2.default); + } else { + results = {}; + Object.keys(tasks).forEach(key => { + results[key] = _reflect2.default.call(this, tasks[key]); + }); + } + return results; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/reject.js b/backend/node_modules/async/reject.js new file mode 100644 index 00000000..895949bd --- /dev/null +++ b/backend/node_modules/async/reject.js @@ -0,0 +1,87 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _reject2 = require('./internal/reject.js'); + +var _reject3 = _interopRequireDefault(_reject2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The opposite of [`filter`]{@link module:Collections.filter}. Removes values that pass an `async` truth test. + * + * @name reject + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const fileList = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.reject(fileList, fileExists, function(err, results) { + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }); + * + * // Using Promises + * async.reject(fileList, fileExists) + * .then( results => { + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.reject(fileList, fileExists); + * console.log(results); + * // [ 'dir3/file6.txt' ] + * // results now equals an array of the non-existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function reject(coll, iteratee, callback) { + return (0, _reject3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(reject, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/rejectLimit.js b/backend/node_modules/async/rejectLimit.js new file mode 100644 index 00000000..ce10edf5 --- /dev/null +++ b/backend/node_modules/async/rejectLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _reject2 = require('./internal/reject.js'); + +var _reject3 = _interopRequireDefault(_reject2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs a maximum of `limit` async operations at a + * time. + * + * @name rejectLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback is passed + */ +function rejectLimit(coll, limit, iteratee, callback) { + return (0, _reject3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(rejectLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/rejectSeries.js b/backend/node_modules/async/rejectSeries.js new file mode 100644 index 00000000..c08e413f --- /dev/null +++ b/backend/node_modules/async/rejectSeries.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _reject2 = require('./internal/reject.js'); + +var _reject3 = _interopRequireDefault(_reject2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`reject`]{@link module:Collections.reject} but runs only a single async operation at a time. + * + * @name rejectSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.reject]{@link module:Collections.reject} + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - An async truth test to apply to each item in + * `coll`. + * The should complete with a boolean value as its `result`. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). + * @returns {Promise} a promise, if no callback is passed + */ +function rejectSeries(coll, iteratee, callback) { + return (0, _reject3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(rejectSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/retry.js b/backend/node_modules/async/retry.js new file mode 100644 index 00000000..a4b02356 --- /dev/null +++ b/backend/node_modules/async/retry.js @@ -0,0 +1,159 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = retry; + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function constant(value) { + return function () { + return value; + }; +} + +/** + * Attempts to get a successful response from `task` no more than `times` times + * before returning an error. If the task is successful, the `callback` will be + * passed the result of the successful task. If all attempts fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name retry + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @see [async.retryable]{@link module:ControlFlow.retryable} + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - Can be either an + * object with `times` and `interval` or a number. + * * `times` - The number of attempts to make before giving up. The default + * is `5`. 
+ * * `interval` - The time to wait between retries, in milliseconds. The + * default is `0`. The interval may also be specified as a function of the + * retry count (see example). + * * `errorFilter` - An optional synchronous function that is invoked on + * erroneous result. If it returns `true` the retry attempts will continue; + * if the function returns `false` the retry flow is aborted with the current + * attempt's error and result being returned to the final callback. + * Invoked with (err). + * * If `opts` is a number, the number specifies the number of times to retry, + * with the default interval of `0`. + * @param {AsyncFunction} task - An async function to retry. + * Invoked with (callback). + * @param {Function} [callback] - An optional callback which is called when the + * task has succeeded, or after the final failed attempt. It receives the `err` + * and `result` arguments of the last attempt at completing the `task`. Invoked + * with (err, results). + * @returns {Promise} a promise if no callback provided + * + * @example + * + * // The `retry` function can be used as a stand-alone control flow by passing + * // a callback, as shown below: + * + * // try calling apiMethod 3 times + * async.retry(3, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 3 times, waiting 200 ms between each retry + * async.retry({times: 3, interval: 200}, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod 10 times with exponential backoff + * // (i.e. intervals of 100, 200, 400, 800, 1600, ... milliseconds) + * async.retry({ + * times: 10, + * interval: function(retryCount) { + * return 50 * Math.pow(2, retryCount); + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod the default 5 times no delay between each retry + * async.retry(apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // try calling apiMethod only when error condition satisfies, all other + * // errors will abort the retry control flow and return to final callback + * async.retry({ + * errorFilter: function(err) { + * return err.message === 'Temporary error'; // only retry on a specific error + * } + * }, apiMethod, function(err, result) { + * // do something with the result + * }); + * + * // to retry individual methods that are not as reliable within other + * // control flow functions, use the `retryable` wrapper: + * async.auto({ + * users: api.getUsers.bind(api), + * payments: async.retryable(3, api.getPayments.bind(api)) + * }, function(err, results) { + * // do something with the results + * }); + * + */ +const DEFAULT_TIMES = 5; +const DEFAULT_INTERVAL = 0; + +function retry(opts, task, callback) { + var options = { + times: DEFAULT_TIMES, + intervalFunc: constant(DEFAULT_INTERVAL) + }; + + if (arguments.length < 3 && typeof opts === 'function') { + callback = task || (0, _promiseCallback.promiseCallback)(); + task = opts; + } else { + parseTimes(options, opts); + callback = callback || (0, _promiseCallback.promiseCallback)(); + } + + if (typeof task !== 'function') { + throw new Error("Invalid arguments for async.retry"); + } + + var _task = (0, _wrapAsync2.default)(task); + + var attempt = 1; + function retryAttempt() { + _task((err, ...args) => { + if (err === false) return; + if (err && attempt++ < options.times && (typeof options.errorFilter != 'function' || options.errorFilter(err))) { + 
setTimeout(retryAttempt, options.intervalFunc(attempt - 1)); + } else { + callback(err, ...args); + } + }); + } + + retryAttempt(); + return callback[_promiseCallback.PROMISE_SYMBOL]; +} + +function parseTimes(acc, t) { + if (typeof t === 'object') { + acc.times = +t.times || DEFAULT_TIMES; + + acc.intervalFunc = typeof t.interval === 'function' ? t.interval : constant(+t.interval || DEFAULT_INTERVAL); + + acc.errorFilter = t.errorFilter; + } else if (typeof t === 'number' || typeof t === 'string') { + acc.times = +t || DEFAULT_TIMES; + } else { + throw new Error("Invalid arguments for async.retry"); + } +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/retryable.js b/backend/node_modules/async/retryable.js new file mode 100644 index 00000000..68256c31 --- /dev/null +++ b/backend/node_modules/async/retryable.js @@ -0,0 +1,77 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = retryable; + +var _retry = require('./retry.js'); + +var _retry2 = _interopRequireDefault(_retry); + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A close relative of [`retry`]{@link module:ControlFlow.retry}. This method + * wraps a task and makes it retryable, rather than immediately calling it + * with retries. + * + * @name retryable + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.retry]{@link module:ControlFlow.retry} + * @category Control Flow + * @param {Object|number} [opts = {times: 5, interval: 0}| 5] - optional + * options, exactly the same as from `retry`, except for a `opts.arity` that + * is the arity of the `task` function, defaulting to `task.length` + * @param {AsyncFunction} task - the asynchronous function to wrap. + * This function will be passed any arguments passed to the returned wrapper. + * Invoked with (...args, callback). + * @returns {AsyncFunction} The wrapped function, which when invoked, will + * retry on an error, based on the parameters specified in `opts`. + * This function will accept the same parameters as `task`. 
+ * @example + * + * async.auto({ + * dep1: async.retryable(3, getFromFlakyService), + * process: ["dep1", async.retryable(3, function (results, cb) { + * maybeProcessData(results.dep1, cb); + * })] + * }, callback); + */ +function retryable(opts, task) { + if (!task) { + task = opts; + opts = null; + } + let arity = opts && opts.arity || task.length; + if ((0, _wrapAsync.isAsync)(task)) { + arity += 1; + } + var _task = (0, _wrapAsync2.default)(task); + return (0, _initialParams2.default)((args, callback) => { + if (args.length < arity - 1 || callback == null) { + args.push(callback); + callback = (0, _promiseCallback.promiseCallback)(); + } + function taskFn(cb) { + _task(...args, cb); + } + + if (opts) (0, _retry2.default)(opts, taskFn, callback);else (0, _retry2.default)(taskFn, callback); + + return callback[_promiseCallback.PROMISE_SYMBOL]; + }); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/select.js b/backend/node_modules/async/select.js new file mode 100644 index 00000000..2c9a63de --- /dev/null +++ b/backend/node_modules/async/select.js @@ -0,0 +1,93 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns a new array of all the values in `coll` which pass an async truth + * test. This operation is performed in parallel, but the results array will be + * in the same order as the original. + * + * @name filter + * @static + * @memberOf module:Collections + * @method + * @alias select + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * + * const files = ['dir1/file1.txt','dir2/file3.txt','dir3/file6.txt']; + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.filter(files, fileExists, function(err, results) { + * if(err) { + * console.log(err); + * } else { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * }); + * + * // Using Promises + * async.filter(files, fileExists) + * .then(results => { + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let results = await async.filter(files, fileExists); + * console.log(results); + * // [ 'dir1/file1.txt', 'dir2/file3.txt' ] + * // results is now an array of the existing files + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function filter(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filter, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/selectLimit.js b/backend/node_modules/async/selectLimit.js new file mode 100644 index 00000000..d3b3f50c --- /dev/null +++ b/backend/node_modules/async/selectLimit.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs a maximum of `limit` async operations at a + * time. + * + * @name filterLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {number} limit - The maximum number of async operations at a time. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results). 
+ * @returns {Promise} a promise, if no callback provided + */ +function filterLimit(coll, limit, iteratee, callback) { + return (0, _filter3.default)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/selectSeries.js b/backend/node_modules/async/selectSeries.js new file mode 100644 index 00000000..019a2d0a --- /dev/null +++ b/backend/node_modules/async/selectSeries.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _filter2 = require('./internal/filter.js'); + +var _filter3 = _interopRequireDefault(_filter2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`filter`]{@link module:Collections.filter} but runs only a single async operation at a time. + * + * @name filterSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.filter]{@link module:Collections.filter} + * @alias selectSeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {Function} iteratee - A truth test to apply to each item in `coll`. + * The `iteratee` is passed a `callback(err, truthValue)`, which must be called + * with a boolean argument once it has completed. Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Invoked with (err, results) + * @returns {Promise} a promise, if no callback provided + */ +function filterSeries(coll, iteratee, callback) { + return (0, _filter3.default)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(filterSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/seq.js b/backend/node_modules/async/seq.js new file mode 100644 index 00000000..e7881cd0 --- /dev/null +++ b/backend/node_modules/async/seq.js @@ -0,0 +1,79 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = seq; + +var _reduce = require('./reduce.js'); + +var _reduce2 = _interopRequireDefault(_reduce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Version of the compose function that is more natural to read. Each function + * consumes the return value of the previous function. It is the equivalent of + * [compose]{@link module:ControlFlow.compose} with the arguments reversed. + * + * Each function is executed with the `this` binding of the composed function. 
+ * + * @name seq + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.compose]{@link module:ControlFlow.compose} + * @category Control Flow + * @param {...AsyncFunction} functions - the asynchronous functions to compose + * @returns {Function} a function that composes the `functions` in order + * @example + * + * // Requires lodash (or underscore), express3 and dresende's orm2. + * // Part of an app, that fetches cats of the logged user. + * // This example uses `seq` function to avoid overnesting and error + * // handling clutter. + * app.get('/cats', function(request, response) { + * var User = request.models.User; + * async.seq( + * User.get.bind(User), // 'User.get' has signature (id, callback(err, data)) + * function(user, fn) { + * user.getCats(fn); // 'getCats' has signature (callback(err, data)) + * } + * )(req.session.user_id, function (err, cats) { + * if (err) { + * console.error(err); + * response.json({ status: 'error', message: err.message }); + * } else { + * response.json({ status: 'ok', message: 'Cats found', data: cats }); + * } + * }); + * }); + */ +function seq(...functions) { + var _functions = functions.map(_wrapAsync2.default); + return function (...args) { + var that = this; + + var cb = args[args.length - 1]; + if (typeof cb == 'function') { + args.pop(); + } else { + cb = (0, _promiseCallback.promiseCallback)(); + } + + (0, _reduce2.default)(_functions, args, (newargs, fn, iterCb) => { + fn.apply(that, newargs.concat((err, ...nextargs) => { + iterCb(err, nextargs); + })); + }, (err, results) => cb(err, ...results)); + + return cb[_promiseCallback.PROMISE_SYMBOL]; + }; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/series.js b/backend/node_modules/async/series.js new file mode 100644 index 00000000..60c17ed6 --- /dev/null +++ b/backend/node_modules/async/series.js @@ -0,0 +1,186 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = series; + +var _parallel2 = require('./internal/parallel.js'); + +var _parallel3 = _interopRequireDefault(_parallel2); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Run the functions in the `tasks` collection in series, each one running once + * the previous function has completed. If any functions in the series pass an + * error to its callback, no more functions are run, and `callback` is + * immediately called with the value of the error. Otherwise, `callback` + * receives an array of results when `tasks` have completed. + * + * It is also possible to use an object instead of an array. Each property will + * be run as a function, and the results will be passed to the final `callback` + * as an object instead of an array. This can be a more readable way of handling + * results from {@link async.series}. + * + * **Note** that while many implementations preserve the order of object + * properties, the [ECMAScript Language Specification](http://www.ecma-international.org/ecma-262/5.1/#sec-8.6) + * explicitly states that + * + * > The mechanics and order of enumerating the properties is not specified. + * + * So if you rely on the order in which your series of functions are executed, + * and want this to work on all platforms, consider using an array. 
+ * + * @name series + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing + * [async functions]{@link AsyncFunction} to run in series. + * Each function can complete with any number of optional `result` values. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This function gets a results array (or object) + * containing all the result arguments passed to the `task` callbacks. Invoked + * with (err, result). + * @return {Promise} a promise, if no callback is passed + * @example + * + * //Using Callbacks + * async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ], function(err, results) { + * console.log(results); + * // results is equal to ['one','two'] + * }); + * + * // an example using objects instead of arrays + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }, function(err, results) { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }); + * + * //Using Promises + * async.series([ + * function(callback) { + * setTimeout(function() { + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * callback(null, 'two'); + * }, 100); + * } + * ]).then(results => { + * console.log(results); + * // results is equal to ['one','two'] + * }).catch(err => { + * console.log(err); + * }); + * + * // an example using an object instead of an array + * async.series({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }).then(results => { + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * }).catch(err => { + * console.log(err); + * }); + * + * //Using async/await + * async () => { + * try { + * let results = await async.series([ + * function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 'one'); + * }, 200); + * }, + * function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 'two'); + * }, 100); + * } + * ]); + * console.log(results); + * // results is equal to ['one','two'] + * } + * catch (err) { + * console.log(err); + * } + * } + * + * // an example using an object instead of an array + * async () => { + * try { + * let results = await async.parallel({ + * one: function(callback) { + * setTimeout(function() { + * // do some async task + * callback(null, 1); + * }, 200); + * }, + * two: function(callback) { + * setTimeout(function() { + * // then do another async task + * callback(null, 2); + * }, 100); + * } + * }); + * console.log(results); + * // results is equal to: { one: 1, two: 2 } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function series(tasks, callback) { + return (0, _parallel3.default)(_eachOfSeries2.default, tasks, callback); +} +module.exports = 
exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/setImmediate.js b/backend/node_modules/async/setImmediate.js new file mode 100644 index 00000000..eea86771 --- /dev/null +++ b/backend/node_modules/async/setImmediate.js @@ -0,0 +1,45 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Calls `callback` on a later loop around the event loop. In Node.js this just + * calls `setImmediate`. In the browser it will use `setImmediate` if + * available, otherwise `setTimeout(callback, 0)`, which means other higher + * priority events may precede the execution of `callback`. + * + * This is used internally for browser-compatibility purposes. + * + * @name setImmediate + * @static + * @memberOf module:Utils + * @method + * @see [async.nextTick]{@link module:Utils.nextTick} + * @category Util + * @param {Function} callback - The function to call on a later loop around + * the event loop. Invoked with (args...). + * @param {...*} args... - any number of additional arguments to pass to the + * callback on the next tick. + * @example + * + * var call_order = []; + * async.nextTick(function() { + * call_order.push('two'); + * // call_order now equals ['one','two'] + * }); + * call_order.push('one'); + * + * async.setImmediate(function (a, b, c) { + * // a, b, and c equal 1, 2, and 3 + * }, 1, 2, 3); + */ +exports.default = _setImmediate2.default; +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/some.js b/backend/node_modules/async/some.js new file mode 100644 index 00000000..a5bd328a --- /dev/null +++ b/backend/node_modules/async/some.js @@ -0,0 +1,122 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Returns `true` if at least one element in the `coll` satisfies an async test. + * If any iteratee call returns `true`, the main `callback` is immediately + * called. + * + * @name some + * @static + * @memberOf module:Collections + * @method + * @alias any + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + * @example + * + * // dir1 is a directory that contains file1.txt, file2.txt + * // dir2 is a directory that contains file3.txt, file4.txt + * // dir3 is a directory that contains file5.txt + * // dir4 does not exist + * + * // asynchronous function that checks if a file exists + * function fileExists(file, callback) { + * fs.access(file, fs.constants.F_OK, (err) => { + * callback(null, !err); + * }); + * } + * + * // Using callbacks + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + *); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists, + * function(err, result) { + * console.log(result); + * // false + * // result is false since none of the files exists + * } + *); + * + * // Using Promises + * async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists) + * .then( result => { + * console.log(result); + * // true + * // result is true since some file in the list exists + * }).catch( err => { + * console.log(err); + * }); + * + * async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists) + * .then( result => { + * console.log(result); + * // false + * // result is false since none of the files exists + * }).catch( err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir3/file5.txt'], fileExists); + * console.log(result); + * // true + * // result is true since some file in the list exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + * async () => { + * try { + * let result = await async.some(['dir1/missing.txt','dir2/missing.txt','dir4/missing.txt'], fileExists); + * console.log(result); + * // false + * // result is false since none of the files exists + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function some(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOf2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(some, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/someLimit.js b/backend/node_modules/async/someLimit.js new file mode 100644 index 00000000..3a8096fd --- /dev/null +++ b/backend/node_modules/async/someLimit.js @@ -0,0 +1,47 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfLimit = require('./internal/eachOfLimit.js'); + +var _eachOfLimit2 = _interopRequireDefault(_eachOfLimit); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs a maximum of `limit` async operations at a time. + * + * @name someLimit + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anyLimit + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. 
+ * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in parallel. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + */ +function someLimit(coll, limit, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)((0, _eachOfLimit2.default)(limit), coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someLimit, 4); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/someSeries.js b/backend/node_modules/async/someSeries.js new file mode 100644 index 00000000..51aad19b --- /dev/null +++ b/backend/node_modules/async/someSeries.js @@ -0,0 +1,46 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _createTester = require('./internal/createTester.js'); + +var _createTester2 = _interopRequireDefault(_createTester); + +var _eachOfSeries = require('./eachOfSeries.js'); + +var _eachOfSeries2 = _interopRequireDefault(_eachOfSeries); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [`some`]{@link module:Collections.some} but runs only a single async operation at a time. + * + * @name someSeries + * @static + * @memberOf module:Collections + * @method + * @see [async.some]{@link module:Collections.some} + * @alias anySeries + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async truth test to apply to each item + * in the collections in series. + * The iteratee should complete with a boolean `result` value. + * Invoked with (item, callback). + * @param {Function} [callback] - A callback which is called as soon as any + * iteratee returns `true`, or after all the iteratee functions have finished. + * Result will be either `true` or `false` depending on the values of the async + * tests. Invoked with (err, result). 
+ * @returns {Promise} a promise, if no callback provided + */ +function someSeries(coll, iteratee, callback) { + return (0, _createTester2.default)(Boolean, res => res)(_eachOfSeries2.default, coll, iteratee, callback); +} +exports.default = (0, _awaitify2.default)(someSeries, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/sortBy.js b/backend/node_modules/async/sortBy.js new file mode 100644 index 00000000..0988b612 --- /dev/null +++ b/backend/node_modules/async/sortBy.js @@ -0,0 +1,190 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _map = require('./map.js'); + +var _map2 = _interopRequireDefault(_map); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Sorts a list by the results of running each `coll` value through an async + * `iteratee`. + * + * @name sortBy + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {AsyncFunction} iteratee - An async function to apply to each item in + * `coll`. + * The iteratee should complete with a value to use as the sort criteria as + * its `result`. + * Invoked with (item, callback). + * @param {Function} callback - A callback which is called after all the + * `iteratee` functions have finished, or an error occurs. Results is the items + * from the original `coll` sorted by the values returned by the `iteratee` + * calls. Invoked with (err, results). + * @returns {Promise} a promise, if no callback passed + * @example + * + * // bigfile.txt is a file that is 251100 bytes in size + * // mediumfile.txt is a file that is 11000 bytes in size + * // smallfile.txt is a file that is 121 bytes in size + * + * // asynchronous function that returns the file size in bytes + * function getFileSizeInBytes(file, callback) { + * fs.stat(file, function(err, stat) { + * if (err) { + * return callback(err); + * } + * callback(null, stat.size); + * }); + * } + * + * // Using callbacks + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // By modifying the callback parameter the + * // sorting order can be influenced: + * + * // ascending order + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) return callback(getFileSizeErr); + * callback(null, fileSize); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. 
+ * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * } + * ); + * + * // descending order + * async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], function(file, callback) { + * getFileSizeInBytes(file, function(getFileSizeErr, fileSize) { + * if (getFileSizeErr) { + * return callback(getFileSizeErr); + * } + * callback(null, fileSize * -1); + * }); + * }, function(err, results) { + * if (err) { + * console.log(err); + * } else { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'bigfile.txt', 'mediumfile.txt', 'smallfile.txt'] + * } + * } + * ); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes, + * function(err, results) { + * if (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } else { + * console.log(results); + * } + * } + * ); + * + * // Using Promises + * async.sortBy(['mediumfile.txt','smallfile.txt','bigfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * }).catch( err => { + * console.log(err); + * }); + * + * // Error handling + * async.sortBy(['mediumfile.txt','smallfile.txt','missingfile.txt'], getFileSizeInBytes) + * .then( results => { + * console.log(results); + * }).catch( err => { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * }); + * + * // Using async/await + * (async () => { + * try { + * let results = await async.sortBy(['bigfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * // results is now the original array of files sorted by + * // file size (ascending by default), e.g. + * // [ 'smallfile.txt', 'mediumfile.txt', 'bigfile.txt'] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * // Error handling + * async () => { + * try { + * let results = await async.sortBy(['missingfile.txt','mediumfile.txt','smallfile.txt'], getFileSizeInBytes); + * console.log(results); + * } + * catch (err) { + * console.log(err); + * // [ Error: ENOENT: no such file or directory ] + * } + * } + * + */ +function sortBy(coll, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _map2.default)(coll, (x, iterCb) => { + _iteratee(x, (err, criteria) => { + if (err) return iterCb(err); + iterCb(err, { value: x, criteria }); + }); + }, (err, results) => { + if (err) return callback(err); + callback(null, results.sort(comparator).map(v => v.value)); + }); + + function comparator(left, right) { + var a = left.criteria, + b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + } +} +exports.default = (0, _awaitify2.default)(sortBy, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/timeout.js b/backend/node_modules/async/timeout.js new file mode 100644 index 00000000..46bb233a --- /dev/null +++ b/backend/node_modules/async/timeout.js @@ -0,0 +1,89 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = timeout; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Sets a time limit on an asynchronous function. If the function does not call + * its callback within the specified milliseconds, it will be called with a + * timeout error. The code property for the error object will be `'ETIMEDOUT'`. + * + * @name timeout + * @static + * @memberOf module:Utils + * @method + * @category Util + * @param {AsyncFunction} asyncFn - The async function to limit in time. + * @param {number} milliseconds - The specified time limit. + * @param {*} [info] - Any variable you want attached (`string`, `object`, etc) + * to timeout Error for more information.. + * @returns {AsyncFunction} Returns a wrapped function that can be used with any + * of the control flow functions. + * Invoke this function with the same parameters as you would `asyncFunc`. + * @example + * + * function myFunction(foo, callback) { + * doAsyncTask(foo, function(err, data) { + * // handle errors + * if (err) return callback(err); + * + * // do some stuff ... + * + * // return processed data + * return callback(null, data); + * }); + * } + * + * var wrapped = async.timeout(myFunction, 1000); + * + * // call `wrapped` as you would `myFunction` + * wrapped({ bar: 'bar' }, function(err, data) { + * // if `myFunction` takes < 1000 ms to execute, `err` + * // and `data` will have their expected values + * + * // else `err` will be an Error with the code 'ETIMEDOUT' + * }); + */ +function timeout(asyncFn, milliseconds, info) { + var fn = (0, _wrapAsync2.default)(asyncFn); + + return (0, _initialParams2.default)((args, callback) => { + var timedOut = false; + var timer; + + function timeoutCallback() { + var name = asyncFn.name || 'anonymous'; + var error = new Error('Callback function "' + name + '" timed out.'); + error.code = 'ETIMEDOUT'; + if (info) { + error.info = info; + } + timedOut = true; + callback(error); + } + + args.push((...cbArgs) => { + if (!timedOut) { + callback(...cbArgs); + clearTimeout(timer); + } + }); + + // setup timer and call original function + timer = setTimeout(timeoutCallback, milliseconds); + fn(...args); + }); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/times.js b/backend/node_modules/async/times.js new file mode 100644 index 00000000..ca7df517 --- /dev/null +++ b/backend/node_modules/async/times.js @@ -0,0 +1,50 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = times; + +var _timesLimit = require('./timesLimit.js'); + +var _timesLimit2 = _interopRequireDefault(_timesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * Calls the `iteratee` function `n` times, and accumulates results in the same + * manner you would use with [map]{@link module:Collections.map}. + * + * @name times + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.map]{@link module:Collections.map} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + * @example + * + * // Pretend this is some complicated async factory + * var createUser = function(id, callback) { + * callback(null, { + * id: 'user' + id + * }); + * }; + * + * // generate 5 users + * async.times(5, function(n, next) { + * createUser(n, function(err, user) { + * next(err, user); + * }); + * }, function(err, users) { + * // we should now have 5 users + * }); + */ +function times(n, iteratee, callback) { + return (0, _timesLimit2.default)(n, Infinity, iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/timesLimit.js b/backend/node_modules/async/timesLimit.js new file mode 100644 index 00000000..f76de250 --- /dev/null +++ b/backend/node_modules/async/timesLimit.js @@ -0,0 +1,43 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = timesLimit; + +var _mapLimit = require('./mapLimit.js'); + +var _mapLimit2 = _interopRequireDefault(_mapLimit); + +var _range = require('./internal/range.js'); + +var _range2 = _interopRequireDefault(_range); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * The same as [times]{@link module:ControlFlow.times} but runs a maximum of `limit` async operations at a + * time. + * + * @name timesLimit + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} count - The number of times to run the function. + * @param {number} limit - The maximum number of async operations at a time. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see [async.map]{@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ +function timesLimit(count, limit, iteratee, callback) { + var _iteratee = (0, _wrapAsync2.default)(iteratee); + return (0, _mapLimit2.default)((0, _range2.default)(count), limit, _iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/timesSeries.js b/backend/node_modules/async/timesSeries.js new file mode 100644 index 00000000..776b4f3a --- /dev/null +++ b/backend/node_modules/async/timesSeries.js @@ -0,0 +1,32 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = timesSeries; + +var _timesLimit = require('./timesLimit.js'); + +var _timesLimit2 = _interopRequireDefault(_timesLimit); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/** + * The same as [times]{@link module:ControlFlow.times} but runs only a single async operation at a time. + * + * @name timesSeries + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.times]{@link module:ControlFlow.times} + * @category Control Flow + * @param {number} n - The number of times to run the function. + * @param {AsyncFunction} iteratee - The async function to call `n` times. + * Invoked with the iteration index and a callback: (n, next). + * @param {Function} callback - see {@link module:Collections.map}. + * @returns {Promise} a promise, if no callback is provided + */ +function timesSeries(n, iteratee, callback) { + return (0, _timesLimit2.default)(n, 1, iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/transform.js b/backend/node_modules/async/transform.js new file mode 100644 index 00000000..75dadea8 --- /dev/null +++ b/backend/node_modules/async/transform.js @@ -0,0 +1,173 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = transform; + +var _eachOf = require('./eachOf.js'); + +var _eachOf2 = _interopRequireDefault(_eachOf); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _promiseCallback = require('./internal/promiseCallback.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * A relative of `reduce`. Takes an Object or Array, and iterates over each + * element in parallel, each step potentially mutating an `accumulator` value. + * The type of the accumulator defaults to the type of collection passed in. + * + * @name transform + * @static + * @memberOf module:Collections + * @method + * @category Collection + * @param {Array|Iterable|AsyncIterable|Object} coll - A collection to iterate over. + * @param {*} [accumulator] - The initial state of the transform. If omitted, + * it will default to an empty Object or Array, depending on the type of `coll` + * @param {AsyncFunction} iteratee - A function applied to each item in the + * collection that potentially modifies the accumulator. + * Invoked with (accumulator, item, key, callback). + * @param {Function} [callback] - A callback which is called after all the + * `iteratee` functions have finished. Result is the transformed accumulator. + * Invoked with (err, result). + * @returns {Promise} a promise, if no callback provided + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileList = ['file1.txt','file2.txt','file3.txt']; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. 
+ * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileList, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * }); + * + * // Using Promises + * async.transform(fileList, transformFileSize) + * .then(result => { + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * (async () => { + * try { + * let result = await async.transform(fileList, transformFileSize); + * console.log(result); + * // [ '1000 Bytes', '1.95 KB', '2.93 KB' ] + * } + * catch (err) { + * console.log(err); + * } + * })(); + * + * @example + * + * // file1.txt is a file that is 1000 bytes in size + * // file2.txt is a file that is 2000 bytes in size + * // file3.txt is a file that is 3000 bytes in size + * + * // helper function that returns human-readable size format from bytes + * function formatBytes(bytes, decimals = 2) { + * // implementation not included for brevity + * return humanReadbleFilesize; + * } + * + * const fileMap = { f1: 'file1.txt', f2: 'file2.txt', f3: 'file3.txt' }; + * + * // asynchronous function that returns the file size, transformed to human-readable format + * // e.g. 1024 bytes = 1KB, 1234 bytes = 1.21 KB, 1048576 bytes = 1MB, etc. + * function transformFileSize(acc, value, key, callback) { + * fs.stat(value, function(err, stat) { + * if (err) { + * return callback(err); + * } + * acc[key] = formatBytes(stat.size); + * callback(null); + * }); + * } + * + * // Using callbacks + * async.transform(fileMap, transformFileSize, function(err, result) { + * if(err) { + * console.log(err); + * } else { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * }); + * + * // Using Promises + * async.transform(fileMap, transformFileSize) + * .then(result => { + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * }).catch(err => { + * console.log(err); + * }); + * + * // Using async/await + * async () => { + * try { + * let result = await async.transform(fileMap, transformFileSize); + * console.log(result); + * // { f1: '1000 Bytes', f2: '1.95 KB', f3: '2.93 KB' } + * } + * catch (err) { + * console.log(err); + * } + * } + * + */ +function transform(coll, accumulator, iteratee, callback) { + if (arguments.length <= 3 && typeof accumulator === 'function') { + callback = iteratee; + iteratee = accumulator; + accumulator = Array.isArray(coll) ? 
[] : {}; + } + callback = (0, _once2.default)(callback || (0, _promiseCallback.promiseCallback)()); + var _iteratee = (0, _wrapAsync2.default)(iteratee); + + (0, _eachOf2.default)(coll, (v, k, cb) => { + _iteratee(accumulator, v, k, cb); + }, err => callback(err, accumulator)); + return callback[_promiseCallback.PROMISE_SYMBOL]; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/tryEach.js b/backend/node_modules/async/tryEach.js new file mode 100644 index 00000000..7e63f9d5 --- /dev/null +++ b/backend/node_modules/async/tryEach.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _eachSeries = require('./eachSeries.js'); + +var _eachSeries2 = _interopRequireDefault(_eachSeries); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * It runs each task in series but stops whenever any of the functions were + * successful. If one of the tasks were successful, the `callback` will be + * passed the result of the successful task. If all tasks fail, the callback + * will be passed the error and result (if any) of the final attempt. + * + * @name tryEach + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array|Iterable|AsyncIterable|Object} tasks - A collection containing functions to + * run, each function is passed a `callback(err, result)` it must call on + * completion with an error `err` (which can be `null`) and an optional `result` + * value. + * @param {Function} [callback] - An optional callback which is called when one + * of the tasks has succeeded, or all have failed. It receives the `err` and + * `result` arguments of the last attempt at completing the `task`. Invoked with + * (err, results). + * @returns {Promise} a promise, if no callback is passed + * @example + * async.tryEach([ + * function getDataFromFirstWebsite(callback) { + * // Try getting the data from the first website + * callback(err, data); + * }, + * function getDataFromSecondWebsite(callback) { + * // First website failed, + * // Try getting the data from the backup website + * callback(err, data); + * } + * ], + * // optional callback + * function(err, results) { + * Now do something with the data. + * }); + * + */ +function tryEach(tasks, callback) { + var error = null; + var result; + return (0, _eachSeries2.default)(tasks, (task, taskCb) => { + (0, _wrapAsync2.default)(task)((err, ...args) => { + if (err === false) return taskCb(err); + + if (args.length < 2) { + [result] = args; + } else { + result = args; + } + error = err; + taskCb(err ? null : {}); + }); + }, () => callback(error, result)); +} + +exports.default = (0, _awaitify2.default)(tryEach); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/unmemoize.js b/backend/node_modules/async/unmemoize.js new file mode 100644 index 00000000..badd7aec --- /dev/null +++ b/backend/node_modules/async/unmemoize.js @@ -0,0 +1,25 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = unmemoize; +/** + * Undoes a [memoize]{@link module:Utils.memoize}d function, reverting it to the original, + * unmemoized form. Handy for testing. 
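The `tryEach` example in the JSDoc above is not self-contained (it references undefined `err` and `data` variables). A minimal runnable sketch of the same fallback pattern, using hypothetical stand-in sources; this is an illustration only, not part of the vendored package:

```js
const async = require('async');

// Hypothetical primary source that fails.
function fromPrimary(callback) {
  setTimeout(() => callback(new Error('primary unavailable')), 10);
}

// Hypothetical backup source that succeeds.
function fromBackup(callback) {
  setTimeout(() => callback(null, { objects: [1, 2, 3] }), 10);
}

// tryEach runs the tasks in series and stops at the first success.
async.tryEach([fromPrimary, fromBackup], (err, result) => {
  if (err) return console.error('all sources failed:', err);
  console.log('first successful result:', result); // { objects: [ 1, 2, 3 ] }
});
```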
+ * + * @name unmemoize + * @static + * @memberOf module:Utils + * @method + * @see [async.memoize]{@link module:Utils.memoize} + * @category Util + * @param {AsyncFunction} fn - the memoized function + * @returns {AsyncFunction} a function that calls the original unmemoized function + */ +function unmemoize(fn) { + return (...args) => { + return (fn.unmemoized || fn)(...args); + }; +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/until.js b/backend/node_modules/async/until.js new file mode 100644 index 00000000..4b69b972 --- /dev/null +++ b/backend/node_modules/async/until.js @@ -0,0 +1,61 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = until; + +var _whilst = require('./whilst.js'); + +var _whilst2 = _interopRequireDefault(_whilst); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Repeatedly call `iteratee` until `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. `callback` will be passed an error and any + * arguments passed to the final `iteratee`'s callback. + * + * The inverse of [whilst]{@link module:ControlFlow.whilst}. + * + * @name until + * @static + * @memberOf module:ControlFlow + * @method + * @see [async.whilst]{@link module:ControlFlow.whilst} + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` fails. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has passed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
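`unmemoize` ships without a JSDoc example; a short sketch of how it pairs with `async.memoize` (the `slowLookup` helper is hypothetical, added only for illustration):

```js
const async = require('async');

// Hypothetical slow async lookup to be cached.
function slowLookup(key, callback) {
  setTimeout(() => callback(null, `value-for-${key}`), 50);
}

const cachedLookup = async.memoize(slowLookup);

cachedLookup('a', (err, value) => {
  // A repeat call with the same key is served from the cache.
  cachedLookup('a', (err2, cached) => {
    console.log(value, cached); // value-for-a value-for-a

    // Revert to the original, uncached function, e.g. in a test.
    const uncached = async.unmemoize(cachedLookup);
    uncached('a', (err3, fresh) => console.log(fresh)); // recomputed each call
  });
});
```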
Invoked with (err, [results]); + * @returns {Promise} a promise, if a callback is not passed + * + * @example + * const results = [] + * let finished = false + * async.until(function test(cb) { + * cb(null, finished) + * }, function iter(next) { + * fetchPage(url, (err, body) => { + * if (err) return next(err) + * results = results.concat(body.objects) + * finished = !!body.next + * next(err) + * }) + * }, function done (err) { + * // all pages have been fetched + * }) + */ +function until(test, iteratee, callback) { + const _test = (0, _wrapAsync2.default)(test); + return (0, _whilst2.default)(cb => _test((err, truth) => cb(err, !truth)), iteratee, callback); +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/waterfall.js b/backend/node_modules/async/waterfall.js new file mode 100644 index 00000000..c3242f79 --- /dev/null +++ b/backend/node_modules/async/waterfall.js @@ -0,0 +1,105 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _once = require('./internal/once.js'); + +var _once2 = _interopRequireDefault(_once); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Runs the `tasks` array of functions in series, each passing their results to + * the next in the array. However, if any of the `tasks` pass an error to their + * own callback, the next function is not executed, and the main `callback` is + * immediately called with the error. + * + * @name waterfall + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {Array} tasks - An array of [async functions]{@link AsyncFunction} + * to run. + * Each function should complete with any number of `result` values. + * The `result` values will be passed as arguments, in order, to the next task. + * @param {Function} [callback] - An optional callback to run once all the + * functions have completed. This will be passed the results of the last task's + * callback. Invoked with (err, [results]). 
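The `until` example above depends on an undefined `fetchPage` and reassigns a `const`, so it does not run as written. A self-contained, counter-based sketch of the same loop shape (illustrative only):

```js
const async = require('async');

let count = 0;

async.until(
  // Truth test: stop once the condition becomes true.
  function test(cb) {
    cb(null, count >= 5);
  },
  // Called repeatedly while the test returns false.
  function iter(next) {
    count++;
    setTimeout(() => next(null, count), 10);
  },
  // Receives the arguments passed to the final iteratee's callback.
  function done(err, result) {
    console.log(err, result); // null 5
  }
);
```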
+ * @returns {Promise} a promise, if a callback is omitted + * @example + * + * async.waterfall([ + * function(callback) { + * callback(null, 'one', 'two'); + * }, + * function(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * }, + * function(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + * ], function (err, result) { + * // result now equals 'done' + * }); + * + * // Or, with named functions: + * async.waterfall([ + * myFirstFunction, + * mySecondFunction, + * myLastFunction, + * ], function (err, result) { + * // result now equals 'done' + * }); + * function myFirstFunction(callback) { + * callback(null, 'one', 'two'); + * } + * function mySecondFunction(arg1, arg2, callback) { + * // arg1 now equals 'one' and arg2 now equals 'two' + * callback(null, 'three'); + * } + * function myLastFunction(arg1, callback) { + * // arg1 now equals 'three' + * callback(null, 'done'); + * } + */ +function waterfall(tasks, callback) { + callback = (0, _once2.default)(callback); + if (!Array.isArray(tasks)) return callback(new Error('First argument to waterfall must be an array of functions')); + if (!tasks.length) return callback(); + var taskIndex = 0; + + function nextTask(args) { + var task = (0, _wrapAsync2.default)(tasks[taskIndex++]); + task(...args, (0, _onlyOnce2.default)(next)); + } + + function next(err, ...args) { + if (err === false) return; + if (err || taskIndex === tasks.length) { + return callback(err, ...args); + } + nextTask(args); + } + + nextTask([]); +} + +exports.default = (0, _awaitify2.default)(waterfall); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/whilst.js b/backend/node_modules/async/whilst.js new file mode 100644 index 00000000..4165543f --- /dev/null +++ b/backend/node_modules/async/whilst.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); + +var _onlyOnce = require('./internal/onlyOnce.js'); + +var _onlyOnce2 = _interopRequireDefault(_onlyOnce); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +var _wrapAsync2 = _interopRequireDefault(_wrapAsync); + +var _awaitify = require('./internal/awaitify.js'); + +var _awaitify2 = _interopRequireDefault(_awaitify); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Repeatedly call `iteratee`, while `test` returns `true`. Calls `callback` when + * stopped, or an error occurs. + * + * @name whilst + * @static + * @memberOf module:ControlFlow + * @method + * @category Control Flow + * @param {AsyncFunction} test - asynchronous truth test to perform before each + * execution of `iteratee`. Invoked with (callback). + * @param {AsyncFunction} iteratee - An async function which is called each time + * `test` passes. Invoked with (callback). + * @param {Function} [callback] - A callback which is called after the test + * function has failed and repeated execution of `iteratee` has stopped. `callback` + * will be passed an error and any arguments passed to the final `iteratee`'s + * callback. 
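Because `waterfall` is wrapped with `awaitify`, omitting the callback returns a promise, as the `@returns` tag above notes but the examples do not show. A small async/await sketch (illustrative, not taken from the package docs):

```js
const async = require('async');

async function run() {
  const result = await async.waterfall([
    (callback) => callback(null, 'one', 'two'),
    (arg1, arg2, callback) => callback(null, `${arg1}-${arg2}`),
    (pair, callback) => callback(null, pair.toUpperCase())
  ]);
  console.log(result); // 'ONE-TWO'
}

run().catch(console.error);
```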
Invoked with (err, [results]); + * @returns {Promise} a promise, if no callback is passed + * @example + * + * var count = 0; + * async.whilst( + * function test(cb) { cb(null, count < 5); }, + * function iter(callback) { + * count++; + * setTimeout(function() { + * callback(null, count); + * }, 1000); + * }, + * function (err, n) { + * // 5 seconds have passed, n = 5 + * } + * ); + */ +function whilst(test, iteratee, callback) { + callback = (0, _onlyOnce2.default)(callback); + var _fn = (0, _wrapAsync2.default)(iteratee); + var _test = (0, _wrapAsync2.default)(test); + var results = []; + + function next(err, ...rest) { + if (err) return callback(err); + results = rest; + if (err === false) return; + _test(check); + } + + function check(err, truth) { + if (err) return callback(err); + if (err === false) return; + if (!truth) return callback(null, ...results); + _fn(next); + } + + return _test(check); +} +exports.default = (0, _awaitify2.default)(whilst, 3); +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/async/wrapSync.js b/backend/node_modules/async/wrapSync.js new file mode 100644 index 00000000..ddc3f02f --- /dev/null +++ b/backend/node_modules/async/wrapSync.js @@ -0,0 +1,118 @@ +'use strict'; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = asyncify; + +var _initialParams = require('./internal/initialParams.js'); + +var _initialParams2 = _interopRequireDefault(_initialParams); + +var _setImmediate = require('./internal/setImmediate.js'); + +var _setImmediate2 = _interopRequireDefault(_setImmediate); + +var _wrapAsync = require('./internal/wrapAsync.js'); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Take a sync function and make it async, passing its return value to a + * callback. This is useful for plugging sync functions into a waterfall, + * series, or other async functions. Any arguments passed to the generated + * function will be passed to the wrapped function (except for the final + * callback argument). Errors thrown will be passed to the callback. + * + * If the function passed to `asyncify` returns a Promise, that promises's + * resolved/rejected state will be used to call the callback, rather than simply + * the synchronous return value. + * + * This also means you can asyncify ES2017 `async` functions. + * + * @name asyncify + * @static + * @memberOf module:Utils + * @method + * @alias wrapSync + * @category Util + * @param {Function} func - The synchronous function, or Promise-returning + * function to convert to an {@link AsyncFunction}. + * @returns {AsyncFunction} An asynchronous wrapper of the `func`. To be + * invoked with `(args..., callback)`. + * @example + * + * // passing a regular synchronous function + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(JSON.parse), + * function (data, next) { + * // data is the result of parsing the text. + * // If there was a parsing error, it would have been caught. + * } + * ], callback); + * + * // passing a function returning a promise + * async.waterfall([ + * async.apply(fs.readFile, filename, "utf8"), + * async.asyncify(function (contents) { + * return db.model.create(contents); + * }), + * function (model, next) { + * // `model` is the instantiated model object. + * // If there was an error, this function would be skipped. 
+ * } + * ], callback); + * + * // es2017 example, though `asyncify` is not needed if your JS environment + * // supports async functions out of the box + * var q = async.queue(async.asyncify(async function(file) { + * var intermediateStep = await processFile(file); + * return await somePromise(intermediateStep) + * })); + * + * q.push(files); + */ +function asyncify(func) { + if ((0, _wrapAsync.isAsync)(func)) { + return function (...args /*, callback*/) { + const callback = args.pop(); + const promise = func.apply(this, args); + return handlePromise(promise, callback); + }; + } + + return (0, _initialParams2.default)(function (args, callback) { + var result; + try { + result = func.apply(this, args); + } catch (e) { + return callback(e); + } + // if result is Promise object + if (result && typeof result.then === 'function') { + return handlePromise(result, callback); + } else { + callback(null, result); + } + }); +} + +function handlePromise(promise, callback) { + return promise.then(value => { + invokeCallback(callback, null, value); + }, err => { + invokeCallback(callback, err && (err instanceof Error || err.message) ? err : new Error(err)); + }); +} + +function invokeCallback(callback, error, value) { + try { + callback(error, value); + } catch (err) { + (0, _setImmediate2.default)(e => { + throw e; + }, err); + } +} +module.exports = exports.default; \ No newline at end of file diff --git a/backend/node_modules/b4a/LICENSE b/backend/node_modules/b4a/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/backend/node_modules/b4a/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/node_modules/b4a/README.md b/backend/node_modules/b4a/README.md new file mode 100644 index 00000000..01a6ce76 --- /dev/null +++ b/backend/node_modules/b4a/README.md @@ -0,0 +1,153 @@ +# Buffer for Array + +Buffer for Array (B4A) provides a set of functions for bridging the gap between the Node.js `Buffer` class and the `Uint8Array` class. A browser compatibility layer is also included, making it possible to use B4A in both Node.js and browsers without having to worry about whether you're dealing with buffers or typed arrays. + +## Installation + +```sh +npm i b4a +``` + +### React Native + +When imported from React Native `b4a` will make use of `react-native-b4a` for optimised buffer operations when available. All you need to do is install it: + +```sh +npm i react-native-b4a +``` + +## API + +#### `b4a.isBuffer(value)` + +See https://nodejs.org/api/buffer.html#static-method-bufferisbufferobj + +This will also return `true` when passed a `Uint8Array`. 
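The README documents the API entry by entry but gives no end-to-end usage; a brief sketch of typical b4a calls (the expected values follow from the documented Buffer semantics, assuming Node.js):

```js
const b4a = require('b4a');

const a = b4a.from('hello', 'utf8'); // Buffer in Node.js, Uint8Array in browsers
const b = b4a.alloc(5, 0x21);        // five 0x21 ('!') bytes
const joined = b4a.concat([a, b]);

console.log(b4a.toString(joined, 'utf8'));      // 'hello!!!!!'
console.log(b4a.isBuffer(new Uint8Array(2)));   // true (Uint8Array counts too)
console.log(b4a.equals(a, b4a.from('hello')));  // true
```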
+ +#### `b4a.isEncoding(encoding)` + +See https://nodejs.org/api/buffer.html#static-method-bufferisencodingencoding + +#### `b4a.alloc(size[, fill[, encoding]])` + +See https://nodejs.org/api/buffer.html#static-method-bufferallocsize-fill-encoding + +#### `b4a.allocUnsafe(size)` + +See https://nodejs.org/api/buffer.html#static-method-bufferallocunsafesize + +#### `b4a.allocUnsafeSlow(size)` + +See https://nodejs.org/api/buffer.html#static-method-bufferallocunsafeslowsize + +#### `b4a.byteLength(string)` + +See https://nodejs.org/api/buffer.html#static-method-bufferbytelengthstring-encoding + +#### `b4a.compare(buf1, buf2)` + +See https://nodejs.org/api/buffer.html#static-method-buffercomparebuf1-buf2 + +#### `b4a.concat(buffers[, totalLength])` + +See https://nodejs.org/api/buffer.html#static-method-bufferconcatlist-totallength + +#### `b4a.copy(source, target[, targetStart[, sourceStart[, sourceEnd]]])` + +See https://nodejs.org/api/buffer.html#bufcopytarget-targetstart-sourcestart-sourceend + +#### `b4a.equals(buf1, buf2)` + +See https://nodejs.org/api/buffer.html#bufequalsotherbuffer + +#### `b4a.fill(buffer, value[, offset[, end]][, encoding])` + +See https://nodejs.org/api/buffer.html#buffillvalue-offset-end-encoding + +#### `b4a.from(array)` + +See https://nodejs.org/api/buffer.html#static-method-bufferfromarray + +#### `b4a.from(arrayBuffer[, byteOffset[, length]])` + +See https://nodejs.org/api/buffer.html#static-method-bufferfromarraybuffer-byteoffset-length + +#### `b4a.from(buffer)` + +See https://nodejs.org/api/buffer.html#static-method-bufferfrombuffer + +#### `b4a.from(string[, encoding])` + +See https://nodejs.org/api/buffer.html#static-method-bufferfromstring-encoding + +#### `b4a.includes(buffer, value[, byteOffset][, encoding])` + +See https://nodejs.org/api/buffer.html#bufincludesvalue-byteoffset-encoding + +#### `b4a.indexOf(buffer, value[, byteOffset][, encoding])` + +See https://nodejs.org/api/buffer.html#bufindexofvalue-byteoffset-encoding + +#### `b4a.lastIndexOf(buffer, value[, byteOffset][, encoding])` + +See https://nodejs.org/api/buffer.html#buflastindexofvalue-byteoffset-encoding + +#### `b4a.swap16(buffer)` + +See https://nodejs.org/api/buffer.html#bufswap16 + +#### `b4a.swap32(buffer)` + +See https://nodejs.org/api/buffer.html#bufswap32 + +#### `b4a.swap64(buffer)` + +See https://nodejs.org/api/buffer.html#bufswap64 + +#### `b4a.toBuffer(buffer)` + +Convert a buffer to its canonical representation. In Node.js, the canonical representation is a `Buffer`. In the browser, the canonical representation is a `Uint8Array`. 
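The numeric helpers documented below take the buffer as their first argument rather than being methods on it; a hedged sketch of the write/read round trip:

```js
const b4a = require('b4a');

const buf = b4a.alloc(12);

// Little-endian writes at explicit offsets.
b4a.writeUInt32LE(buf, 0xdeadbeef, 0);
b4a.writeDoubleLE(buf, 3.14, 4);

// Reading back the same values.
console.log(b4a.readUInt32LE(buf, 0).toString(16)); // 'deadbeef'
console.log(b4a.readDoubleLE(buf, 4));              // 3.14
```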
+ +#### `b4a.toString(buffer, [encoding[, start[, end]]])` + +See https://nodejs.org/api/buffer.html#buftostringencoding-start-end + +#### `b4a.write(buffer, string[, offset[, length]][, encoding])` + +See https://nodejs.org/api/buffer.html#bufwritestring-offset-length-encoding + +#### `b4a.writeDoubleLE(buffer, value[, offset])` + +See https://nodejs.org/api/buffer.html#bufwritedoublelevalue-offset + +#### `b4a.writeFloatLE(buffer, value[, offset])` + +See https://nodejs.org/api/buffer.html#bufwritefloatlevalue-offset + +#### `b4a.writeUInt32LE(buffer, value[, offset])` + +https://nodejs.org/api/buffer.html#bufwriteuint32levalue-offset + +#### `b4a.writeInt32LE(buffer, value[, offset])` + +See https://nodejs.org/api/buffer.html#bufwriteint32levalue-offset + +#### `b4a.readDoubleLE(buffer[, offset])` + +See https://nodejs.org/api/buffer.html#bufreaddoubleleoffset + +#### `b4a.readFloatLE(buffer[, offset])` + +See https://nodejs.org/api/buffer.html#bufreadfloatleoffset + +#### `b4a.readUInt32LE(buffer[, offset])` + +See https://nodejs.org/api/buffer.html#bufreaduint32leoffset + +#### `b4a.readInt32LE(buffer[, offset])` + +See https://nodejs.org/api/buffer.html#bufreadint32leoffset + +## License + +Apache 2.0 diff --git a/backend/node_modules/b4a/browser.js b/backend/node_modules/b4a/browser.js new file mode 100644 index 00000000..c35f7f23 --- /dev/null +++ b/backend/node_modules/b4a/browser.js @@ -0,0 +1,563 @@ +const ascii = require('./lib/ascii') +const base64 = require('./lib/base64') +const hex = require('./lib/hex') +const utf8 = require('./lib/utf8') +const utf16le = require('./lib/utf16le') + +const LE = new Uint8Array(Uint16Array.of(0xff).buffer)[0] === 0xff + +function codecFor(encoding) { + switch (encoding) { + case 'ascii': + return ascii + case 'base64': + return base64 + case 'hex': + return hex + case 'utf8': + case 'utf-8': + case undefined: + case null: + return utf8 + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return utf16le + default: + throw new Error(`Unknown encoding '${encoding}'`) + } +} + +function isBuffer(value) { + return value instanceof Uint8Array +} + +function isEncoding(encoding) { + try { + codecFor(encoding) + return true + } catch { + return false + } +} + +function alloc(size, fill, encoding) { + const buffer = new Uint8Array(size) + if (fill !== undefined) { + exports.fill(buffer, fill, 0, buffer.byteLength, encoding) + } + return buffer +} + +function allocUnsafe(size) { + return new Uint8Array(size) +} + +function allocUnsafeSlow(size) { + return new Uint8Array(size) +} + +function byteLength(string, encoding) { + return codecFor(encoding).byteLength(string) +} + +function compare(a, b) { + if (a === b) return 0 + + const len = Math.min(a.byteLength, b.byteLength) + + a = new DataView(a.buffer, a.byteOffset, a.byteLength) + b = new DataView(b.buffer, b.byteOffset, b.byteLength) + + let i = 0 + + for (let n = len - (len % 4); i < n; i += 4) { + const x = a.getUint32(i, LE) + const y = b.getUint32(i, LE) + if (x !== y) break + } + + for (; i < len; i++) { + const x = a.getUint8(i) + const y = b.getUint8(i) + if (x < y) return -1 + if (x > y) return 1 + } + + return a.byteLength > b.byteLength ? 1 : a.byteLength < b.byteLength ? 
-1 : 0 +} + +function concat(buffers, length) { + if (length === undefined) { + length = buffers.reduce((len, buffer) => len + buffer.byteLength, 0) + } + + const result = new Uint8Array(length) + + let offset = 0 + + for (const buffer of buffers) { + if (offset + buffer.byteLength > result.byteLength) { + result.set(buffer.subarray(0, result.byteLength - offset), offset) + return result + } + + result.set(buffer, offset) + offset += buffer.byteLength + } + + return result +} + +function copy( + source, + target, + targetStart = 0, + sourceStart = 0, + sourceEnd = source.byteLength +) { + if (targetStart < 0) targetStart = 0 + if (targetStart >= target.byteLength) return 0 + + const targetLength = target.byteLength - targetStart + + if (sourceStart < 0) sourceStart = 0 + if (sourceStart >= source.byteLength) return 0 + + if (sourceEnd <= sourceStart) return 0 + if (sourceEnd > source.byteLength) sourceEnd = source.byteLength + + if (sourceEnd - sourceStart > targetLength) { + sourceEnd = sourceStart + targetLength + } + + const sourceLength = sourceEnd - sourceStart + + if (source === target) { + target.copyWithin(targetStart, sourceStart, sourceEnd) + } else { + if (sourceStart !== 0 || sourceEnd !== source.byteLength) { + source = source.subarray(sourceStart, sourceEnd) + } + + target.set(source, targetStart) + } + + return sourceLength +} + +function equals(a, b) { + if (a === b) return true + if (a.byteLength !== b.byteLength) return false + + return compare(a, b) === 0 +} + +function fill( + buffer, + value, + offset = 0, + end = buffer.byteLength, + encoding = 'utf8' +) { + if (typeof value === 'string') { + if (typeof offset === 'string') { + // fill(string, encoding) + encoding = offset + offset = 0 + end = buffer.byteLength + } else if (typeof end === 'string') { + // fill(string, offset, encoding) + encoding = end + end = buffer.byteLength + } + } else if (typeof value === 'number') { + value = value & 0xff + } else if (typeof value === 'boolean') { + value = +value + } + + if (offset < 0) offset = 0 + if (offset >= buffer.byteLength) return buffer + + if (end <= offset) return buffer + if (end > buffer.byteLength) end = buffer.byteLength + + if (typeof value === 'number') return buffer.fill(value, offset, end) + + if (typeof value === 'string') value = exports.from(value, encoding) + + const len = value.byteLength + + for (let i = 0, n = end - offset; i < n; ++i) { + buffer[i + offset] = value[i % len] + } + + return buffer +} + +function from(value, encodingOrOffset, length) { + // from(string, encoding) + if (typeof value === 'string') return fromString(value, encodingOrOffset) + + // from(array) + if (Array.isArray(value)) return fromArray(value) + + // from(buffer) + if (ArrayBuffer.isView(value)) return fromBuffer(value) + + // from(arrayBuffer[, byteOffset[, length]]) + return fromArrayBuffer(value, encodingOrOffset, length) +} + +function fromString(string, encoding) { + const codec = codecFor(encoding) + const buffer = new Uint8Array(codec.byteLength(string)) + codec.write(buffer, string) + return buffer +} + +function fromArray(array) { + const buffer = new Uint8Array(array.length) + buffer.set(array) + return buffer +} + +function fromBuffer(buffer) { + const copy = new Uint8Array(buffer.byteLength) + copy.set(buffer) + return copy +} + +function fromArrayBuffer(arrayBuffer, byteOffset, length) { + return new Uint8Array(arrayBuffer, byteOffset, length) +} + +function includes(buffer, value, byteOffset, encoding) { + return indexOf(buffer, value, byteOffset, encoding) 
!== -1 +} + +function indexOf(buffer, value, byteOffset, encoding) { + return bidirectionalIndexOf( + buffer, + value, + byteOffset, + encoding, + true /* first */ + ) +} + +function lastIndexOf(buffer, value, byteOffset, encoding) { + return bidirectionalIndexOf( + buffer, + value, + byteOffset, + encoding, + false /* last */ + ) +} + +function bidirectionalIndexOf(buffer, value, byteOffset, encoding, first) { + if (buffer.byteLength === 0) return -1 + + if (typeof byteOffset === 'string') { + encoding = byteOffset + byteOffset = 0 + } else if (byteOffset === undefined) { + byteOffset = first ? 0 : buffer.length - 1 + } else if (byteOffset < 0) { + byteOffset += buffer.byteLength + } + + if (byteOffset >= buffer.byteLength) { + if (first) return -1 + else byteOffset = buffer.byteLength - 1 + } else if (byteOffset < 0) { + if (first) byteOffset = 0 + else return -1 + } + + if (typeof value === 'string') { + value = from(value, encoding) + } else if (typeof value === 'number') { + value = value & 0xff + + if (first) { + return buffer.indexOf(value, byteOffset) + } else { + return buffer.lastIndexOf(value, byteOffset) + } + } + + if (value.byteLength === 0) return -1 + + if (first) { + let foundIndex = -1 + + for (let i = byteOffset; i < buffer.byteLength; i++) { + if (buffer[i] === value[foundIndex === -1 ? 0 : i - foundIndex]) { + if (foundIndex === -1) foundIndex = i + if (i - foundIndex + 1 === value.byteLength) return foundIndex + } else { + if (foundIndex !== -1) i -= i - foundIndex + foundIndex = -1 + } + } + } else { + if (byteOffset + value.byteLength > buffer.byteLength) { + byteOffset = buffer.byteLength - value.byteLength + } + + for (let i = byteOffset; i >= 0; i--) { + let found = true + + for (let j = 0; j < value.byteLength; j++) { + if (buffer[i + j] !== value[j]) { + found = false + break + } + } + + if (found) return i + } + } + + return -1 +} + +function swap(buffer, n, m) { + const i = buffer[n] + buffer[n] = buffer[m] + buffer[m] = i +} + +function swap16(buffer) { + const len = buffer.byteLength + + if (len % 2 !== 0) + throw new RangeError('Buffer size must be a multiple of 16-bits') + + for (let i = 0; i < len; i += 2) swap(buffer, i, i + 1) + + return buffer +} + +function swap32(buffer) { + const len = buffer.byteLength + + if (len % 4 !== 0) + throw new RangeError('Buffer size must be a multiple of 32-bits') + + for (let i = 0; i < len; i += 4) { + swap(buffer, i, i + 3) + swap(buffer, i + 1, i + 2) + } + + return buffer +} + +function swap64(buffer) { + const len = buffer.byteLength + + if (len % 8 !== 0) + throw new RangeError('Buffer size must be a multiple of 64-bits') + + for (let i = 0; i < len; i += 8) { + swap(buffer, i, i + 7) + swap(buffer, i + 1, i + 6) + swap(buffer, i + 2, i + 5) + swap(buffer, i + 3, i + 4) + } + + return buffer +} + +function toBuffer(buffer) { + return buffer +} + +function toString( + buffer, + encoding = 'utf8', + start = 0, + end = buffer.byteLength +) { + // toString(buffer) + if (arguments.length === 1) return utf8.toString(buffer) + + // toString(buffer, encoding) + if (arguments.length === 2) return codecFor(encoding).toString(buffer) + + if (start < 0) start = 0 + if (start >= buffer.byteLength) return '' + + if (end <= start) return '' + if (end > buffer.byteLength) end = buffer.byteLength + + if (start !== 0 || end !== buffer.byteLength) { + buffer = buffer.subarray(start, end) + } + + return codecFor(encoding).toString(buffer) +} + +function write(buffer, string, offset, length, encoding) { + // write(buffer, string) + 
if (arguments.length === 2) return utf8.write(buffer, string) + + if (typeof offset === 'string') { + // write(buffer, string, encoding) + encoding = offset + offset = 0 + length = buffer.byteLength + } else if (typeof length === 'string') { + // write(buffer, string, offset, encoding) + encoding = length + length = buffer.byteLength - offset + } + + length = Math.min(length, exports.byteLength(string, encoding)) + + let start = offset + if (start < 0) start = 0 + if (start >= buffer.byteLength) return 0 + + let end = offset + length + if (end <= start) return 0 + if (end > buffer.byteLength) end = buffer.byteLength + + if (start !== 0 || end !== buffer.byteLength) { + buffer = buffer.subarray(start, end) + } + + return codecFor(encoding).write(buffer, string) +} + +function readDoubleBE(buffer, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + return view.getFloat64(offset, false) +} + +function readDoubleLE(buffer, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + return view.getFloat64(offset, true) +} + +function readFloatBE(buffer, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + return view.getFloat32(offset, false) +} + +function readFloatLE(buffer, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + return view.getFloat32(offset, true) +} + +function readInt32BE(buffer, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + return view.getInt32(offset, false) +} + +function readInt32LE(buffer, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + return view.getInt32(offset, true) +} + +function readUInt32BE(buffer, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + return view.getUint32(offset, false) +} + +function readUInt32LE(buffer, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + return view.getUint32(offset, true) +} + +function writeDoubleBE(buffer, value, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + view.setFloat64(offset, value, false) + return offset + 8 +} + +function writeDoubleLE(buffer, value, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + view.setFloat64(offset, value, true) + return offset + 8 +} + +function writeFloatBE(buffer, value, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + view.setFloat32(offset, value, false) + return offset + 4 +} + +function writeFloatLE(buffer, value, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + view.setFloat32(offset, value, true) + return offset + 4 +} + +function writeInt32BE(buffer, value, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + view.setInt32(offset, value, false) + return offset + 4 +} + +function writeInt32LE(buffer, value, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + view.setInt32(offset, value, true) + return offset + 4 +} + +function writeUInt32BE(buffer, value, offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + view.setUint32(offset, value, false) + return offset + 4 +} + +function writeUInt32LE(buffer, value, 
offset = 0) { + const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength) + view.setUint32(offset, value, true) + return offset + 4 +} + +module.exports = exports = { + isBuffer, + isEncoding, + alloc, + allocUnsafe, + allocUnsafeSlow, + byteLength, + compare, + concat, + copy, + equals, + fill, + from, + includes, + indexOf, + lastIndexOf, + swap16, + swap32, + swap64, + toBuffer, + toString, + write, + readDoubleBE, + readDoubleLE, + readFloatBE, + readFloatLE, + readInt32BE, + readInt32LE, + readUInt32BE, + readUInt32LE, + writeDoubleBE, + writeDoubleLE, + writeFloatBE, + writeFloatLE, + writeInt32BE, + writeInt32LE, + writeUInt32BE, + writeUInt32LE +} diff --git a/backend/node_modules/b4a/index.js b/backend/node_modules/b4a/index.js new file mode 100644 index 00000000..7e0251a9 --- /dev/null +++ b/backend/node_modules/b4a/index.js @@ -0,0 +1,188 @@ +function isBuffer(value) { + return Buffer.isBuffer(value) || value instanceof Uint8Array +} + +function isEncoding(encoding) { + return Buffer.isEncoding(encoding) +} + +function alloc(size, fill, encoding) { + return Buffer.alloc(size, fill, encoding) +} + +function allocUnsafe(size) { + return Buffer.allocUnsafe(size) +} + +function allocUnsafeSlow(size) { + return Buffer.allocUnsafeSlow(size) +} + +function byteLength(string, encoding) { + return Buffer.byteLength(string, encoding) +} + +function compare(a, b) { + return Buffer.compare(a, b) +} + +function concat(buffers, totalLength) { + return Buffer.concat(buffers, totalLength) +} + +function copy(source, target, targetStart, start, end) { + return toBuffer(source).copy(target, targetStart, start, end) +} + +function equals(a, b) { + return toBuffer(a).equals(b) +} + +function fill(buffer, value, offset, end, encoding) { + return toBuffer(buffer).fill(value, offset, end, encoding) +} + +function from(value, encodingOrOffset, length) { + return Buffer.from(value, encodingOrOffset, length) +} + +function includes(buffer, value, byteOffset, encoding) { + return toBuffer(buffer).includes(value, byteOffset, encoding) +} + +function indexOf(buffer, value, byfeOffset, encoding) { + return toBuffer(buffer).indexOf(value, byfeOffset, encoding) +} + +function lastIndexOf(buffer, value, byteOffset, encoding) { + return toBuffer(buffer).lastIndexOf(value, byteOffset, encoding) +} + +function swap16(buffer) { + return toBuffer(buffer).swap16() +} + +function swap32(buffer) { + return toBuffer(buffer).swap32() +} + +function swap64(buffer) { + return toBuffer(buffer).swap64() +} + +function toBuffer(buffer) { + if (Buffer.isBuffer(buffer)) return buffer + return Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength) +} + +function toString(buffer, encoding, start, end) { + return toBuffer(buffer).toString(encoding, start, end) +} + +function write(buffer, string, offset, length, encoding) { + return toBuffer(buffer).write(string, offset, length, encoding) +} + +function readDoubleBE(buffer, offset) { + return toBuffer(buffer).readDoubleBE(offset) +} + +function readDoubleLE(buffer, offset) { + return toBuffer(buffer).readDoubleLE(offset) +} + +function readFloatBE(buffer, offset) { + return toBuffer(buffer).readFloatBE(offset) +} + +function readFloatLE(buffer, offset) { + return toBuffer(buffer).readFloatLE(offset) +} + +function readInt32BE(buffer, offset) { + return toBuffer(buffer).readInt32BE(offset) +} + +function readInt32LE(buffer, offset) { + return toBuffer(buffer).readInt32LE(offset) +} + +function readUInt32BE(buffer, offset) { + return 
toBuffer(buffer).readUInt32BE(offset) +} + +function readUInt32LE(buffer, offset) { + return toBuffer(buffer).readUInt32LE(offset) +} + +function writeDoubleBE(buffer, value, offset) { + return toBuffer(buffer).writeDoubleBE(value, offset) +} + +function writeDoubleLE(buffer, value, offset) { + return toBuffer(buffer).writeDoubleLE(value, offset) +} + +function writeFloatBE(buffer, value, offset) { + return toBuffer(buffer).writeFloatBE(value, offset) +} + +function writeFloatLE(buffer, value, offset) { + return toBuffer(buffer).writeFloatLE(value, offset) +} + +function writeInt32BE(buffer, value, offset) { + return toBuffer(buffer).writeInt32BE(value, offset) +} + +function writeInt32LE(buffer, value, offset) { + return toBuffer(buffer).writeInt32LE(value, offset) +} + +function writeUInt32BE(buffer, value, offset) { + return toBuffer(buffer).writeUInt32BE(value, offset) +} + +function writeUInt32LE(buffer, value, offset) { + return toBuffer(buffer).writeUInt32LE(value, offset) +} + +module.exports = { + isBuffer, + isEncoding, + alloc, + allocUnsafe, + allocUnsafeSlow, + byteLength, + compare, + concat, + copy, + equals, + fill, + from, + includes, + indexOf, + lastIndexOf, + swap16, + swap32, + swap64, + toBuffer, + toString, + write, + readDoubleBE, + readDoubleLE, + readFloatBE, + readFloatLE, + readInt32BE, + readInt32LE, + readUInt32BE, + readUInt32LE, + writeDoubleBE, + writeDoubleLE, + writeFloatBE, + writeFloatLE, + writeInt32BE, + writeInt32LE, + writeUInt32BE, + writeUInt32LE +} diff --git a/backend/node_modules/b4a/lib/ascii.js b/backend/node_modules/b4a/lib/ascii.js new file mode 100644 index 00000000..f2714378 --- /dev/null +++ b/backend/node_modules/b4a/lib/ascii.js @@ -0,0 +1,31 @@ +function byteLength(string) { + return string.length +} + +function toString(buffer) { + const len = buffer.byteLength + + let result = '' + + for (let i = 0; i < len; i++) { + result += String.fromCharCode(buffer[i]) + } + + return result +} + +function write(buffer, string) { + const len = buffer.byteLength + + for (let i = 0; i < len; i++) { + buffer[i] = string.charCodeAt(i) + } + + return len +} + +module.exports = { + byteLength, + toString, + write +} diff --git a/backend/node_modules/b4a/lib/base64.js b/backend/node_modules/b4a/lib/base64.js new file mode 100644 index 00000000..43a03712 --- /dev/null +++ b/backend/node_modules/b4a/lib/base64.js @@ -0,0 +1,65 @@ +const alphabet = + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' + +const codes = new Uint8Array(256) + +for (let i = 0; i < alphabet.length; i++) { + codes[alphabet.charCodeAt(i)] = i +} + +codes[/* - */ 0x2d] = 62 +codes[/* _ */ 0x5f] = 63 + +function byteLength(string) { + let len = string.length + + if (string.charCodeAt(len - 1) === 0x3d) len-- + if (len > 1 && string.charCodeAt(len - 1) === 0x3d) len-- + + return (len * 3) >>> 2 +} + +function toString(buffer) { + const len = buffer.byteLength + + let result = '' + + for (let i = 0; i < len; i += 3) { + result += + alphabet[buffer[i] >> 2] + + alphabet[((buffer[i] & 3) << 4) | (buffer[i + 1] >> 4)] + + alphabet[((buffer[i + 1] & 15) << 2) | (buffer[i + 2] >> 6)] + + alphabet[buffer[i + 2] & 63] + } + + if (len % 3 === 2) { + result = result.substring(0, result.length - 1) + '=' + } else if (len % 3 === 1) { + result = result.substring(0, result.length - 2) + '==' + } + + return result +} + +function write(buffer, string) { + const len = buffer.byteLength + + for (let i = 0, j = 0; j < len; i += 4) { + const a = codes[string.charCodeAt(i)] + const b = 
codes[string.charCodeAt(i + 1)] + const c = codes[string.charCodeAt(i + 2)] + const d = codes[string.charCodeAt(i + 3)] + + buffer[j++] = (a << 2) | (b >> 4) + buffer[j++] = ((b & 15) << 4) | (c >> 2) + buffer[j++] = ((c & 3) << 6) | (d & 63) + } + + return len +} + +module.exports = { + byteLength, + toString, + write +} diff --git a/backend/node_modules/b4a/lib/hex.js b/backend/node_modules/b4a/lib/hex.js new file mode 100644 index 00000000..a5a1cf33 --- /dev/null +++ b/backend/node_modules/b4a/lib/hex.js @@ -0,0 +1,51 @@ +function byteLength(string) { + return string.length >>> 1 +} + +function toString(buffer) { + const len = buffer.byteLength + + buffer = new DataView(buffer.buffer, buffer.byteOffset, len) + + let result = '' + let i = 0 + + for (let n = len - (len % 4); i < n; i += 4) { + result += buffer.getUint32(i).toString(16).padStart(8, '0') + } + + for (; i < len; i++) { + result += buffer.getUint8(i).toString(16).padStart(2, '0') + } + + return result +} + +function write(buffer, string) { + const len = buffer.byteLength + + for (let i = 0; i < len; i++) { + const a = hexValue(string.charCodeAt(i * 2)) + const b = hexValue(string.charCodeAt(i * 2 + 1)) + + if (a === undefined || b === undefined) { + return buffer.subarray(0, i) + } + + buffer[i] = (a << 4) | b + } + + return len +} + +module.exports = { + byteLength, + toString, + write +} + +function hexValue(char) { + if (char >= 0x30 && char <= 0x39) return char - 0x30 + if (char >= 0x41 && char <= 0x46) return char - 0x41 + 10 + if (char >= 0x61 && char <= 0x66) return char - 0x61 + 10 +} diff --git a/backend/node_modules/b4a/lib/utf16le.js b/backend/node_modules/b4a/lib/utf16le.js new file mode 100644 index 00000000..77f75d42 --- /dev/null +++ b/backend/node_modules/b4a/lib/utf16le.js @@ -0,0 +1,40 @@ +function byteLength(string) { + return string.length * 2 +} + +function toString(buffer) { + const len = buffer.byteLength + + let result = '' + + for (let i = 0; i < len - 1; i += 2) { + result += String.fromCharCode(buffer[i] + buffer[i + 1] * 256) + } + + return result +} + +function write(buffer, string) { + const len = buffer.byteLength + + let units = len + + for (let i = 0; i < string.length; ++i) { + if ((units -= 2) < 0) break + + const c = string.charCodeAt(i) + const hi = c >> 8 + const lo = c % 256 + + buffer[i * 2] = lo + buffer[i * 2 + 1] = hi + } + + return len +} + +module.exports = { + byteLength, + toString, + write +} diff --git a/backend/node_modules/b4a/lib/utf8.js b/backend/node_modules/b4a/lib/utf8.js new file mode 100644 index 00000000..fded2245 --- /dev/null +++ b/backend/node_modules/b4a/lib/utf8.js @@ -0,0 +1,141 @@ +function byteLength(string) { + let length = 0 + + for (let i = 0, n = string.length; i < n; i++) { + const code = string.charCodeAt(i) + + if (code >= 0xd800 && code <= 0xdbff && i + 1 < n) { + const code = string.charCodeAt(i + 1) + + if (code >= 0xdc00 && code <= 0xdfff) { + length += 4 + i++ + continue + } + } + + if (code <= 0x7f) length += 1 + else if (code <= 0x7ff) length += 2 + else length += 3 + } + + return length +} + +let toString + +if (typeof TextDecoder !== 'undefined') { + const decoder = new TextDecoder() + + toString = function toString(buffer) { + return decoder.decode(buffer) + } +} else { + toString = function toString(buffer) { + const len = buffer.byteLength + + let output = '' + let i = 0 + + while (i < len) { + let byte = buffer[i] + + if (byte <= 0x7f) { + output += String.fromCharCode(byte) + i++ + continue + } + + let bytesNeeded = 0 + let codePoint = 0 + 
+ if (byte <= 0xdf) { + bytesNeeded = 1 + codePoint = byte & 0x1f + } else if (byte <= 0xef) { + bytesNeeded = 2 + codePoint = byte & 0x0f + } else if (byte <= 0xf4) { + bytesNeeded = 3 + codePoint = byte & 0x07 + } + + if (len - i - bytesNeeded > 0) { + let k = 0 + + while (k < bytesNeeded) { + byte = buffer[i + k + 1] + codePoint = (codePoint << 6) | (byte & 0x3f) + k += 1 + } + } else { + codePoint = 0xfffd + bytesNeeded = len - i + } + + output += String.fromCodePoint(codePoint) + i += bytesNeeded + 1 + } + + return output + } +} + +let write + +if (typeof TextEncoder !== 'undefined') { + const encoder = new TextEncoder() + + write = function write(buffer, string) { + return encoder.encodeInto(string, buffer).written + } +} else { + write = function write(buffer, string) { + const len = buffer.byteLength + + let i = 0 + let j = 0 + + while (i < string.length) { + const code = string.codePointAt(i) + + if (code <= 0x7f) { + buffer[j++] = code + i++ + continue + } + + let count = 0 + let bits = 0 + + if (code <= 0x7ff) { + count = 6 + bits = 0xc0 + } else if (code <= 0xffff) { + count = 12 + bits = 0xe0 + } else if (code <= 0x1fffff) { + count = 18 + bits = 0xf0 + } + + buffer[j++] = bits | (code >> count) + count -= 6 + + while (count >= 0) { + buffer[j++] = 0x80 | ((code >> count) & 0x3f) + count -= 6 + } + + i += code >= 0x10000 ? 2 : 1 + } + + return len + } +} + +module.exports = { + byteLength, + toString, + write +} diff --git a/backend/node_modules/b4a/package.json b/backend/node_modules/b4a/package.json new file mode 100644 index 00000000..2bcf65e9 --- /dev/null +++ b/backend/node_modules/b4a/package.json @@ -0,0 +1,49 @@ +{ + "name": "b4a", + "version": "1.7.3", + "description": "Bridging the gap between buffers and typed arrays", + "exports": { + "./package": "./package.json", + ".": { + "react-native": "./react-native.js", + "browser": "./browser.js", + "default": "./index.js" + } + }, + "files": [ + "browser.js", + "index.js", + "react-native.js", + "lib" + ], + "scripts": { + "test": "npm run lint && npm run test:bare && npm run test:node", + "test:bare": "bare test.mjs", + "test:node": "node test.mjs", + "lint": "prettier . --check" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/holepunchto/b4a.git" + }, + "author": "Holepunch", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/holepunchto/b4a/issues" + }, + "homepage": "https://github.com/holepunchto/b4a#readme", + "devDependencies": { + "brittle": "^3.5.2", + "nanobench": "^3.0.0", + "prettier": "^3.6.2", + "prettier-config-holepunch": "^1.0.0" + }, + "peerDependencies": { + "react-native-b4a": "*" + }, + "peerDependenciesMeta": { + "react-native-b4a": { + "optional": true + } + } +} diff --git a/backend/node_modules/b4a/react-native.js b/backend/node_modules/b4a/react-native.js new file mode 100644 index 00000000..0bf04e8e --- /dev/null +++ b/backend/node_modules/b4a/react-native.js @@ -0,0 +1,5 @@ +try { + module.exports = require('react-native-b4a') +} catch { + module.exports = require('./browser') +} diff --git a/backend/node_modules/bare-events/LICENSE b/backend/node_modules/bare-events/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/backend/node_modules/bare-events/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/node_modules/bare-events/README.md b/backend/node_modules/bare-events/README.md new file mode 100644 index 00000000..f56b4593 --- /dev/null +++ b/backend/node_modules/bare-events/README.md @@ -0,0 +1,25 @@ +# bare-events + +Event emitters for JavaScript. + +``` +npm install bare-events +``` + +## Usage + +```js +const EventEmitter = require('bare-events') + +const e = new EventEmitter() + +e.on('hello', function (data) { + console.log(data) +}) + +e.emit('hello', 'world') +``` + +## License + +Apache-2.0 diff --git a/backend/node_modules/bare-events/global.d.ts b/backend/node_modules/bare-events/global.d.ts new file mode 100644 index 00000000..92ea0d6e --- /dev/null +++ b/backend/node_modules/bare-events/global.d.ts @@ -0,0 +1,15 @@ +import * as events from './web' + +type EventConstructor = typeof events.Event +type CustomEventConstructor = typeof events.CustomEvent +type EventTargetConstructor = typeof events.EventTarget + +declare global { + type Event = events.Event + type CustomEvent = events.CustomEvent + type EventTarget = events.EventTarget + + const Event: EventConstructor + const CustomEvent: CustomEventConstructor + const EventTarget: EventTargetConstructor +} diff --git a/backend/node_modules/bare-events/global.js b/backend/node_modules/bare-events/global.js new file mode 100644 index 00000000..d8caa0bf --- /dev/null +++ b/backend/node_modules/bare-events/global.js @@ -0,0 +1,5 @@ +const events = require('./web') + +global.Event = events.Event +global.CustomEvent = events.CustomEvent +global.EventTarget = events.EventTarget diff --git a/backend/node_modules/bare-events/index.d.ts b/backend/node_modules/bare-events/index.d.ts new file mode 100644 index 00000000..f0849f74 --- /dev/null +++ b/backend/node_modules/bare-events/index.d.ts @@ -0,0 +1,81 @@ +import { AbortSignal } from 'bare-abort-controller' + +interface EventMap { + [event: string | symbol]: unknown[] +} + +interface EventHandler { + (...args: A): R +} + +declare class EventEmitterError extends Error { + static OPERATION_ABORTED(cause: Error, msg?: string): EventEmitterError + static UNHANDLED_ERROR(cause: Error, msg?: string): EventEmitterError +} + +interface EventEmitter { + addListener(name: E, fn: EventHandler): this + + addOnceListener(name: E, fn: EventHandler): this + + prependListener(name: E, fn: EventHandler): this + + prependOnceListener(name: E, fn: EventHandler): this + + removeListener(name: E, fn: EventHandler): this + + removeAllListeners(name?: E): this + + on(name: E, fn: EventHandler): this + + once(name: E, fn: EventHandler): this + + off(name: E, fn: EventHandler): this + + emit(name: E, ...args: M[E]): boolean + + listeners(name: E): EventHandler + + listenerCount(name: E): number + + getMaxListeners(): number + setMaxListeners(n: number): void +} + +declare class EventEmitter {} + +declare namespace EventEmitter { + export function on( + emitter: EventEmitter, + name: E, + opts?: { signal?: AbortSignal } + ): AsyncIterableIterator + + export function once( + emitter: EventEmitter, + name: E, + opts?: { signal?: AbortSignal } + ): Promise + + export function forward>( + from: EventEmitter, + to: EventEmitter, + names: E | E[], + opts?: { emit?: (name: E, ...args: T[E]) => void } + ): void + + export function listenerCount( + emitter: EventEmitter, + name: E + ): number + + export function getMaxListeners(emitter: EventEmitter): number + + export function setMaxListeners(n: number, ...emitters: EventEmitter[]): void + + export let defaultMaxListeners: number + + export { 
EventEmitter, EventEmitterError as errors, EventMap, EventHandler } +} + +export = EventEmitter diff --git a/backend/node_modules/bare-events/index.js b/backend/node_modules/bare-events/index.js new file mode 100644 index 00000000..ac648714 --- /dev/null +++ b/backend/node_modules/bare-events/index.js @@ -0,0 +1,367 @@ +const errors = require('./lib/errors') + +class EventListener { + constructor() { + this.list = [] + this.count = 0 + } + + append(ctx, name, fn, once) { + this.count++ + ctx.emit('newListener', name, fn) // Emit BEFORE adding + this.list.push([fn, once]) + } + + prepend(ctx, name, fn, once) { + this.count++ + ctx.emit('newListener', name, fn) // Emit BEFORE adding + this.list.unshift([fn, once]) + } + + remove(ctx, name, fn) { + for (let i = 0, n = this.list.length; i < n; i++) { + const l = this.list[i] + + if (l[0] === fn) { + this.list.splice(i, 1) + + if (this.count === 1) delete ctx._events[name] + + ctx.emit('removeListener', name, fn) // Emit AFTER removing + + this.count-- + return + } + } + } + + removeAll(ctx, name) { + const list = [...this.list] + this.list = [] + + if (this.count === list.length) delete ctx._events[name] + + for (let i = list.length - 1; i >= 0; i--) { + ctx.emit('removeListener', name, list[i][0]) // Emit AFTER removing + } + + this.count -= list.length + } + + emit(ctx, name, ...args) { + const list = [...this.list] + + for (let i = 0, n = list.length; i < n; i++) { + const l = list[i] + + if (l[1] === true) this.remove(ctx, name, l[0]) + + Reflect.apply(l[0], ctx, args) + } + + return list.length > 0 + } +} + +function appendListener(ctx, name, fn, once) { + if (ctx._events === undefined) ctx._events = Object.create(null) + const e = ctx._events[name] || (ctx._events[name] = new EventListener()) + e.append(ctx, name, fn, once) + return ctx +} + +function prependListener(ctx, name, fn, once) { + if (ctx._events === undefined) ctx._events = Object.create(null) + const e = ctx._events[name] || (ctx._events[name] = new EventListener()) + e.prepend(ctx, name, fn, once) + return ctx +} + +function removeListener(ctx, name, fn) { + if (ctx._events === undefined) return ctx + const e = ctx._events[name] + if (e !== undefined) e.remove(ctx, name, fn) + return ctx +} + +function throwUnhandledError(...args) { + let err + + if (args.length > 0) err = args[0] + + if (err instanceof Error === false) err = errors.UNHANDLED_ERROR(err) + + if (Error.captureStackTrace) { + Error.captureStackTrace(err, exports.prototype.emit) + } + + queueMicrotask(() => { + throw err + }) +} + +module.exports = exports = class EventEmitter { + constructor() { + this._events = Object.create(null) + } + + addListener(name, fn) { + return appendListener(this, name, fn, false) + } + + addOnceListener(name, fn) { + return appendListener(this, name, fn, true) + } + + prependListener(name, fn) { + return prependListener(this, name, fn, false) + } + + prependOnceListener(name, fn) { + return prependListener(this, name, fn, true) + } + + removeListener(name, fn) { + return removeListener(this, name, fn) + } + + on(name, fn) { + return appendListener(this, name, fn, false) + } + + once(name, fn) { + return appendListener(this, name, fn, true) + } + + off(name, fn) { + return removeListener(this, name, fn) + } + + emit(name, ...args) { + if (name === 'error' && this._events !== undefined && this._events.error === undefined) { + throwUnhandledError(...args) + } + + if (this._events === undefined) return false + const e = this._events[name] + return e === undefined ? 
false : e.emit(this, name, ...args) + } + + listeners(name) { + if (this._events === undefined) return [] + const e = this._events[name] + return e === undefined ? [] : [...e.list] + } + + listenerCount(name) { + if (this._events === undefined) return 0 + const e = this._events[name] + return e === undefined ? 0 : e.list.length + } + + getMaxListeners() { + return EventEmitter.defaultMaxListeners + } + + setMaxListeners(n) {} + + removeAllListeners(name) { + if (arguments.length === 0) { + for (const key of Reflect.ownKeys(this._events)) { + if (key === 'removeListener') continue + this.removeAllListeners(key) + } + this.removeAllListeners('removeListener') + } else { + const e = this._events[name] + if (e !== undefined) e.removeAll(this, name) + } + return this + } +} + +exports.EventEmitter = exports + +exports.errors = errors + +exports.defaultMaxListeners = 10 + +exports.on = function on(emitter, name, opts = {}) { + const { signal } = opts + + if (signal && signal.aborted) { + throw errors.OPERATION_ABORTED(signal.reason) + } + + let error = null + let done = false + + const events = [] + const promises = [] + + if (name !== 'error') emitter.on('error', onerror) + + if (signal) signal.addEventListener('abort', onabort) + + emitter.on(name, onevent) + + return { + next() { + if (events.length) { + return Promise.resolve({ value: events.shift(), done: false }) + } + + if (error) { + const err = error + + error = null + + return Promise.reject(err) + } + + if (done) return onclose() + + return new Promise((resolve, reject) => promises.push({ resolve, reject })) + }, + + return() { + return onclose() + }, + + throw(err) { + return onerror(err) + }, + + [Symbol.asyncIterator]() { + return this + } + } + + function onevent(...args) { + if (promises.length) { + promises.shift().resolve({ value: args, done: false }) + } else { + events.push(args) + } + } + + function onerror(err) { + emitter.off(name, onevent).off('error', onerror) + + if (promises.length) { + promises.shift().reject(err) + } else { + error = err + } + + return Promise.resolve({ done: true }) + } + + function onabort() { + signal.removeEventListener('abort', onabort) + + onerror(errors.OPERATION_ABORTED(signal.reason)) + } + + function onclose() { + emitter.off(name, onevent) + + if (name !== 'error') emitter.off('error', onerror) + + if (signal) signal.removeEventListener('abort', onabort) + + done = true + + if (promises.length) promises.shift().resolve({ done: true }) + + return Promise.resolve({ done: true }) + } +} + +exports.once = function once(emitter, name, opts = {}) { + const { signal } = opts + + if (signal && signal.aborted) { + return Promise.reject(errors.OPERATION_ABORTED(signal.reason)) + } + + return new Promise((resolve, reject) => { + if (name !== 'error') emitter.on('error', onerror) + + if (signal) signal.addEventListener('abort', onabort) + + emitter.once(name, onevent) + + function onevent(...args) { + if (name !== 'error') emitter.off('error', onerror) + + if (signal) signal.removeEventListener('abort', onabort) + + resolve(args) + } + + function onerror(err) { + emitter.off(name, onevent) + + if (name !== 'error') emitter.off('error', onerror) + + reject(err) + } + + function onabort() { + signal.removeEventListener('abort', onabort) + + onerror(errors.OPERATION_ABORTED(signal.reason)) + } + }) +} + +exports.forward = function forward(from, to, names, opts = {}) { + if (typeof names === 'string') names = [names] + + const { emit = to.emit.bind(to) } = opts + + const listeners = names.map( + (name) => 
+ function onevent(...args) { + emit(name, ...args) + } + ) + + to.on('newListener', (name) => { + const i = names.indexOf(name) + + if (i !== -1 && to.listenerCount(name) === 0) { + from.on(name, listeners[i]) + } + }).on('removeListener', (name) => { + const i = names.indexOf(name) + + if (i !== -1 && to.listenerCount(name) === 0) { + from.off(name, listeners[i]) + } + }) +} + +exports.listenerCount = function listenerCount(emitter, name) { + return emitter.listenerCount(name) +} + +exports.getMaxListeners = function getMaxListeners(emitter) { + if (typeof emitter.getMaxListeners === 'function') { + return emitter.getMaxListeners() + } + + return exports.defaultMaxListeners +} + +exports.setMaxListeners = function setMaxListeners(n, ...emitters) { + if (emitters.length === 0) exports.defaultMaxListeners = n + else { + for (const emitter of emitters) { + if (typeof emitter.setMaxListeners === 'function') { + emitter.setMaxListeners(n) + } + } + } +} diff --git a/backend/node_modules/bare-events/lib/errors.js b/backend/node_modules/bare-events/lib/errors.js new file mode 100644 index 00000000..39f1d9a1 --- /dev/null +++ b/backend/node_modules/bare-events/lib/errors.js @@ -0,0 +1,26 @@ +module.exports = class EventEmitterError extends Error { + constructor(msg, code, fn = EventEmitterError, opts) { + super(`${code}: ${msg}`, opts) + this.code = code + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, fn) + } + } + + get name() { + return 'EventEmitterError' + } + + static OPERATION_ABORTED(cause, msg = 'Operation aborted') { + return new EventEmitterError(msg, 'OPERATION_ABORTED', EventEmitterError.OPERATION_ABORTED, { + cause + }) + } + + static UNHANDLED_ERROR(cause, msg = 'Unhandled error') { + return new EventEmitterError(msg, 'UNHANDLED_ERROR', EventEmitterError.UNHANDLED_ERROR, { + cause + }) + } +} diff --git a/backend/node_modules/bare-events/package.json b/backend/node_modules/bare-events/package.json new file mode 100644 index 00000000..3ef071cd --- /dev/null +++ b/backend/node_modules/bare-events/package.json @@ -0,0 +1,61 @@ +{ + "name": "bare-events", + "version": "2.8.2", + "description": "Event emitters for JavaScript", + "exports": { + "./package": "./package.json", + ".": { + "types": "./index.d.ts", + "default": "./index.js" + }, + "./global": { + "types": "./global.d.ts", + "default": "./global.js" + }, + "./web": { + "types": "./web.d.ts", + "default": "./web.js" + }, + "./errors": "./lib/errors.js" + }, + "files": [ + "index.js", + "index.d.ts", + "global.js", + "global.d.ts", + "web.js", + "web.d.ts", + "lib" + ], + "scripts": { + "test": "npm run lint && npm run test:bare && npm run test:node", + "test:bare": "bare test.js", + "test:node": "node test.js", + "lint": "prettier . 
--check" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/holepunchto/bare-events.git" + }, + "author": "Holepunch", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/holepunchto/bare-events/issues" + }, + "homepage": "https://github.com/holepunchto/bare-events#readme", + "devDependencies": { + "bare-abort-controller": "^1.0.0", + "brittle": "^3.3.2", + "prettier": "^3.4.2", + "prettier-config-holepunch": "^2.0.0", + "uncaughts": "^1.1.1" + }, + "peerDependencies": { + "bare-abort-controller": "*" + }, + "peerDependenciesMeta": { + "bare-abort-controller": { + "optional": true + } + } +} diff --git a/backend/node_modules/bare-events/web.d.ts b/backend/node_modules/bare-events/web.d.ts new file mode 100644 index 00000000..0aee0596 --- /dev/null +++ b/backend/node_modules/bare-events/web.d.ts @@ -0,0 +1,79 @@ +interface AbortSignal extends EventTarget {} + +export interface EventOptions { + bubbles?: boolean + cancelable?: boolean + composed?: boolean +} + +export interface Event { + readonly type: string + readonly target: EventTarget | null + readonly currentTarget: EventTarget | null + readonly bubbles: boolean + readonly cancelable: boolean + readonly composed: boolean + readonly defaultPrevented: boolean + readonly isTrusted: boolean + + preventDefault(): void + stopPropagation(): void + stopImmediatePropagation(): void +} + +export class Event { + constructor(type: string, options?: EventOptions) +} + +export interface CustomEventOptions extends EventOptions { + detail?: T +} + +export interface CustomEvent extends Event { + readonly detail: T +} + +export class CustomEvent { + constructor(type: string, options?: CustomEventOptions) +} + +export interface AddEventListenerOptions { + capture?: boolean + passive?: boolean + once?: boolean + signal?: AbortSignal | null +} + +export interface RemoveEventListenerOptions { + capture?: boolean +} + +export interface EventTarget { + addEventListener( + type: string, + callback: EventListener, + options?: AddEventListenerOptions | boolean + ): void + + removeEventListener( + type: string, + callback: EventListener, + options?: RemoveEventListenerOptions | boolean + ): void + + dispatchEvent(event: Event): boolean +} + +export class EventTarget { + constructor() +} + +export type EventListener = EventCallback | EventHandler + +export interface EventCallback { + (event: Event): void +} + +export interface EventHandler { + handleEvent(event: Event): void +} diff --git a/backend/node_modules/bare-events/web.js b/backend/node_modules/bare-events/web.js new file mode 100644 index 00000000..ed25e695 --- /dev/null +++ b/backend/node_modules/bare-events/web.js @@ -0,0 +1,335 @@ +// Event state +const BUBBLES = 0x1 +const CANCELABLE = 0x2 +const COMPOSED = 0x4 +const CANCELED = 0x8 +const DISPATCH = 0x10 +const STOP = 0x20 + +// EventTarget state +const CAPTURE = 0x1 +const PASSIVE = 0x2 +const ONCE = 0x4 + +// https://dom.spec.whatwg.org/#event +class Event { + // https://dom.spec.whatwg.org/#dom-event-event + constructor(type, options = {}) { + const { bubbles = false, cancelable = false, composed = false } = options + + this._type = type + this._target = null + this._state = 0 + + if (bubbles) this._state |= BUBBLES + if (cancelable) this._state |= CANCELABLE + if (composed) this._state |= COMPOSED + } + + // https://dom.spec.whatwg.org/#dom-event-type + get type() { + return this._type + } + + // https://dom.spec.whatwg.org/#dom-event-target + get target() { + return this._target + } + + // 
https://dom.spec.whatwg.org/#dom-event-currenttarget + get currentTarget() { + return null + } + + // https://dom.spec.whatwg.org/#dom-event-bubbles + get bubbles() { + return (this._state & BUBBLES) !== 0 + } + + // https://dom.spec.whatwg.org/#dom-event-cancelable + get cancelable() { + return (this._state & CANCELABLE) !== 0 + } + + // https://dom.spec.whatwg.org/#dom-event-composed + get composed() { + return (this._state & COMPOSED) !== 0 + } + + // https://dom.spec.whatwg.org/#dom-event-defaultprevented + get defaultPrevented() { + return (this._state & CANCELED) !== 0 + } + + // https://dom.spec.whatwg.org/#dom-event-istrusted + get isTrusted() { + return false + } + + // https://dom.spec.whatwg.org/#dom-event-preventdefault + preventDefault() { + if (this._state & CANCELABLE) this._state |= CANCELED + } + + // https://dom.spec.whatwg.org/#dom-event-stoppropagation + stopPropagation() {} + + // https://dom.spec.whatwg.org/#dom-event-stopimmediatepropagation + stopImmediatePropagation() { + this._state |= STOP + } + + toJSON() { + return { + type: this.type, + target: this.target, + bubbles: this.bubbles, + cancelable: this.cancelable, + composed: this.composed, + defaultPrevented: this.defaultPrevented, + isTrusted: this.isTrusted + } + } + + [Symbol.for('bare.inspect')]() { + return { + __proto__: { constructor: Event }, + + type: this.type, + target: this.target, + bubbles: this.bubbles, + cancelable: this.cancelable, + composed: this.composed, + defaultPrevented: this.defaultPrevented, + isTrusted: this.isTrusted + } + } +} + +exports.Event = Event + +// https://dom.spec.whatwg.org/#customevent +exports.CustomEvent = class CustomEvent extends Event { + constructor(type, options = {}) { + super(type, options) + + const { detail = null } = options + + this._detail = detail + } + + // https://dom.spec.whatwg.org/#dom-customevent-detail + get detail() { + return this._detail + } +} + +// https://dom.spec.whatwg.org/#eventtarget +exports.EventTarget = class EventTarget { + // https://dom.spec.whatwg.org/#dom-eventtarget-eventtarget + constructor() { + this._listeners = new Map() + } + + // https://dom.spec.whatwg.org/#dom-eventtarget-addeventlistener + addEventListener(type, callback = null, options = {}) { + if (typeof options === 'boolean') options = { capture: options } + + const { capture = false, passive = false, once = false, signal = null } = options + + if (signal !== null && signal.aborted) return + if (callback === null) return + + const listener = new EventListener(type, callback, capture, passive, once, signal) + + const listeners = this._listeners.get(type) + + if (listeners === undefined) this._listeners.set(type, listener) + else { + for (const existing of listeners) { + if (callback === existing.callback && capture === existing.capture) { + return // Duplicate listener + } + } + + listener.link(listeners) + + if (signal !== null) { + signal.addEventListener('abort', onabort) + + function onabort() { + listener.unlink() + } + } + } + } + + // https://dom.spec.whatwg.org/#dom-eventtarget-removeeventlistener + removeEventListener(type, callback = null, options = {}) { + if (typeof options === 'boolean') options = { capture: options } + + const { capture = false } = options + + const listeners = this._listeners.get(type) + + if (listeners === undefined) return + + for (const existing of listeners) { + if (callback === existing.callback && capture === existing.capture) { + const next = existing.unlink() + + if (listeners === existing) this._listeners.set(type, next) + + 
return + } + } + } + + // https://dom.spec.whatwg.org/#dom-eventtarget-dispatchevent + dispatchEvent(event) { + event._target = this + event._state |= DISPATCH + + const listeners = this._listeners.get(event.type) + + try { + if (listeners === undefined) return true + + for (const listener of listeners) { + // https://dom.spec.whatwg.org/#concept-event-listener-inner-invoke + + if (listener.once) listener.unlink() + + let callback = listener.callback + let context = this + + if (typeof callback === 'object') { + context = callback + callback = callback.handleEvent + } + + Reflect.apply(callback, context, [event]) + + if (event._state & STOP) break + } + + return (event._state & CANCELED) === 0 + } finally { + event._state &= ~DISPATCH + event._state &= ~STOP + } + } + + toJSON() { + return {} + } + + [Symbol.for('bare.inspect')]() { + return { + __proto__: { constructor: EventTarget } + } + } +} + +// https://dom.spec.whatwg.org/#concept-event-listener +class EventListener { + constructor(type, callback, capture, passive, once, signal) { + this._type = type + this._callback = callback + this._signal = signal + this._state = 0 + + if (capture) this._state |= CAPTURE + if (passive) this._state |= PASSIVE + if (once) this._state |= ONCE + + this._previous = this + this._next = this + } + + get type() { + return this._type + } + + get callback() { + return this._callback + } + + get capture() { + return (this._state & CAPTURE) !== 0 + } + + get passive() { + return (this._state & PASSIVE) !== 0 + } + + get once() { + return (this._state & ONCE) !== 0 + } + + get removed() { + return this._previous === this && this._next === this + } + + link(listener) { + const next = this._next + const previous = listener._previous + + this._next = listener + listener._previous = this + + previous._next = next + next._previous = previous + + return listener + } + + unlink() { + if (this.removed) return this + + const next = this._next + const previous = this._previous + + this._next = this + this._previous = this + + previous._next = next + next._previous = previous + + return next + } + + *[Symbol.iterator]() { + let current = this + + while (true) { + const next = current._next + yield current + if (next === this) break + current = next + } + } + + toJSON() { + return { + type: this.type, + capture: this.capture, + passive: this.passive, + once: this.once, + removed: this.removed + } + } + + [Symbol.for('bare.inspect')]() { + return { + __proto__: { constructor: EventListener }, + + type: this.type, + callback: this.callback, + capture: this.capture, + passive: this.passive, + once: this.once, + removed: this.removed + } + } +} diff --git a/backend/node_modules/buffer-crc32/LICENSE b/backend/node_modules/buffer-crc32/LICENSE new file mode 100644 index 00000000..4cef10eb --- /dev/null +++ b/backend/node_modules/buffer-crc32/LICENSE @@ -0,0 +1,19 @@ +The MIT License + +Copyright (c) 2013 Brian J. Brennan + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/node_modules/buffer-crc32/README.md b/backend/node_modules/buffer-crc32/README.md new file mode 100644 index 00000000..0d9d8b83 --- /dev/null +++ b/backend/node_modules/buffer-crc32/README.md @@ -0,0 +1,47 @@ +# buffer-crc32 + +[![Build Status](https://secure.travis-ci.org/brianloveswords/buffer-crc32.png?branch=master)](http://travis-ci.org/brianloveswords/buffer-crc32) + +crc32 that works with binary data and fancy character sets, outputs +buffer, signed or unsigned data and has tests. + +Derived from the sample CRC implementation in the PNG specification: http://www.w3.org/TR/PNG/#D-CRCAppendix + +# install +``` +npm install buffer-crc32 +``` + +# example +```js +var crc32 = require('buffer-crc32'); +// works with buffers +var buf = Buffer([0x00, 0x73, 0x75, 0x70, 0x20, 0x62, 0x72, 0x6f, 0x00]) +crc32(buf) // -> + +// has convenience methods for getting signed or unsigned ints +crc32.signed(buf) // -> -1805997238 +crc32.unsigned(buf) // -> 2488970058 + +// will cast to buffer if given a string, so you can +// directly use foreign characters safely +crc32('自動販売機') // -> + +// and works in append mode too +var partialCrc = crc32('hey'); +var partialCrc = crc32(' ', partialCrc); +var partialCrc = crc32('sup', partialCrc); +var partialCrc = crc32(' ', partialCrc); +var finalCrc = crc32('bros', partialCrc); // -> +``` + +# tests +This was tested against the output of zlib's crc32 method. You can run +the tests with`npm test` (requires tap) + +# see also +https://github.com/alexgorbatchev/node-crc, `crc.buffer.crc32` also +supports buffer inputs and return unsigned ints (thanks @tjholowaychuk). 
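A runnable sketch of the same API (a minimal variant of the example above, using only the `crc32()`, `crc32.signed()` and `crc32.unsigned()` calls defined in the `index.js` shipped below):

```js
const crc32 = require('buffer-crc32');

// One-shot hash of the whole string.
const whole = crc32.unsigned('hey sup bros');

// Incremental hash of the same bytes, threading the previous checksum through.
let partial = crc32('hey');
partial = crc32(' sup ', partial);
const appended = crc32.unsigned('bros', partial);

console.log(whole === appended);            // true: append mode matches the one-shot hash
console.log(crc32('hey sup bros'));         // 4-byte Buffer holding the big-endian CRC
console.log(crc32.signed('hey sup bros'));  // same value as a signed 32-bit integer
```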
+ +# license +MIT/X11 diff --git a/backend/node_modules/buffer-crc32/index.js b/backend/node_modules/buffer-crc32/index.js new file mode 100644 index 00000000..6727dd39 --- /dev/null +++ b/backend/node_modules/buffer-crc32/index.js @@ -0,0 +1,111 @@ +var Buffer = require('buffer').Buffer; + +var CRC_TABLE = [ + 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, + 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, + 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, + 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, + 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856, + 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, + 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, + 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, + 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, + 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a, + 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, + 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, + 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, + 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, + 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e, + 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, + 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed, + 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, + 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, + 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, + 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, + 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5, + 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, + 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, + 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, + 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, + 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615, + 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, + 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344, + 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, + 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, + 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, + 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, + 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c, + 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, + 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, + 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, + 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, + 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c, + 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, + 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b, + 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, + 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, + 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, + 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, + 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7, + 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66, + 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, + 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, + 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, + 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, + 0x2d02ef8d +]; + +if (typeof Int32Array !== 'undefined') { + CRC_TABLE = new 
Int32Array(CRC_TABLE); +} + +function ensureBuffer(input) { + if (Buffer.isBuffer(input)) { + return input; + } + + var hasNewBufferAPI = + typeof Buffer.alloc === "function" && + typeof Buffer.from === "function"; + + if (typeof input === "number") { + return hasNewBufferAPI ? Buffer.alloc(input) : new Buffer(input); + } + else if (typeof input === "string") { + return hasNewBufferAPI ? Buffer.from(input) : new Buffer(input); + } + else { + throw new Error("input must be buffer, number, or string, received " + + typeof input); + } +} + +function bufferizeInt(num) { + var tmp = ensureBuffer(4); + tmp.writeInt32BE(num, 0); + return tmp; +} + +function _crc32(buf, previous) { + buf = ensureBuffer(buf); + if (Buffer.isBuffer(previous)) { + previous = previous.readUInt32BE(0); + } + var crc = ~~previous ^ -1; + for (var n = 0; n < buf.length; n++) { + crc = CRC_TABLE[(crc ^ buf[n]) & 0xff] ^ (crc >>> 8); + } + return (crc ^ -1); +} + +function crc32() { + return bufferizeInt(_crc32.apply(null, arguments)); +} +crc32.signed = function () { + return _crc32.apply(null, arguments); +}; +crc32.unsigned = function () { + return _crc32.apply(null, arguments) >>> 0; +}; + +module.exports = crc32; diff --git a/backend/node_modules/buffer-crc32/package.json b/backend/node_modules/buffer-crc32/package.json new file mode 100644 index 00000000..e896bec5 --- /dev/null +++ b/backend/node_modules/buffer-crc32/package.json @@ -0,0 +1,39 @@ +{ + "author": "Brian J. Brennan ", + "name": "buffer-crc32", + "description": "A pure javascript CRC32 algorithm that plays nice with binary data", + "version": "0.2.13", + "licenses": [ + { + "type": "MIT", + "url": "https://github.com/brianloveswords/buffer-crc32/raw/master/LICENSE" + } + ], + "contributors": [ + { + "name": "Vladimir Kuznetsov", + "github": "mistakster" + } + ], + "homepage": "https://github.com/brianloveswords/buffer-crc32", + "repository": { + "type": "git", + "url": "git://github.com/brianloveswords/buffer-crc32.git" + }, + "main": "index.js", + "scripts": { + "test": "./node_modules/.bin/tap tests/*.test.js" + }, + "dependencies": {}, + "devDependencies": { + "tap": "~0.2.5" + }, + "optionalDependencies": {}, + "engines": { + "node": "*" + }, + "license": "MIT", + "files": [ + "index.js" + ] +} diff --git a/backend/node_modules/compress-commons/CHANGELOG.md b/backend/node_modules/compress-commons/CHANGELOG.md new file mode 100644 index 00000000..8ff2e381 --- /dev/null +++ b/backend/node_modules/compress-commons/CHANGELOG.md @@ -0,0 +1,67 @@ +## Changelog + +**5.0.2** — _February 26, 2024_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/5.0.1...5.0.2) + +**5.0.1** — _September 3, 2023_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/5.0.0...5.0.1) + +**5.0.0** — _September 2, 2023_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/4.1.2...5.0.0) + + +**4.1.2** — _September 2, 2023_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/4.1.1...4.1.2) + +**4.1.1** — _May 30th, 2021_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/4.1.0...4.1.1) + +### Maintenance +- Bump mocha from 8.2.1 to 8.4.0 (#70) +- Bump crc32-stream from 4.0.1 to 4.0.2 (#59) +- Bump y18n from 4.0.0 to 4.0.1 (#69) +- Bump chai from 4.2.0 to 4.3.4 (#67) +- Bump actions/setup-node from 2.1.4 to 2.1.5 (#71) + +**4.1.0** — _March 2, 2021_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/4.0.1...4.1.0) + +### Features + +- Allow prepending forward slash in entry name 
(#63) + +### Maintenance + +- Bump actions/setup-node from v2.1.2 to v2.1.4 (#58) + +**4.0.1** — _July 20, 2020_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/4.0.0...4.0.1) + +* Bump crc32-stream from 3.0.1 to 4.0.0 (#43) @dependabot + +**4.0.0** — _July 18, 2020_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/3.0.0...4.0.0) + +* Bump mocha from 5.2.0 to 8.0.1 (#36) @dependabot +* Bump readable-stream from 2.3.7 to 3.6.0 (#39) @dependabot +* Bump actions/setup-node from v1 to v2.1.0 (#41) @dependabot +* Bump rimraf from 2.7.1 to 3.0.2 (#38) @dependabot +* Bump mkdirp from 0.5.5 to 1.0.4 (#37) @dependabot +* Bump actions/checkout from v1 to v2.3.1 (#40) @dependabot +* remove support for node < 10 (#42) @ctalkington + +**3.0.0** — _April 14, 2020_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/2.1.1...3.0.0) + +- breaking: slowly catch up with node LTS, remove support for versions under 8. +- update multiple deps. + +**2.1.1** — _August 2, 2019_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/2.1.0...2.1.1) + +- update crc32-stream to v3.0.1 + +**2.1.0** — _August 2, 2019_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/2.0.0...2.1.0) + +- update crc32-stream to v3.0.0 + +**2.0.0** — _July 19, 2019_ — [Diff](https://github.com/archiverjs/node-compress-commons/compare/1.2.2...2.0.0) + +- breaking: follow node LTS, remove support for versions under 6. +- test: now targeting node v10 and v12 +- fix: update Buffer calls to alloc/from +- fix: Add offset to buffer call (#31) +- other: update normalize-path@3 (#34) +- other: update dependencies + +[Release Archive](https://github.com/archiverjs/node-compress-commons/releases) diff --git a/backend/node_modules/compress-commons/LICENSE b/backend/node_modules/compress-commons/LICENSE new file mode 100644 index 00000000..56420a6a --- /dev/null +++ b/backend/node_modules/compress-commons/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2014 Chris Talkington, contributors. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/backend/node_modules/compress-commons/README.md b/backend/node_modules/compress-commons/README.md new file mode 100644 index 00000000..6a74ce14 --- /dev/null +++ b/backend/node_modules/compress-commons/README.md @@ -0,0 +1,25 @@ +# Compress Commons + +Compress Commons is a library that defines a common interface for working with archive formats within node. 
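As a rough sketch of that common interface (assuming the package root re-exports `ZipArchiveOutputStream` and `ZipArchiveEntry` as upstream compress-commons does; the entry/finish flow follows the `ArchiveOutputStream` shown later in this diff):

```js
const fs = require('fs');
const { ZipArchiveOutputStream, ZipArchiveEntry } = require('compress-commons');

const archive = new ZipArchiveOutputStream();
archive.pipe(fs.createWriteStream('example.zip'));

// entry(ae, source, callback): source may be a Buffer or a Stream.
// finish() closes the archive; ArchiveOutputStream defers it if an entry is still processing.
archive.entry(new ZipArchiveEntry('hello.txt'), Buffer.from('hello world'), (err) => {
  if (err) throw err;
  archive.finish();
});
```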
+ +[![NPM](https://nodei.co/npm/compress-commons.png)](https://nodei.co/npm/compress-commons/) + +## Install + +```bash +npm install compress-commons --save +``` + +You can also use `npm install https://github.com/archiverjs/node-compress-commons/archive/master.tar.gz` to test upcoming versions. + +## Things of Interest + +- [Changelog](https://github.com/archiverjs/node-compress-commons/releases) +- [Contributing](https://github.com/archiverjs/node-compress-commons/blob/master/CONTRIBUTING.md) +- [MIT License](https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT) + +## Credits + +Concept inspired by [Apache Commons Compress](http://commons.apache.org/proper/commons-compress/)™. + +Some logic derived from [Apache Commons Compress](http://commons.apache.org/proper/commons-compress/)™ and [OpenJDK 7](http://openjdk.java.net/). \ No newline at end of file diff --git a/backend/node_modules/compress-commons/lib/archivers/archive-entry.js b/backend/node_modules/compress-commons/lib/archivers/archive-entry.js new file mode 100644 index 00000000..86bc5988 --- /dev/null +++ b/backend/node_modules/compress-commons/lib/archivers/archive-entry.js @@ -0,0 +1,16 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. + * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +var ArchiveEntry = module.exports = function() {}; + +ArchiveEntry.prototype.getName = function() {}; + +ArchiveEntry.prototype.getSize = function() {}; + +ArchiveEntry.prototype.getLastModifiedDate = function() {}; + +ArchiveEntry.prototype.isDirectory = function() {}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/lib/archivers/archive-output-stream.js b/backend/node_modules/compress-commons/lib/archivers/archive-output-stream.js new file mode 100644 index 00000000..b5fa4939 --- /dev/null +++ b/backend/node_modules/compress-commons/lib/archivers/archive-output-stream.js @@ -0,0 +1,117 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. 
+ * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +var inherits = require('util').inherits; +var Transform = require('readable-stream').Transform; + +var ArchiveEntry = require('./archive-entry'); +var util = require('../util'); + +var ArchiveOutputStream = module.exports = function(options) { + if (!(this instanceof ArchiveOutputStream)) { + return new ArchiveOutputStream(options); + } + + Transform.call(this, options); + + this.offset = 0; + this._archive = { + finish: false, + finished: false, + processing: false + }; +}; + +inherits(ArchiveOutputStream, Transform); + +ArchiveOutputStream.prototype._appendBuffer = function(zae, source, callback) { + // scaffold only +}; + +ArchiveOutputStream.prototype._appendStream = function(zae, source, callback) { + // scaffold only +}; + +ArchiveOutputStream.prototype._emitErrorCallback = function(err) { + if (err) { + this.emit('error', err); + } +}; + +ArchiveOutputStream.prototype._finish = function(ae) { + // scaffold only +}; + +ArchiveOutputStream.prototype._normalizeEntry = function(ae) { + // scaffold only +}; + +ArchiveOutputStream.prototype._transform = function(chunk, encoding, callback) { + callback(null, chunk); +}; + +ArchiveOutputStream.prototype.entry = function(ae, source, callback) { + source = source || null; + + if (typeof callback !== 'function') { + callback = this._emitErrorCallback.bind(this); + } + + if (!(ae instanceof ArchiveEntry)) { + callback(new Error('not a valid instance of ArchiveEntry')); + return; + } + + if (this._archive.finish || this._archive.finished) { + callback(new Error('unacceptable entry after finish')); + return; + } + + if (this._archive.processing) { + callback(new Error('already processing an entry')); + return; + } + + this._archive.processing = true; + this._normalizeEntry(ae); + this._entry = ae; + + source = util.normalizeInputSource(source); + + if (Buffer.isBuffer(source)) { + this._appendBuffer(ae, source, callback); + } else if (util.isStream(source)) { + this._appendStream(ae, source, callback); + } else { + this._archive.processing = false; + callback(new Error('input source must be valid Stream or Buffer instance')); + return; + } + + return this; +}; + +ArchiveOutputStream.prototype.finish = function() { + if (this._archive.processing) { + this._archive.finish = true; + return; + } + + this._finish(); +}; + +ArchiveOutputStream.prototype.getBytesWritten = function() { + return this.offset; +}; + +ArchiveOutputStream.prototype.write = function(chunk, cb) { + if (chunk) { + this.offset += chunk.length; + } + + return Transform.prototype.write.call(this, chunk, cb); +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/lib/archivers/zip/constants.js b/backend/node_modules/compress-commons/lib/archivers/zip/constants.js new file mode 100644 index 00000000..c30b3250 --- /dev/null +++ b/backend/node_modules/compress-commons/lib/archivers/zip/constants.js @@ -0,0 +1,71 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. 
+ * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +module.exports = { + WORD: 4, + DWORD: 8, + EMPTY: Buffer.alloc(0), + + SHORT: 2, + SHORT_MASK: 0xffff, + SHORT_SHIFT: 16, + SHORT_ZERO: Buffer.from(Array(2)), + LONG: 4, + LONG_ZERO: Buffer.from(Array(4)), + + MIN_VERSION_INITIAL: 10, + MIN_VERSION_DATA_DESCRIPTOR: 20, + MIN_VERSION_ZIP64: 45, + VERSION_MADEBY: 45, + + METHOD_STORED: 0, + METHOD_DEFLATED: 8, + + PLATFORM_UNIX: 3, + PLATFORM_FAT: 0, + + SIG_LFH: 0x04034b50, + SIG_DD: 0x08074b50, + SIG_CFH: 0x02014b50, + SIG_EOCD: 0x06054b50, + SIG_ZIP64_EOCD: 0x06064B50, + SIG_ZIP64_EOCD_LOC: 0x07064B50, + + ZIP64_MAGIC_SHORT: 0xffff, + ZIP64_MAGIC: 0xffffffff, + ZIP64_EXTRA_ID: 0x0001, + + ZLIB_NO_COMPRESSION: 0, + ZLIB_BEST_SPEED: 1, + ZLIB_BEST_COMPRESSION: 9, + ZLIB_DEFAULT_COMPRESSION: -1, + + MODE_MASK: 0xFFF, + DEFAULT_FILE_MODE: 33188, // 010644 = -rw-r--r-- = S_IFREG | S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH + DEFAULT_DIR_MODE: 16877, // 040755 = drwxr-xr-x = S_IFDIR | S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH + + EXT_FILE_ATTR_DIR: 1106051088, // 010173200020 = drwxr-xr-x = (((S_IFDIR | 0755) << 16) | S_DOS_D) + EXT_FILE_ATTR_FILE: 2175008800, // 020151000040 = -rw-r--r-- = (((S_IFREG | 0644) << 16) | S_DOS_A) >>> 0 + + // Unix file types + S_IFMT: 61440, // 0170000 type of file mask + S_IFIFO: 4096, // 010000 named pipe (fifo) + S_IFCHR: 8192, // 020000 character special + S_IFDIR: 16384, // 040000 directory + S_IFBLK: 24576, // 060000 block special + S_IFREG: 32768, // 0100000 regular + S_IFLNK: 40960, // 0120000 symbolic link + S_IFSOCK: 49152, // 0140000 socket + + // DOS file type flags + S_DOS_A: 32, // 040 Archive + S_DOS_D: 16, // 020 Directory + S_DOS_V: 8, // 010 Volume + S_DOS_S: 4, // 04 System + S_DOS_H: 2, // 02 Hidden + S_DOS_R: 1 // 01 Read Only +}; diff --git a/backend/node_modules/compress-commons/lib/archivers/zip/general-purpose-bit.js b/backend/node_modules/compress-commons/lib/archivers/zip/general-purpose-bit.js new file mode 100644 index 00000000..62703996 --- /dev/null +++ b/backend/node_modules/compress-commons/lib/archivers/zip/general-purpose-bit.js @@ -0,0 +1,101 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. + * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +var zipUtil = require('./util'); + +var DATA_DESCRIPTOR_FLAG = 1 << 3; +var ENCRYPTION_FLAG = 1 << 0; +var NUMBER_OF_SHANNON_FANO_TREES_FLAG = 1 << 2; +var SLIDING_DICTIONARY_SIZE_FLAG = 1 << 1; +var STRONG_ENCRYPTION_FLAG = 1 << 6; +var UFT8_NAMES_FLAG = 1 << 11; + +var GeneralPurposeBit = module.exports = function() { + if (!(this instanceof GeneralPurposeBit)) { + return new GeneralPurposeBit(); + } + + this.descriptor = false; + this.encryption = false; + this.utf8 = false; + this.numberOfShannonFanoTrees = 0; + this.strongEncryption = false; + this.slidingDictionarySize = 0; + + return this; +}; + +GeneralPurposeBit.prototype.encode = function() { + return zipUtil.getShortBytes( + (this.descriptor ? DATA_DESCRIPTOR_FLAG : 0) | + (this.utf8 ? UFT8_NAMES_FLAG : 0) | + (this.encryption ? ENCRYPTION_FLAG : 0) | + (this.strongEncryption ? 
STRONG_ENCRYPTION_FLAG : 0) + ); +}; + +GeneralPurposeBit.prototype.parse = function(buf, offset) { + var flag = zipUtil.getShortBytesValue(buf, offset); + var gbp = new GeneralPurposeBit(); + + gbp.useDataDescriptor((flag & DATA_DESCRIPTOR_FLAG) !== 0); + gbp.useUTF8ForNames((flag & UFT8_NAMES_FLAG) !== 0); + gbp.useStrongEncryption((flag & STRONG_ENCRYPTION_FLAG) !== 0); + gbp.useEncryption((flag & ENCRYPTION_FLAG) !== 0); + gbp.setSlidingDictionarySize((flag & SLIDING_DICTIONARY_SIZE_FLAG) !== 0 ? 8192 : 4096); + gbp.setNumberOfShannonFanoTrees((flag & NUMBER_OF_SHANNON_FANO_TREES_FLAG) !== 0 ? 3 : 2); + + return gbp; +}; + +GeneralPurposeBit.prototype.setNumberOfShannonFanoTrees = function(n) { + this.numberOfShannonFanoTrees = n; +}; + +GeneralPurposeBit.prototype.getNumberOfShannonFanoTrees = function() { + return this.numberOfShannonFanoTrees; +}; + +GeneralPurposeBit.prototype.setSlidingDictionarySize = function(n) { + this.slidingDictionarySize = n; +}; + +GeneralPurposeBit.prototype.getSlidingDictionarySize = function() { + return this.slidingDictionarySize; +}; + +GeneralPurposeBit.prototype.useDataDescriptor = function(b) { + this.descriptor = b; +}; + +GeneralPurposeBit.prototype.usesDataDescriptor = function() { + return this.descriptor; +}; + +GeneralPurposeBit.prototype.useEncryption = function(b) { + this.encryption = b; +}; + +GeneralPurposeBit.prototype.usesEncryption = function() { + return this.encryption; +}; + +GeneralPurposeBit.prototype.useStrongEncryption = function(b) { + this.strongEncryption = b; +}; + +GeneralPurposeBit.prototype.usesStrongEncryption = function() { + return this.strongEncryption; +}; + +GeneralPurposeBit.prototype.useUTF8ForNames = function(b) { + this.utf8 = b; +}; + +GeneralPurposeBit.prototype.usesUTF8ForNames = function() { + return this.utf8; +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/lib/archivers/zip/unix-stat.js b/backend/node_modules/compress-commons/lib/archivers/zip/unix-stat.js new file mode 100644 index 00000000..1326cd1d --- /dev/null +++ b/backend/node_modules/compress-commons/lib/archivers/zip/unix-stat.js @@ -0,0 +1,53 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. + * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +module.exports = { + /** + * Bits used for permissions (and sticky bit) + */ + PERM_MASK: 4095, // 07777 + + /** + * Bits used to indicate the filesystem object type. + */ + FILE_TYPE_FLAG: 61440, // 0170000 + + /** + * Indicates symbolic links. + */ + LINK_FLAG: 40960, // 0120000 + + /** + * Indicates plain files. + */ + FILE_FLAG: 32768, // 0100000 + + /** + * Indicates directories. + */ + DIR_FLAG: 16384, // 040000 + + // ---------------------------------------------------------- + // somewhat arbitrary choices that are quite common for shared + // installations + // ----------------------------------------------------------- + + /** + * Default permissions for symbolic links. + */ + DEFAULT_LINK_PERM: 511, // 0777 + + /** + * Default permissions for directories. + */ + DEFAULT_DIR_PERM: 493, // 0755 + + /** + * Default permissions for plain files. 
+ */ + DEFAULT_FILE_PERM: 420 // 0644 +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/lib/archivers/zip/util.js b/backend/node_modules/compress-commons/lib/archivers/zip/util.js new file mode 100644 index 00000000..22055ae5 --- /dev/null +++ b/backend/node_modules/compress-commons/lib/archivers/zip/util.js @@ -0,0 +1,74 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. + * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +var util = module.exports = {}; + +util.dateToDos = function(d, forceLocalTime) { + forceLocalTime = forceLocalTime || false; + + var year = forceLocalTime ? d.getFullYear() : d.getUTCFullYear(); + + if (year < 1980) { + return 2162688; // 1980-1-1 00:00:00 + } else if (year >= 2044) { + return 2141175677; // 2043-12-31 23:59:58 + } + + var val = { + year: year, + month: forceLocalTime ? d.getMonth() : d.getUTCMonth(), + date: forceLocalTime ? d.getDate() : d.getUTCDate(), + hours: forceLocalTime ? d.getHours() : d.getUTCHours(), + minutes: forceLocalTime ? d.getMinutes() : d.getUTCMinutes(), + seconds: forceLocalTime ? d.getSeconds() : d.getUTCSeconds() + }; + + return ((val.year - 1980) << 25) | ((val.month + 1) << 21) | (val.date << 16) | + (val.hours << 11) | (val.minutes << 5) | (val.seconds / 2); +}; + +util.dosToDate = function(dos) { + return new Date(((dos >> 25) & 0x7f) + 1980, ((dos >> 21) & 0x0f) - 1, (dos >> 16) & 0x1f, (dos >> 11) & 0x1f, (dos >> 5) & 0x3f, (dos & 0x1f) << 1); +}; + +util.fromDosTime = function(buf) { + return util.dosToDate(buf.readUInt32LE(0)); +}; + +util.getEightBytes = function(v) { + var buf = Buffer.alloc(8); + buf.writeUInt32LE(v % 0x0100000000, 0); + buf.writeUInt32LE((v / 0x0100000000) | 0, 4); + + return buf; +}; + +util.getShortBytes = function(v) { + var buf = Buffer.alloc(2); + buf.writeUInt16LE((v & 0xFFFF) >>> 0, 0); + + return buf; +}; + +util.getShortBytesValue = function(buf, offset) { + return buf.readUInt16LE(offset); +}; + +util.getLongBytes = function(v) { + var buf = Buffer.alloc(4); + buf.writeUInt32LE((v & 0xFFFFFFFF) >>> 0, 0); + + return buf; +}; + +util.getLongBytesValue = function(buf, offset) { + return buf.readUInt32LE(offset); +}; + +util.toDosTime = function(d) { + return util.getLongBytes(util.dateToDos(d)); +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/lib/archivers/zip/zip-archive-entry.js b/backend/node_modules/compress-commons/lib/archivers/zip/zip-archive-entry.js new file mode 100644 index 00000000..c53ad0ad --- /dev/null +++ b/backend/node_modules/compress-commons/lib/archivers/zip/zip-archive-entry.js @@ -0,0 +1,413 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. 
+ * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +var inherits = require('util').inherits; +var normalizePath = require('normalize-path'); + +var ArchiveEntry = require('../archive-entry'); +var GeneralPurposeBit = require('./general-purpose-bit'); +var UnixStat = require('./unix-stat'); + +var constants = require('./constants'); +var zipUtil = require('./util'); + +var ZipArchiveEntry = module.exports = function(name) { + if (!(this instanceof ZipArchiveEntry)) { + return new ZipArchiveEntry(name); + } + + ArchiveEntry.call(this); + + this.platform = constants.PLATFORM_FAT; + this.method = -1; + + this.name = null; + this.size = 0; + this.csize = 0; + this.gpb = new GeneralPurposeBit(); + this.crc = 0; + this.time = -1; + + this.minver = constants.MIN_VERSION_INITIAL; + this.mode = -1; + this.extra = null; + this.exattr = 0; + this.inattr = 0; + this.comment = null; + + if (name) { + this.setName(name); + } +}; + +inherits(ZipArchiveEntry, ArchiveEntry); + +/** + * Returns the extra fields related to the entry. + * + * @returns {Buffer} + */ +ZipArchiveEntry.prototype.getCentralDirectoryExtra = function() { + return this.getExtra(); +}; + +/** + * Returns the comment set for the entry. + * + * @returns {string} + */ +ZipArchiveEntry.prototype.getComment = function() { + return this.comment !== null ? this.comment : ''; +}; + +/** + * Returns the compressed size of the entry. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getCompressedSize = function() { + return this.csize; +}; + +/** + * Returns the CRC32 digest for the entry. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getCrc = function() { + return this.crc; +}; + +/** + * Returns the external file attributes for the entry. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getExternalAttributes = function() { + return this.exattr; +}; + +/** + * Returns the extra fields related to the entry. + * + * @returns {Buffer} + */ +ZipArchiveEntry.prototype.getExtra = function() { + return this.extra !== null ? this.extra : constants.EMPTY; +}; + +/** + * Returns the general purpose bits related to the entry. + * + * @returns {GeneralPurposeBit} + */ +ZipArchiveEntry.prototype.getGeneralPurposeBit = function() { + return this.gpb; +}; + +/** + * Returns the internal file attributes for the entry. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getInternalAttributes = function() { + return this.inattr; +}; + +/** + * Returns the last modified date of the entry. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getLastModifiedDate = function() { + return this.getTime(); +}; + +/** + * Returns the extra fields related to the entry. + * + * @returns {Buffer} + */ +ZipArchiveEntry.prototype.getLocalFileDataExtra = function() { + return this.getExtra(); +}; + +/** + * Returns the compression method used on the entry. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getMethod = function() { + return this.method; +}; + +/** + * Returns the filename of the entry. + * + * @returns {string} + */ +ZipArchiveEntry.prototype.getName = function() { + return this.name; +}; + +/** + * Returns the platform on which the entry was made. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getPlatform = function() { + return this.platform; +}; + +/** + * Returns the size of the entry. 
+ * + * @returns {number} + */ +ZipArchiveEntry.prototype.getSize = function() { + return this.size; +}; + +/** + * Returns a date object representing the last modified date of the entry. + * + * @returns {number|Date} + */ +ZipArchiveEntry.prototype.getTime = function() { + return this.time !== -1 ? zipUtil.dosToDate(this.time) : -1; +}; + +/** + * Returns the DOS timestamp for the entry. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getTimeDos = function() { + return this.time !== -1 ? this.time : 0; +}; + +/** + * Returns the UNIX file permissions for the entry. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getUnixMode = function() { + return this.platform !== constants.PLATFORM_UNIX ? 0 : ((this.getExternalAttributes() >> constants.SHORT_SHIFT) & constants.SHORT_MASK); +}; + +/** + * Returns the version of ZIP needed to extract the entry. + * + * @returns {number} + */ +ZipArchiveEntry.prototype.getVersionNeededToExtract = function() { + return this.minver; +}; + +/** + * Sets the comment of the entry. + * + * @param comment + */ +ZipArchiveEntry.prototype.setComment = function(comment) { + if (Buffer.byteLength(comment) !== comment.length) { + this.getGeneralPurposeBit().useUTF8ForNames(true); + } + + this.comment = comment; +}; + +/** + * Sets the compressed size of the entry. + * + * @param size + */ +ZipArchiveEntry.prototype.setCompressedSize = function(size) { + if (size < 0) { + throw new Error('invalid entry compressed size'); + } + + this.csize = size; +}; + +/** + * Sets the checksum of the entry. + * + * @param crc + */ +ZipArchiveEntry.prototype.setCrc = function(crc) { + if (crc < 0) { + throw new Error('invalid entry crc32'); + } + + this.crc = crc; +}; + +/** + * Sets the external file attributes of the entry. + * + * @param attr + */ +ZipArchiveEntry.prototype.setExternalAttributes = function(attr) { + this.exattr = attr >>> 0; +}; + +/** + * Sets the extra fields related to the entry. + * + * @param extra + */ +ZipArchiveEntry.prototype.setExtra = function(extra) { + this.extra = extra; +}; + +/** + * Sets the general purpose bits related to the entry. + * + * @param gpb + */ +ZipArchiveEntry.prototype.setGeneralPurposeBit = function(gpb) { + if (!(gpb instanceof GeneralPurposeBit)) { + throw new Error('invalid entry GeneralPurposeBit'); + } + + this.gpb = gpb; +}; + +/** + * Sets the internal file attributes of the entry. + * + * @param attr + */ +ZipArchiveEntry.prototype.setInternalAttributes = function(attr) { + this.inattr = attr; +}; + +/** + * Sets the compression method of the entry. + * + * @param method + */ +ZipArchiveEntry.prototype.setMethod = function(method) { + if (method < 0) { + throw new Error('invalid entry compression method'); + } + + this.method = method; +}; + +/** + * Sets the name of the entry. + * + * @param name + * @param prependSlash + */ +ZipArchiveEntry.prototype.setName = function(name, prependSlash = false) { + name = normalizePath(name, false) + .replace(/^\w+:/, '') + .replace(/^(\.\.\/|\/)+/, ''); + + if (prependSlash) { + name = `/${name}`; + } + + if (Buffer.byteLength(name) !== name.length) { + this.getGeneralPurposeBit().useUTF8ForNames(true); + } + + this.name = name; +}; + +/** + * Sets the platform on which the entry was made. + * + * @param platform + */ +ZipArchiveEntry.prototype.setPlatform = function(platform) { + this.platform = platform; +}; + +/** + * Sets the size of the entry. 
+ * + * @param size + */ +ZipArchiveEntry.prototype.setSize = function(size) { + if (size < 0) { + throw new Error('invalid entry size'); + } + + this.size = size; +}; + +/** + * Sets the time of the entry. + * + * @param time + * @param forceLocalTime + */ +ZipArchiveEntry.prototype.setTime = function(time, forceLocalTime) { + if (!(time instanceof Date)) { + throw new Error('invalid entry time'); + } + + this.time = zipUtil.dateToDos(time, forceLocalTime); +}; + +/** + * Sets the UNIX file permissions for the entry. + * + * @param mode + */ +ZipArchiveEntry.prototype.setUnixMode = function(mode) { + mode |= this.isDirectory() ? constants.S_IFDIR : constants.S_IFREG; + + var extattr = 0; + extattr |= (mode << constants.SHORT_SHIFT) | (this.isDirectory() ? constants.S_DOS_D : constants.S_DOS_A); + + this.setExternalAttributes(extattr); + this.mode = mode & constants.MODE_MASK; + this.platform = constants.PLATFORM_UNIX; +}; + +/** + * Sets the version of ZIP needed to extract this entry. + * + * @param minver + */ +ZipArchiveEntry.prototype.setVersionNeededToExtract = function(minver) { + this.minver = minver; +}; + +/** + * Returns true if this entry represents a directory. + * + * @returns {boolean} + */ +ZipArchiveEntry.prototype.isDirectory = function() { + return this.getName().slice(-1) === '/'; +}; + +/** + * Returns true if this entry represents a unix symlink, + * in which case the entry's content contains the target path + * for the symlink. + * + * @returns {boolean} + */ +ZipArchiveEntry.prototype.isUnixSymlink = function() { + return (this.getUnixMode() & UnixStat.FILE_TYPE_FLAG) === UnixStat.LINK_FLAG; +}; + +/** + * Returns true if this entry is using the ZIP64 extension of ZIP. + * + * @returns {boolean} + */ +ZipArchiveEntry.prototype.isZip64 = function() { + return this.csize > constants.ZIP64_MAGIC || this.size > constants.ZIP64_MAGIC; +}; diff --git a/backend/node_modules/compress-commons/lib/archivers/zip/zip-archive-output-stream.js b/backend/node_modules/compress-commons/lib/archivers/zip/zip-archive-output-stream.js new file mode 100644 index 00000000..f56cea93 --- /dev/null +++ b/backend/node_modules/compress-commons/lib/archivers/zip/zip-archive-output-stream.js @@ -0,0 +1,437 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. 
+ * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +var inherits = require('util').inherits; +var crc32 = require('crc-32'); +var {CRC32Stream} = require('crc32-stream'); +var {DeflateCRC32Stream} = require('crc32-stream'); + +var ArchiveOutputStream = require('../archive-output-stream'); +var ZipArchiveEntry = require('./zip-archive-entry'); +var GeneralPurposeBit = require('./general-purpose-bit'); + +var constants = require('./constants'); +var util = require('../../util'); +var zipUtil = require('./util'); + +var ZipArchiveOutputStream = module.exports = function(options) { + if (!(this instanceof ZipArchiveOutputStream)) { + return new ZipArchiveOutputStream(options); + } + + options = this.options = this._defaults(options); + + ArchiveOutputStream.call(this, options); + + this._entry = null; + this._entries = []; + this._archive = { + centralLength: 0, + centralOffset: 0, + comment: '', + finish: false, + finished: false, + processing: false, + forceZip64: options.forceZip64, + forceLocalTime: options.forceLocalTime + }; +}; + +inherits(ZipArchiveOutputStream, ArchiveOutputStream); + +ZipArchiveOutputStream.prototype._afterAppend = function(ae) { + this._entries.push(ae); + + if (ae.getGeneralPurposeBit().usesDataDescriptor()) { + this._writeDataDescriptor(ae); + } + + this._archive.processing = false; + this._entry = null; + + if (this._archive.finish && !this._archive.finished) { + this._finish(); + } +}; + +ZipArchiveOutputStream.prototype._appendBuffer = function(ae, source, callback) { + if (source.length === 0) { + ae.setMethod(constants.METHOD_STORED); + } + + var method = ae.getMethod(); + + if (method === constants.METHOD_STORED) { + ae.setSize(source.length); + ae.setCompressedSize(source.length); + ae.setCrc(crc32.buf(source) >>> 0); + } + + this._writeLocalFileHeader(ae); + + if (method === constants.METHOD_STORED) { + this.write(source); + this._afterAppend(ae); + callback(null, ae); + return; + } else if (method === constants.METHOD_DEFLATED) { + this._smartStream(ae, callback).end(source); + return; + } else { + callback(new Error('compression method ' + method + ' not implemented')); + return; + } +}; + +ZipArchiveOutputStream.prototype._appendStream = function(ae, source, callback) { + ae.getGeneralPurposeBit().useDataDescriptor(true); + ae.setVersionNeededToExtract(constants.MIN_VERSION_DATA_DESCRIPTOR); + + this._writeLocalFileHeader(ae); + + var smart = this._smartStream(ae, callback); + source.once('error', function(err) { + smart.emit('error', err); + smart.end(); + }) + source.pipe(smart); +}; + +ZipArchiveOutputStream.prototype._defaults = function(o) { + if (typeof o !== 'object') { + o = {}; + } + + if (typeof o.zlib !== 'object') { + o.zlib = {}; + } + + if (typeof o.zlib.level !== 'number') { + o.zlib.level = constants.ZLIB_BEST_SPEED; + } + + o.forceZip64 = !!o.forceZip64; + o.forceLocalTime = !!o.forceLocalTime; + + return o; +}; + +ZipArchiveOutputStream.prototype._finish = function() { + this._archive.centralOffset = this.offset; + + this._entries.forEach(function(ae) { + this._writeCentralFileHeader(ae); + }.bind(this)); + + this._archive.centralLength = this.offset - this._archive.centralOffset; + + if (this.isZip64()) { + this._writeCentralDirectoryZip64(); + } + + this._writeCentralDirectoryEnd(); + + this._archive.processing = false; + this._archive.finish = true; + this._archive.finished = true; + this.end(); +}; + +ZipArchiveOutputStream.prototype._normalizeEntry = function(ae) { + if (ae.getMethod() === -1) { + 
ae.setMethod(constants.METHOD_DEFLATED); + } + + if (ae.getMethod() === constants.METHOD_DEFLATED) { + ae.getGeneralPurposeBit().useDataDescriptor(true); + ae.setVersionNeededToExtract(constants.MIN_VERSION_DATA_DESCRIPTOR); + } + + if (ae.getTime() === -1) { + ae.setTime(new Date(), this._archive.forceLocalTime); + } + + ae._offsets = { + file: 0, + data: 0, + contents: 0, + }; +}; + +ZipArchiveOutputStream.prototype._smartStream = function(ae, callback) { + var deflate = ae.getMethod() === constants.METHOD_DEFLATED; + var process = deflate ? new DeflateCRC32Stream(this.options.zlib) : new CRC32Stream(); + var error = null; + + function handleStuff() { + var digest = process.digest().readUInt32BE(0); + ae.setCrc(digest); + ae.setSize(process.size()); + ae.setCompressedSize(process.size(true)); + this._afterAppend(ae); + callback(error, ae); + } + + process.once('end', handleStuff.bind(this)); + process.once('error', function(err) { + error = err; + }); + + process.pipe(this, { end: false }); + + return process; +}; + +ZipArchiveOutputStream.prototype._writeCentralDirectoryEnd = function() { + var records = this._entries.length; + var size = this._archive.centralLength; + var offset = this._archive.centralOffset; + + if (this.isZip64()) { + records = constants.ZIP64_MAGIC_SHORT; + size = constants.ZIP64_MAGIC; + offset = constants.ZIP64_MAGIC; + } + + // signature + this.write(zipUtil.getLongBytes(constants.SIG_EOCD)); + + // disk numbers + this.write(constants.SHORT_ZERO); + this.write(constants.SHORT_ZERO); + + // number of entries + this.write(zipUtil.getShortBytes(records)); + this.write(zipUtil.getShortBytes(records)); + + // length and location of CD + this.write(zipUtil.getLongBytes(size)); + this.write(zipUtil.getLongBytes(offset)); + + // archive comment + var comment = this.getComment(); + var commentLength = Buffer.byteLength(comment); + this.write(zipUtil.getShortBytes(commentLength)); + this.write(comment); +}; + +ZipArchiveOutputStream.prototype._writeCentralDirectoryZip64 = function() { + // signature + this.write(zipUtil.getLongBytes(constants.SIG_ZIP64_EOCD)); + + // size of the ZIP64 EOCD record + this.write(zipUtil.getEightBytes(44)); + + // version made by + this.write(zipUtil.getShortBytes(constants.MIN_VERSION_ZIP64)); + + // version to extract + this.write(zipUtil.getShortBytes(constants.MIN_VERSION_ZIP64)); + + // disk numbers + this.write(constants.LONG_ZERO); + this.write(constants.LONG_ZERO); + + // number of entries + this.write(zipUtil.getEightBytes(this._entries.length)); + this.write(zipUtil.getEightBytes(this._entries.length)); + + // length and location of CD + this.write(zipUtil.getEightBytes(this._archive.centralLength)); + this.write(zipUtil.getEightBytes(this._archive.centralOffset)); + + // extensible data sector + // not implemented at this time + + // end of central directory locator + this.write(zipUtil.getLongBytes(constants.SIG_ZIP64_EOCD_LOC)); + + // disk number holding the ZIP64 EOCD record + this.write(constants.LONG_ZERO); + + // relative offset of the ZIP64 EOCD record + this.write(zipUtil.getEightBytes(this._archive.centralOffset + this._archive.centralLength)); + + // total number of disks + this.write(zipUtil.getLongBytes(1)); +}; + +ZipArchiveOutputStream.prototype._writeCentralFileHeader = function(ae) { + var gpb = ae.getGeneralPurposeBit(); + var method = ae.getMethod(); + var fileOffset = ae._offsets.file; + + var size = ae.getSize(); + var compressedSize = ae.getCompressedSize(); + + if (ae.isZip64() || fileOffset > 
constants.ZIP64_MAGIC) { + size = constants.ZIP64_MAGIC; + compressedSize = constants.ZIP64_MAGIC; + fileOffset = constants.ZIP64_MAGIC; + + ae.setVersionNeededToExtract(constants.MIN_VERSION_ZIP64); + + var extraBuf = Buffer.concat([ + zipUtil.getShortBytes(constants.ZIP64_EXTRA_ID), + zipUtil.getShortBytes(24), + zipUtil.getEightBytes(ae.getSize()), + zipUtil.getEightBytes(ae.getCompressedSize()), + zipUtil.getEightBytes(ae._offsets.file) + ], 28); + + ae.setExtra(extraBuf); + } + + // signature + this.write(zipUtil.getLongBytes(constants.SIG_CFH)); + + // version made by + this.write(zipUtil.getShortBytes((ae.getPlatform() << 8) | constants.VERSION_MADEBY)); + + // version to extract and general bit flag + this.write(zipUtil.getShortBytes(ae.getVersionNeededToExtract())); + this.write(gpb.encode()); + + // compression method + this.write(zipUtil.getShortBytes(method)); + + // datetime + this.write(zipUtil.getLongBytes(ae.getTimeDos())); + + // crc32 checksum + this.write(zipUtil.getLongBytes(ae.getCrc())); + + // sizes + this.write(zipUtil.getLongBytes(compressedSize)); + this.write(zipUtil.getLongBytes(size)); + + var name = ae.getName(); + var comment = ae.getComment(); + var extra = ae.getCentralDirectoryExtra(); + + if (gpb.usesUTF8ForNames()) { + name = Buffer.from(name); + comment = Buffer.from(comment); + } + + // name length + this.write(zipUtil.getShortBytes(name.length)); + + // extra length + this.write(zipUtil.getShortBytes(extra.length)); + + // comments length + this.write(zipUtil.getShortBytes(comment.length)); + + // disk number start + this.write(constants.SHORT_ZERO); + + // internal attributes + this.write(zipUtil.getShortBytes(ae.getInternalAttributes())); + + // external attributes + this.write(zipUtil.getLongBytes(ae.getExternalAttributes())); + + // relative offset of LFH + this.write(zipUtil.getLongBytes(fileOffset)); + + // name + this.write(name); + + // extra + this.write(extra); + + // comment + this.write(comment); +}; + +ZipArchiveOutputStream.prototype._writeDataDescriptor = function(ae) { + // signature + this.write(zipUtil.getLongBytes(constants.SIG_DD)); + + // crc32 checksum + this.write(zipUtil.getLongBytes(ae.getCrc())); + + // sizes + if (ae.isZip64()) { + this.write(zipUtil.getEightBytes(ae.getCompressedSize())); + this.write(zipUtil.getEightBytes(ae.getSize())); + } else { + this.write(zipUtil.getLongBytes(ae.getCompressedSize())); + this.write(zipUtil.getLongBytes(ae.getSize())); + } +}; + +ZipArchiveOutputStream.prototype._writeLocalFileHeader = function(ae) { + var gpb = ae.getGeneralPurposeBit(); + var method = ae.getMethod(); + var name = ae.getName(); + var extra = ae.getLocalFileDataExtra(); + + if (ae.isZip64()) { + gpb.useDataDescriptor(true); + ae.setVersionNeededToExtract(constants.MIN_VERSION_ZIP64); + } + + if (gpb.usesUTF8ForNames()) { + name = Buffer.from(name); + } + + ae._offsets.file = this.offset; + + // signature + this.write(zipUtil.getLongBytes(constants.SIG_LFH)); + + // version to extract and general bit flag + this.write(zipUtil.getShortBytes(ae.getVersionNeededToExtract())); + this.write(gpb.encode()); + + // compression method + this.write(zipUtil.getShortBytes(method)); + + // datetime + this.write(zipUtil.getLongBytes(ae.getTimeDos())); + + ae._offsets.data = this.offset; + + // crc32 checksum and sizes + if (gpb.usesDataDescriptor()) { + this.write(constants.LONG_ZERO); + this.write(constants.LONG_ZERO); + this.write(constants.LONG_ZERO); + } else { + this.write(zipUtil.getLongBytes(ae.getCrc())); + 
this.write(zipUtil.getLongBytes(ae.getCompressedSize())); + this.write(zipUtil.getLongBytes(ae.getSize())); + } + + // name length + this.write(zipUtil.getShortBytes(name.length)); + + // extra length + this.write(zipUtil.getShortBytes(extra.length)); + + // name + this.write(name); + + // extra + this.write(extra); + + ae._offsets.contents = this.offset; +}; + +ZipArchiveOutputStream.prototype.getComment = function(comment) { + return this._archive.comment !== null ? this._archive.comment : ''; +}; + +ZipArchiveOutputStream.prototype.isZip64 = function() { + return this._archive.forceZip64 || this._entries.length > constants.ZIP64_MAGIC_SHORT || this._archive.centralLength > constants.ZIP64_MAGIC || this._archive.centralOffset > constants.ZIP64_MAGIC; +}; + +ZipArchiveOutputStream.prototype.setComment = function(comment) { + this._archive.comment = comment; +}; diff --git a/backend/node_modules/compress-commons/lib/compress-commons.js b/backend/node_modules/compress-commons/lib/compress-commons.js new file mode 100644 index 00000000..ef3bc1db --- /dev/null +++ b/backend/node_modules/compress-commons/lib/compress-commons.js @@ -0,0 +1,13 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. + * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +module.exports = { + ArchiveEntry: require('./archivers/archive-entry'), + ZipArchiveEntry: require('./archivers/zip/zip-archive-entry'), + ArchiveOutputStream: require('./archivers/archive-output-stream'), + ZipArchiveOutputStream: require('./archivers/zip/zip-archive-output-stream') +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/lib/util/index.js b/backend/node_modules/compress-commons/lib/util/index.js new file mode 100644 index 00000000..20a67833 --- /dev/null +++ b/backend/node_modules/compress-commons/lib/util/index.js @@ -0,0 +1,30 @@ +/** + * node-compress-commons + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. 
+ * https://github.com/archiverjs/node-compress-commons/blob/master/LICENSE-MIT + */ +var Stream = require('stream').Stream; +var PassThrough = require('readable-stream').PassThrough; + +var util = module.exports = {}; + +util.isStream = function(source) { + return source instanceof Stream; +}; + +util.normalizeInputSource = function(source) { + if (source === null) { + return Buffer.alloc(0); + } else if (typeof source === 'string') { + return Buffer.from(source); + } else if (util.isStream(source) && !source._readableState) { + var normalized = new PassThrough(); + source.pipe(normalized); + + return normalized; + } + + return source; +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/CONTRIBUTING.md b/backend/node_modules/compress-commons/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 00000000..f478d58d --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/GOVERNANCE.md b/backend/node_modules/compress-commons/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 00000000..16ffb93f --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. 
+ +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. 
+ +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/LICENSE b/backend/node_modules/compress-commons/node_modules/readable-stream/LICENSE new file mode 100644 index 00000000..2873b3b2 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/README.md b/backend/node_modules/compress-commons/node_modules/readable-stream/README.md new file mode 100644 index 00000000..19117c1a --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. 
+ https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/errors-browser.js b/backend/node_modules/compress-commons/node_modules/readable-stream/errors-browser.js new file mode 100644 index 00000000..fb8e73e1 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/errors.js b/backend/node_modules/compress-commons/node_modules/readable-stream/errors.js new file mode 100644 index 00000000..8471526d --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + 
expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. 
Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/experimentalWarning.js b/backend/node_modules/compress-commons/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 00000000..78e84149 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_duplex.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000..19abfa60 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,126 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. 
+// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); +require('inherits')(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_passthrough.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000..24a6bdde --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,37 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; +var Transform = require('./_stream_transform'); +require('inherits')(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_readable.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000..df1f608d --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1027 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = require('util'); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = require('./internal/streams/buffer_list'); +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. 
+ if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_transform.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000..1ccb7157 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,190 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = require('./_stream_duplex'); +require('inherits')(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_writable.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000..292415e2 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,641 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +require('inherits')(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. 
options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. 
+ this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. 
+ state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 00000000..742c5a46 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,180 @@ +'use strict'; + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var finished = require('./end-of-stream'); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/buffer_list.js 
b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 00000000..69bda497 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,183 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = require('buffer'), + Buffer = _require.Buffer; +var _require2 = require('util'), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/destroy.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 00000000..31a17c4d --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,96 @@ +'use strict'; + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + 
// in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 00000000..59c671b5 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', 
onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/from-browser.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 00000000..a4ce56f3 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/from.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 00000000..0a34ee92 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,52 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? 
key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/pipeline.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 00000000..e6f39241 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). 
+ +'use strict'; + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/state.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 00000000..3fbf8927 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,22 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 
16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 00000000..9332a3fd --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/stream.js b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 00000000..ce2ad5b6 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/package.json b/backend/node_modules/compress-commons/node_modules/readable-stream/package.json new file mode 100644 index 00000000..ade59e71 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.2", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/readable-browser.js b/backend/node_modules/compress-commons/node_modules/readable-stream/readable-browser.js new file mode 100644 index 00000000..adbf60de --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); 
+exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/backend/node_modules/compress-commons/node_modules/readable-stream/readable.js b/backend/node_modules/compress-commons/node_modules/readable-stream/readable.js new file mode 100644 index 00000000..9e0ca120 --- /dev/null +++ b/backend/node_modules/compress-commons/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/backend/node_modules/compress-commons/package.json b/backend/node_modules/compress-commons/package.json new file mode 100644 index 00000000..cdc58db8 --- /dev/null +++ b/backend/node_modules/compress-commons/package.json @@ -0,0 +1,45 @@ +{ + "name": "compress-commons", + "version": "5.0.3", + "description": "a library that defines a common interface for working with archive formats within node", + "homepage": "https://github.com/archiverjs/node-compress-commons", + "author": { + "name": "Chris Talkington", + "url": "http://christalkington.com/" + }, + "repository": { + "type": "git", + "url": "https://github.com/archiverjs/node-compress-commons.git" + }, + "bugs": { + "url": "https://github.com/archiverjs/node-compress-commons/issues" + }, + "license": "MIT", + "main": "lib/compress-commons.js", + "files": [ + "lib" + ], + "engines": { + "node": ">= 12.0.0" + }, + "scripts": { + "test": "mocha --reporter dot" + }, + "dependencies": { + "crc-32": "^1.2.0", + "crc32-stream": "^5.0.0", + "normalize-path": "^3.0.0", + "readable-stream": "^3.6.0" + }, + "devDependencies": { + "chai": "4.3.8", + "mkdirp": "3.0.1", + "mocha": "9.2.2", + "rimraf": "3.0.2" + }, + "keywords": [ + "compress", + "commons", + "archive" + ] +} diff --git a/backend/node_modules/core-util-is/LICENSE b/backend/node_modules/core-util-is/LICENSE new file mode 100644 index 00000000..d8d7f943 --- /dev/null +++ b/backend/node_modules/core-util-is/LICENSE @@ -0,0 +1,19 @@ +Copyright Node.js contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/backend/node_modules/core-util-is/README.md b/backend/node_modules/core-util-is/README.md new file mode 100644 index 00000000..5a76b414 --- /dev/null +++ b/backend/node_modules/core-util-is/README.md @@ -0,0 +1,3 @@ +# core-util-is + +The `util.is*` functions introduced in Node v0.12. diff --git a/backend/node_modules/core-util-is/lib/util.js b/backend/node_modules/core-util-is/lib/util.js new file mode 100644 index 00000000..6e5a20d7 --- /dev/null +++ b/backend/node_modules/core-util-is/lib/util.js @@ -0,0 +1,107 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// NOTE: These type checking functions intentionally don't use `instanceof` +// because it is fragile and can be easily faked with `Object.create()`. 
+ +function isArray(arg) { + if (Array.isArray) { + return Array.isArray(arg); + } + return objectToString(arg) === '[object Array]'; +} +exports.isArray = isArray; + +function isBoolean(arg) { + return typeof arg === 'boolean'; +} +exports.isBoolean = isBoolean; + +function isNull(arg) { + return arg === null; +} +exports.isNull = isNull; + +function isNullOrUndefined(arg) { + return arg == null; +} +exports.isNullOrUndefined = isNullOrUndefined; + +function isNumber(arg) { + return typeof arg === 'number'; +} +exports.isNumber = isNumber; + +function isString(arg) { + return typeof arg === 'string'; +} +exports.isString = isString; + +function isSymbol(arg) { + return typeof arg === 'symbol'; +} +exports.isSymbol = isSymbol; + +function isUndefined(arg) { + return arg === void 0; +} +exports.isUndefined = isUndefined; + +function isRegExp(re) { + return objectToString(re) === '[object RegExp]'; +} +exports.isRegExp = isRegExp; + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} +exports.isObject = isObject; + +function isDate(d) { + return objectToString(d) === '[object Date]'; +} +exports.isDate = isDate; + +function isError(e) { + return (objectToString(e) === '[object Error]' || e instanceof Error); +} +exports.isError = isError; + +function isFunction(arg) { + return typeof arg === 'function'; +} +exports.isFunction = isFunction; + +function isPrimitive(arg) { + return arg === null || + typeof arg === 'boolean' || + typeof arg === 'number' || + typeof arg === 'string' || + typeof arg === 'symbol' || // ES6 symbol + typeof arg === 'undefined'; +} +exports.isPrimitive = isPrimitive; + +exports.isBuffer = require('buffer').Buffer.isBuffer; + +function objectToString(o) { + return Object.prototype.toString.call(o); +} diff --git a/backend/node_modules/core-util-is/package.json b/backend/node_modules/core-util-is/package.json new file mode 100644 index 00000000..b0c51f58 --- /dev/null +++ b/backend/node_modules/core-util-is/package.json @@ -0,0 +1,38 @@ +{ + "name": "core-util-is", + "version": "1.0.3", + "description": "The `util.is*` functions introduced in Node v0.12.", + "main": "lib/util.js", + "files": [ + "lib" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/core-util-is" + }, + "keywords": [ + "util", + "isBuffer", + "isArray", + "isNumber", + "isString", + "isRegExp", + "isThis", + "isThat", + "polyfill" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/isaacs/core-util-is/issues" + }, + "scripts": { + "test": "tap test.js", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "devDependencies": { + "tap": "^15.0.9" + } +} diff --git a/backend/node_modules/crc-32/LICENSE b/backend/node_modules/crc-32/LICENSE new file mode 100644 index 00000000..5723c45b --- /dev/null +++ b/backend/node_modules/crc-32/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright (C) 2014-present SheetJS LLC + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/node_modules/crc-32/README.md b/backend/node_modules/crc-32/README.md new file mode 100644 index 00000000..082127a0 --- /dev/null +++ b/backend/node_modules/crc-32/README.md @@ -0,0 +1,200 @@ +# crc32 + +Standard CRC-32 algorithm implementation in JS (for the browser and nodejs). +Emphasis on correctness, performance, and IE6+ support. + +## Installation + +With [npm](https://www.npmjs.org/package/crc-32): + +```bash +$ npm install crc-32 +``` + +When installed globally, npm installs a script `crc32` that computes the +checksum for a specified file or standard input. + +
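A minimal usage sketch, assuming the `npm install crc-32` step above has already run. The `"SheetJS"` input and its checksum `-1647298270` are the values documented in the Usage section further below; the unsigned conversion mirrors what the bundled `crc32.njs` CLI applies for its unsigned output mode.

```js
// Quick-start sketch, assuming `crc-32` is installed locally via npm.
var CRC32 = require("crc-32");

// Checksum of a byte buffer and of a JS string (hashed via its UTF-8 bytes);
// both inputs are ASCII here, so they agree with the documented "SheetJS" value.
var fromBuf = CRC32.buf(Buffer.from("SheetJS")); // -1647298270
var fromStr = CRC32.str("SheetJS");              // -1647298270

// The return value is a signed 32-bit integer; `>>> 0` gives the unsigned form
// (the same conversion the bundled crc32.njs script uses for `%u` output).
console.log(fromBuf, fromBuf >>> 0);
```

The functions consistently return signed 32-bit integers; callers that want the unsigned form (as printed by tools such as `cksum`) apply the `>>> 0` shift themselves, as the CLI script does.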
+ CDN Availability + +| CDN | URL | +|-----------:|:-------------------------------------------| +| `unpkg` | | +| `jsDelivr` | | +| `CDNjs` | | +
+ + +## Integration + +Using NodeJS or a bundler: + +```js +var CRC32 = require("crc-32"); +``` + +In the browser, the `crc32.js` script can be loaded directly: + +```html + +``` + +The browser script exposes a variable `CRC32`. + +The script will manipulate `module.exports` if available . This is not always +desirable. To prevent the behavior, define `DO_NOT_EXPORT_CRC`. + +### CRC32C (Castagnoli) + +The module and CDNs also include a parallel script for CRC32C calculations. + +Using NodeJS or a bundler: + +```js +var CRC32C = require("crc-32/crc32c"); +``` + +In the browser, the `crc32c.js` script can be loaded directly: + +```html + +``` + +The browser exposes a variable `CRC32C`. + +The script will manipulate `module.exports` if available . This is not always +desirable. To prevent the behavior, define `DO_NOT_EXPORT_CRC`. + +## Usage + +In all cases, the relevant function takes an argument representing data and an +optional second argument representing the starting "seed" (for rolling CRC). + +The return value is a signed 32-bit integer. + +- `CRC32.buf(byte array or buffer[, seed])` assumes the argument is a sequence + of 8-bit unsigned integers (nodejs `Buffer`, `Uint8Array` or array of bytes). + +- `CRC32.bstr(binary string[, seed])` assumes the argument is a binary string + where byte `i` is the low byte of the UCS-2 char: `str.charCodeAt(i) & 0xFF` + +- `CRC32.str(string[, seed])` assumes the argument is a standard JS string and + calculates the hash of the UTF-8 encoding. + +For example: + +```js +// var CRC32 = require('crc-32'); // uncomment this line if in node +CRC32.str("SheetJS") // -1647298270 +CRC32.bstr("SheetJS") // -1647298270 +CRC32.buf([ 83, 104, 101, 101, 116, 74, 83 ]) // -1647298270 + +crc32 = CRC32.buf([83, 104]) // -1826163454 "Sh" +crc32 = CRC32.str("eet", crc32) // 1191034598 "Sheet" +CRC32.bstr("JS", crc32) // -1647298270 "SheetJS" + +[CRC32.str("\u2603"), CRC32.str("\u0003")] // [ -1743909036, 1259060791 ] +[CRC32.bstr("\u2603"), CRC32.bstr("\u0003")] // [ 1259060791, 1259060791 ] +[CRC32.buf([0x2603]), CRC32.buf([0x0003])] // [ 1259060791, 1259060791 ] + +// var CRC32C = require('crc-32/crc32c'); // uncomment this line if in node +CRC32C.str("SheetJS") // -284764294 +CRC32C.bstr("SheetJS") // -284764294 +CRC32C.buf([ 83, 104, 101, 101, 116, 74, 83 ]) // -284764294 + +crc32c = CRC32C.buf([83, 104]) // -297065629 "Sh" +crc32c = CRC32C.str("eet", crc32c) // 1241364256 "Sheet" +CRC32C.bstr("JS", crc32c) // -284764294 "SheetJS" + +[CRC32C.str("\u2603"), CRC32C.str("\u0003")] // [ 1253703093, 1093509285 ] +[CRC32C.bstr("\u2603"), CRC32C.bstr("\u0003")] // [ 1093509285, 1093509285 ] +[CRC32C.buf([0x2603]), CRC32C.buf([0x0003])] // [ 1093509285, 1093509285 ] +``` + +### Best Practices + +Even though the initial seed is optional, for performance reasons it is highly +recommended to explicitly pass the default seed 0. + +In NodeJS with the native Buffer implementation, it is oftentimes faster to +convert binary strings with `Buffer.from(bstr, "binary")` first: + +```js +/* Frequently slower in NodeJS */ +crc32 = CRC32.bstr(bstr, 0); +/* Frequently faster in NodeJS */ +crc32 = CRC32.buf(Buffer.from(bstr, "binary"), 0); +``` + +This does not apply to browser `Buffer` shims, and thus is not implemented in +the library directly. + +## Testing + +`make test` will run the nodejs-based test. + +To run the in-browser tests, run a local server and go to the `ctest` directory. +`make ctestserv` will start a python `SimpleHTTPServer` server on port 8000. 
+ +To update the browser artifacts, run `make ctest`. + +To generate the bits file, use the `crc32` function from python `zlib`: + +```python +>>> from zlib import crc32 +>>> x="foo bar baz٪☃🍣" +>>> crc32(x) +1531648243 +>>> crc32(x+x) +-218791105 +>>> crc32(x+x+x) +1834240887 +``` + +The included `crc32.njs` script can process files or standard input: + +```bash +$ echo "this is a test" > t.txt +$ bin/crc32.njs t.txt +1912935186 +``` + +For comparison, the included `crc32.py` script uses python `zlib`: + +```bash +$ bin/crc32.py t.txt +1912935186 +``` + +On OSX the command `cksum` generates unsigned CRC-32 with Algorithm 3: + +```bash +$ cksum -o 3 < IE8.Win7.For.Windows.VMware.zip +1891069052 4161613172 +$ crc32 --unsigned ~/Downloads/IE8.Win7.For.Windows.VMware.zip +1891069052 +``` + +## Performance + +`make perf` will run algorithmic performance tests (which should justify certain +decisions in the code). + +The [`adler-32` project](http://git.io/adler32) has more performance notes + +## License + +Please consult the attached LICENSE file for details. All rights not explicitly +granted by the Apache 2.0 license are reserved by the Original Author. + +## Badges + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/crc32.svg)](https://saucelabs.com/u/crc32) + +[![Build Status](https://travis-ci.org/SheetJS/js-crc32.svg?branch=master)](https://travis-ci.org/SheetJS/js-crc32) +[![Coverage Status](http://img.shields.io/coveralls/SheetJS/js-crc32/master.svg)](https://coveralls.io/r/SheetJS/js-crc32?branch=master) +[![Dependencies Status](https://david-dm.org/sheetjs/js-crc32/status.svg)](https://david-dm.org/sheetjs/js-crc32) +[![NPM Downloads](https://img.shields.io/npm/dt/crc-32.svg)](https://npmjs.org/package/crc-32) +[![ghit.me](https://ghit.me/badge.svg?repo=sheetjs/js-xlsx)](https://ghit.me/repo/sheetjs/js-xlsx) +[![Analytics](https://ga-beacon.appspot.com/UA-36810333-1/SheetJS/js-crc32?pixel)](https://github.com/SheetJS/js-crc32) diff --git a/backend/node_modules/crc-32/bin/crc32.njs b/backend/node_modules/crc-32/bin/crc32.njs new file mode 100755 index 00000000..334e7d2c --- /dev/null +++ b/backend/node_modules/crc-32/bin/crc32.njs @@ -0,0 +1,105 @@ +#!/usr/bin/env node +/* crc32.js (C) 2014-present SheetJS -- http://sheetjs.com */ +/* eslint-env node */ +/* vim: set ts=2 ft=javascript: */ +/*jshint node:true */ + +var X/*:CRC32Module*/; +try { X = require('../'); } catch(e) { X = require('crc-32'); } + +function help()/*:number*/ { +[ +"usage: crc32 [options] [filename]", +"", +"Options:", +" -h, --help output usage information", +" -V, --version output the version number", +" -S, --seed= use integer seed as starting value (rolling CRC)", +" -H, --hex-seed= use hex seed as starting value (rolling CRC)", +" -d, --signed print result with format `%d` (default)", +" -u, --unsigned print result with format `%u`", +" -x, --hex print result with format `%0.8x`", +" -X, --HEX print result with format `%0.8X`", +" -c, --crc32c use CRC32C (Castagnoli)", +" -F, --format= use specified printf format", +"", +"Set filename = '-' or pipe data into crc32 to read from stdin", +"Default output mode is signed (-d)", +"" +].forEach(function(l) { console.log(l); }); + return 0; +} + +function version()/*:number*/ { console.log(X.version); return 0; } + +var fs = require('fs'); +try { require('exit-on-epipe'); } catch(e) {} + +function die(msg/*:string*/, ec/*:?number*/)/*:void*/ { console.error(msg); process.exit(ec || 0); } + +var args/*:Array*/ = process.argv.slice(2); +var 
filename/*:string*/ = ""; +var fmt/*:string*/ = ""; +var seed = 0, r = 10; + +for(var i = 0; i < args.length; ++i) { + var arg = args[i]; + if(arg.charCodeAt(0) != 45) { if(filename === "") filename = arg; continue; } + var m = arg.indexOf("=") == -1 ? arg : arg.substr(0, arg.indexOf("=")); + switch(m) { + case "-": filename = "-"; break; + + case "--help": case "-h": process.exit(help()); break; + case "--version": case "-V": process.exit(version()); break; + + case "--crc32c": case "-c": try { X = require('../crc32c'); } catch(e) { X = require('crc-32/crc32c'); } break; + + case "--signed": case "-d": fmt = "%d"; break; + case "--unsigned": case "-u": fmt = "%u"; break; + case "--hex": case "-x": fmt = "%0.8x"; break; + case "--HEX": case "-X": fmt = "%0.8X"; break; + case "--format": case "-F": + try { + require("printj"); + fmt = ((m!=arg) ? arg.substr(m.length+1) : args[++i])||""; + } catch(e) { + console.error("The `crc-32` module removed the `printj` dependency for formatting"); + console.error("Use the `crc32-cli` module instead:"); + console.error(" $ npx crc32-cli [options] [filename]"); + } break; + + case "--hex-seed": case "-H": r = 16; + /* falls through */ + case "--seed": case "-S": + seed=parseInt((m!=arg) ? arg.substr(m.length+1) : args[++i], r)||0; break; + + default: die("crc32: unrecognized option `" + arg + "'", 22); + } +} + +if(!process.stdin.isTTY) filename = filename || "-"; +if(filename.length===0) die("crc32: must specify a filename ('-' for stdin)",1); + +var crc32 = seed; +// $FlowIgnore -- Writable is callable but type sig disagrees +var writable = require('stream').Writable(); +writable._write = function(chunk, e, cb) { crc32 = X.buf(chunk, crc32); cb(); }; +writable._writev = function(chunks, cb) { + chunks.forEach(function(c) { crc32 = X.buf(c.chunk, crc32);}); + cb(); +}; +writable.on('finish', function() { + if(fmt === "") console.log(crc32); + else try { console.log(require("printj").sprintf(fmt, crc32)); } catch(e) { + switch(fmt) { + case "%d": console.log(crc32); break; + case "%u": console.log(crc32 >>> 0); break; + case "%0.8x": console.log((crc32 >>> 0).toString(16).padStart(8, "0").toLowerCase()); break; + case "%0.8X": console.log((crc32 >>> 0).toString(16).padStart(8, "0").toUpperCase()); break; + } + } +}); + +if(filename === "-") process.stdin.pipe(writable); +else if(fs.existsSync(filename)) fs.createReadStream(filename).pipe(writable); +else die("crc32: " + filename + ": No such file or directory", 2); diff --git a/backend/node_modules/crc-32/crc32.js b/backend/node_modules/crc-32/crc32.js new file mode 100644 index 00000000..c92664a7 --- /dev/null +++ b/backend/node_modules/crc-32/crc32.js @@ -0,0 +1,115 @@ +/*! crc32.js (C) 2014-present SheetJS -- http://sheetjs.com */ +/* vim: set ts=2: */ +/*exported CRC32 */ +var CRC32; +(function (factory) { + /*jshint ignore:start */ + /*eslint-disable */ + if(typeof DO_NOT_EXPORT_CRC === 'undefined') { + if('object' === typeof exports) { + factory(exports); + } else if ('function' === typeof define && define.amd) { + define(function () { + var module = {}; + factory(module); + return module; + }); + } else { + factory(CRC32 = {}); + } + } else { + factory(CRC32 = {}); + } + /*eslint-enable */ + /*jshint ignore:end */ +}(function(CRC32) { +CRC32.version = '1.2.2'; +/*global Int32Array */ +function signed_crc_table() { + var c = 0, table = new Array(256); + + for(var n =0; n != 256; ++n){ + c = n; + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? 
(-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); + table[n] = c; + } + + return typeof Int32Array !== 'undefined' ? new Int32Array(table) : table; +} + +var T0 = signed_crc_table(); +function slice_by_16_tables(T) { + var c = 0, v = 0, n = 0, table = typeof Int32Array !== 'undefined' ? new Int32Array(4096) : new Array(4096) ; + + for(n = 0; n != 256; ++n) table[n] = T[n]; + for(n = 0; n != 256; ++n) { + v = T[n]; + for(c = 256 + n; c < 4096; c += 256) v = table[c] = (v >>> 8) ^ T[v & 0xFF]; + } + var out = []; + for(n = 1; n != 16; ++n) out[n - 1] = typeof Int32Array !== 'undefined' ? table.subarray(n * 256, n * 256 + 256) : table.slice(n * 256, n * 256 + 256); + return out; +} +var TT = slice_by_16_tables(T0); +var T1 = TT[0], T2 = TT[1], T3 = TT[2], T4 = TT[3], T5 = TT[4]; +var T6 = TT[5], T7 = TT[6], T8 = TT[7], T9 = TT[8], Ta = TT[9]; +var Tb = TT[10], Tc = TT[11], Td = TT[12], Te = TT[13], Tf = TT[14]; +function crc32_bstr(bstr, seed) { + var C = seed ^ -1; + for(var i = 0, L = bstr.length; i < L;) C = (C>>>8) ^ T0[(C^bstr.charCodeAt(i++))&0xFF]; + return ~C; +} + +function crc32_buf(B, seed) { + var C = seed ^ -1, L = B.length - 15, i = 0; + for(; i < L;) C = + Tf[B[i++] ^ (C & 255)] ^ + Te[B[i++] ^ ((C >> 8) & 255)] ^ + Td[B[i++] ^ ((C >> 16) & 255)] ^ + Tc[B[i++] ^ (C >>> 24)] ^ + Tb[B[i++]] ^ Ta[B[i++]] ^ T9[B[i++]] ^ T8[B[i++]] ^ + T7[B[i++]] ^ T6[B[i++]] ^ T5[B[i++]] ^ T4[B[i++]] ^ + T3[B[i++]] ^ T2[B[i++]] ^ T1[B[i++]] ^ T0[B[i++]]; + L += 15; + while(i < L) C = (C>>>8) ^ T0[(C^B[i++])&0xFF]; + return ~C; +} + +function crc32_str(str, seed) { + var C = seed ^ -1; + for(var i = 0, L = str.length, c = 0, d = 0; i < L;) { + c = str.charCodeAt(i++); + if(c < 0x80) { + C = (C>>>8) ^ T0[(C^c)&0xFF]; + } else if(c < 0x800) { + C = (C>>>8) ^ T0[(C ^ (192|((c>>6)&31)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(c&63)))&0xFF]; + } else if(c >= 0xD800 && c < 0xE000) { + c = (c&1023)+64; d = str.charCodeAt(i++)&1023; + C = (C>>>8) ^ T0[(C ^ (240|((c>>8)&7)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((c>>2)&63)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((d>>6)&15)|((c&3)<<4)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(d&63)))&0xFF]; + } else { + C = (C>>>8) ^ T0[(C ^ (224|((c>>12)&15)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((c>>6)&63)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(c&63)))&0xFF]; + } + } + return ~C; +} +CRC32.table = T0; +// $FlowIgnore +CRC32.bstr = crc32_bstr; +// $FlowIgnore +CRC32.buf = crc32_buf; +// $FlowIgnore +CRC32.str = crc32_str; +})); diff --git a/backend/node_modules/crc-32/crc32c.js b/backend/node_modules/crc-32/crc32c.js new file mode 100644 index 00000000..447fc8f7 --- /dev/null +++ b/backend/node_modules/crc-32/crc32c.js @@ -0,0 +1,115 @@ +/*! 
crc32.js (C) 2014-present SheetJS -- http://sheetjs.com */ +/* vim: set ts=2: */ +/*exported CRC32C */ +var CRC32C; +(function (factory) { + /*jshint ignore:start */ + /*eslint-disable */ + if(typeof DO_NOT_EXPORT_CRC === 'undefined') { + if('object' === typeof exports) { + factory(exports); + } else if ('function' === typeof define && define.amd) { + define(function () { + var module = {}; + factory(module); + return module; + }); + } else { + factory(CRC32C = {}); + } + } else { + factory(CRC32C = {}); + } + /*eslint-enable */ + /*jshint ignore:end */ +}(function(CRC32C) { +CRC32C.version = '1.2.2'; +/*global Int32Array */ +function signed_crc_table() { + var c = 0, table = new Array(256); + + for(var n =0; n != 256; ++n){ + c = n; + c = ((c&1) ? (-2097792136 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-2097792136 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-2097792136 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-2097792136 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-2097792136 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-2097792136 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-2097792136 ^ (c >>> 1)) : (c >>> 1)); + c = ((c&1) ? (-2097792136 ^ (c >>> 1)) : (c >>> 1)); + table[n] = c; + } + + return typeof Int32Array !== 'undefined' ? new Int32Array(table) : table; +} + +var T0 = signed_crc_table(); +function slice_by_16_tables(T) { + var c = 0, v = 0, n = 0, table = typeof Int32Array !== 'undefined' ? new Int32Array(4096) : new Array(4096) ; + + for(n = 0; n != 256; ++n) table[n] = T[n]; + for(n = 0; n != 256; ++n) { + v = T[n]; + for(c = 256 + n; c < 4096; c += 256) v = table[c] = (v >>> 8) ^ T[v & 0xFF]; + } + var out = []; + for(n = 1; n != 16; ++n) out[n - 1] = typeof Int32Array !== 'undefined' ? table.subarray(n * 256, n * 256 + 256) : table.slice(n * 256, n * 256 + 256); + return out; +} +var TT = slice_by_16_tables(T0); +var T1 = TT[0], T2 = TT[1], T3 = TT[2], T4 = TT[3], T5 = TT[4]; +var T6 = TT[5], T7 = TT[6], T8 = TT[7], T9 = TT[8], Ta = TT[9]; +var Tb = TT[10], Tc = TT[11], Td = TT[12], Te = TT[13], Tf = TT[14]; +function crc32_bstr(bstr, seed) { + var C = seed ^ -1; + for(var i = 0, L = bstr.length; i < L;) C = (C>>>8) ^ T0[(C^bstr.charCodeAt(i++))&0xFF]; + return ~C; +} + +function crc32_buf(B, seed) { + var C = seed ^ -1, L = B.length - 15, i = 0; + for(; i < L;) C = + Tf[B[i++] ^ (C & 255)] ^ + Te[B[i++] ^ ((C >> 8) & 255)] ^ + Td[B[i++] ^ ((C >> 16) & 255)] ^ + Tc[B[i++] ^ (C >>> 24)] ^ + Tb[B[i++]] ^ Ta[B[i++]] ^ T9[B[i++]] ^ T8[B[i++]] ^ + T7[B[i++]] ^ T6[B[i++]] ^ T5[B[i++]] ^ T4[B[i++]] ^ + T3[B[i++]] ^ T2[B[i++]] ^ T1[B[i++]] ^ T0[B[i++]]; + L += 15; + while(i < L) C = (C>>>8) ^ T0[(C^B[i++])&0xFF]; + return ~C; +} + +function crc32_str(str, seed) { + var C = seed ^ -1; + for(var i = 0, L = str.length, c = 0, d = 0; i < L;) { + c = str.charCodeAt(i++); + if(c < 0x80) { + C = (C>>>8) ^ T0[(C^c)&0xFF]; + } else if(c < 0x800) { + C = (C>>>8) ^ T0[(C ^ (192|((c>>6)&31)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(c&63)))&0xFF]; + } else if(c >= 0xD800 && c < 0xE000) { + c = (c&1023)+64; d = str.charCodeAt(i++)&1023; + C = (C>>>8) ^ T0[(C ^ (240|((c>>8)&7)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((c>>2)&63)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((d>>6)&15)|((c&3)<<4)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(d&63)))&0xFF]; + } else { + C = (C>>>8) ^ T0[(C ^ (224|((c>>12)&15)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|((c>>6)&63)))&0xFF]; + C = (C>>>8) ^ T0[(C ^ (128|(c&63)))&0xFF]; + } + } + return ~C; +} +CRC32C.table = T0; +// $FlowIgnore +CRC32C.bstr = crc32_bstr; +// $FlowIgnore 
+CRC32C.buf = crc32_buf; +// $FlowIgnore +CRC32C.str = crc32_str; +})); diff --git a/backend/node_modules/crc-32/package.json b/backend/node_modules/crc-32/package.json new file mode 100644 index 00000000..a523291a --- /dev/null +++ b/backend/node_modules/crc-32/package.json @@ -0,0 +1,43 @@ +{ + "name": "crc-32", + "version": "1.2.2", + "author": "sheetjs", + "description": "Pure-JS CRC-32", + "keywords": [ "crc", "crc32", "checksum" ], + "bin": { + "crc32": "bin/crc32.njs" + }, + "main": "crc32.js", + "types": "types/index.d.ts", + "typesVersions": { "*": { "*": ["types/index.d.ts" ] } }, + "dependencies": { + }, + "devDependencies": { + "printj": "~1.3.1", + "exit-on-epipe": "~1.0.1", + "mocha": "~2.5.3", + "blanket": "~1.2.3", + "codepage": "~1.10.0", + "@sheetjs/uglify-js": "~2.7.3", + "@types/node": "^8.0.7", + "dtslint": "^0.1.2", + "typescript": "2.2.0" + }, + "repository": { "type": "git", "url": "git://github.com/SheetJS/js-crc32.git" }, + "scripts": { + "test": "make test", + "build": "make", + "lint": "make fullint", + "dtslint": "dtslint types" + }, + "config": { + "blanket": { + "pattern": "crc32.js" + } + }, + "homepage": "https://sheetjs.com/", + "files": ["crc32.js", "crc32c.js", "bin/crc32.njs", "LICENSE", "README.md", "types/index.d.ts", "types/*.json"], + "bugs": { "url": "https://github.com/SheetJS/js-crc32/issues" }, + "license": "Apache-2.0", + "engines": { "node": ">=0.8" } +} diff --git a/backend/node_modules/crc-32/types/index.d.ts b/backend/node_modules/crc-32/types/index.d.ts new file mode 100644 index 00000000..16386900 --- /dev/null +++ b/backend/node_modules/crc-32/types/index.d.ts @@ -0,0 +1,14 @@ +/* crc32.js (C) 2014-present SheetJS -- http://sheetjs.com */ +// TypeScript Version: 2.2 + +/** Version string */ +export const version: string; + +/** Process a node buffer or byte array */ +export function buf(data: number[] | Uint8Array, seed?: number): number; + +/** Process a binary string */ +export function bstr(data: string, seed?: number): number; + +/** Process a JS string based on the UTF8 encoding */ +export function str(data: string, seed?: number): number; diff --git a/backend/node_modules/crc-32/types/tsconfig.json b/backend/node_modules/crc-32/types/tsconfig.json new file mode 100644 index 00000000..6e122c77 --- /dev/null +++ b/backend/node_modules/crc-32/types/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "module": "commonjs", + "lib": [ "es5" ], + "noImplicitAny": true, + "noImplicitThis": true, + "strictNullChecks": false, + "baseUrl": ".", + "paths": { "crc-32": ["."] }, + "types": [], + "noEmit": true, + "forceConsistentCasingInFileNames": true + } +} diff --git a/backend/node_modules/crc-32/types/tslint.json b/backend/node_modules/crc-32/types/tslint.json new file mode 100644 index 00000000..d9401a97 --- /dev/null +++ b/backend/node_modules/crc-32/types/tslint.json @@ -0,0 +1,11 @@ +{ + "extends": "dtslint/dtslint.json", + "rules": { + "whitespace": false, + "no-sparse-arrays": false, + "only-arrow-functions": false, + "no-consecutive-blank-lines": false, + "prefer-conditional-expression": false, + "one-variable-per-declaration": false + } +} diff --git a/backend/node_modules/crc32-stream/CHANGELOG.md b/backend/node_modules/crc32-stream/CHANGELOG.md new file mode 100644 index 00000000..3f8da349 --- /dev/null +++ b/backend/node_modules/crc32-stream/CHANGELOG.md @@ -0,0 +1,63 @@ +## Changelog + +**5.0.1** — _February 28, 2024_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/5.0.0...5.0.1) + +**5.0.0** — _September 
2, 2023_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/4.0.3...5.0.0) + +**4.0.3** — _September 2, 2023_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/4.0.2...4.0.3) + +**4.0.2** — _February 3, 2021_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/4.0.1...4.0.2) + +### Bug Fixes + +- fix DeflateCRC32Stream to support Node.js 15.6.0+ (#31) (#32) + +### Maintenance + +- Bump actions/setup-node from v2.1.2 to v2.1.4 (#30) + +**4.0.1** — _November 18, 2020_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/4.0.0...4.0.1) + +### Bug Fixes + +- use crc-32 rather than crc module (#28) + +### Maintenance + +- Bump mocha from 8.2.0 to 8.2.1 (#25) +- Bump actions/checkout from v2.3.2 to v2.3.4 (#26) +- Bump actions/setup-node from v2.1.1 to v2.1.2 (#23) +- Bump mocha from 8.1.1 to 8.2.0 (#24) +- Bump mocha from 8.1.0 to 8.1.1 (#18) +- Bump actions/checkout from v2.3.1 to v2.3.2 (#19) +- Bump mocha from 8.0.1 to 8.1.0 (#17) +- Bump actions/setup-node from v2.1.0 to v2.1.1 (#16) + +**4.0.0** — _July 18, 2020_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/3.0.1...4.0.0) + +* Bump actions/checkout from v1 to v2.3.1 (#13) @dependabot +* Bump readable-stream from 3.4.0 to 3.6.0 (#15) @dependabot +* Bump mocha from 6.2.0 to 8.0.1 (#14) @dependabot +* Bump actions/setup-node from v1 to v2.1.0 (#12) @dependabot +* remove support for node < 10 (#11) @ctalkington + +**3.0.1** — _August 2, 2019_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/3.0.0...3.0.1) + +- update dependencies + +**3.0.0** — _April 29, 2019_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/2.0.0...3.0.0) + +- Require Node.js 6.9, update dependencies, use modern JS syntax (GH #10) +- Do not use the deprecated Buffer() constructor (GH #8) +- remove node v0.10 and v0.12 support + +**2.0.0** — _February 13, 2017_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/1.0.1...2.0.0) + +- adopt nodejs core Hash API (GH #4) + +**1.0.1** — _January 12, 2016_ — [Diff](https://github.com/archiverjs/node-crc32-stream/compare/1.0.0...1.0.1) + +- Switch to node-crc for performance (GH #3) +- bump deps to ensure latest versions are used. + +[Release Archive](https://github.com/archiverjs/node-crc32-stream/releases) diff --git a/backend/node_modules/crc32-stream/LICENSE b/backend/node_modules/crc32-stream/LICENSE new file mode 100644 index 00000000..56420a6a --- /dev/null +++ b/backend/node_modules/crc32-stream/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2014 Chris Talkington, contributors. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/README.md b/backend/node_modules/crc32-stream/README.md new file mode 100644 index 00000000..d91c90f0 --- /dev/null +++ b/backend/node_modules/crc32-stream/README.md @@ -0,0 +1,79 @@ +# CRC32 Stream + +crc32-stream is a streaming CRC32 checksumer. It uses the [crc](https://www.npmjs.org/package/crc) module behind the scenes to reliably handle binary data and fancy character sets. Data is passed through untouched. + +### Install + +```bash +npm install crc32-stream --save +``` + +You can also use `npm install https://github.com/archiverjs/node-crc32-stream/archive/master.tar.gz` to test upcoming versions. + +### Usage + +#### CRC32Stream + +Inherits [Transform Stream](http://nodejs.org/api/stream.html#stream_class_stream_transform) options and methods. + +```js +const {CRC32Stream} = require('crc32-stream'); + +const source = fs.createReadStream('file.txt'); +const checksum = new CRC32Stream(); + +checksum.on('end', function(err) { + // do something with checksum.digest() here +}); + +// either pipe it +source.pipe(checksum); + +// or write it +checksum.write('string'); +checksum.end(); +``` + +#### DeflateCRC32Stream + +Inherits [zlib.DeflateRaw](http://nodejs.org/api/zlib.html#zlib_class_zlib_deflateraw) options and methods. + +```js +const {DeflateCRC32Stream} = require('crc32-stream'); + +const source = fs.createReadStream('file.txt'); +const checksum = new DeflateCRC32Stream(); + +checksum.on('end', function(err) { + // do something with checksum.digest() here +}); + +// either pipe it +source.pipe(checksum); + +// or write it +checksum.write('string'); +checksum.end(); +``` + +### Instance API + +#### digest() + +Returns the checksum digest in unsigned form. + +#### hex() + +Returns the hexadecimal representation of the checksum digest. (ie E81722F0) + +#### size(compressed) + +Returns the raw size/length of passed-through data. + +If `compressed` is `true`, it returns compressed length instead. (DeflateCRC32Stream) + +## Things of Interest + +- [Changelog](https://github.com/archiverjs/node-crc32-stream/releases) +- [Contributing](https://github.com/archiverjs/node-crc32-stream/blob/master/CONTRIBUTING.md) +- [MIT License](https://github.com/archiverjs/node-crc32-stream/blob/master/LICENSE-MIT) diff --git a/backend/node_modules/crc32-stream/lib/crc32-stream.js b/backend/node_modules/crc32-stream/lib/crc32-stream.js new file mode 100644 index 00000000..7942ecc9 --- /dev/null +++ b/backend/node_modules/crc32-stream/lib/crc32-stream.js @@ -0,0 +1,48 @@ +/** + * node-crc32-stream + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. 
+ * https://github.com/archiverjs/node-crc32-stream/blob/master/LICENSE-MIT + */ + + 'use strict'; + +const {Transform} = require('readable-stream'); + +const crc32 = require('crc-32'); + +class CRC32Stream extends Transform { + constructor(options) { + super(options); + this.checksum = Buffer.allocUnsafe(4); + this.checksum.writeInt32BE(0, 0); + + this.rawSize = 0; + } + + _transform(chunk, encoding, callback) { + if (chunk) { + this.checksum = crc32.buf(chunk, this.checksum) >>> 0; + this.rawSize += chunk.length; + } + + callback(null, chunk); + } + + digest(encoding) { + const checksum = Buffer.allocUnsafe(4); + checksum.writeUInt32BE(this.checksum >>> 0, 0); + return encoding ? checksum.toString(encoding) : checksum; + } + + hex() { + return this.digest('hex').toUpperCase(); + } + + size() { + return this.rawSize; + } +} + +module.exports = CRC32Stream; diff --git a/backend/node_modules/crc32-stream/lib/deflate-crc32-stream.js b/backend/node_modules/crc32-stream/lib/deflate-crc32-stream.js new file mode 100644 index 00000000..899315eb --- /dev/null +++ b/backend/node_modules/crc32-stream/lib/deflate-crc32-stream.js @@ -0,0 +1,62 @@ +/** + * node-crc32-stream + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. + * https://github.com/archiverjs/node-crc32-stream/blob/master/LICENSE-MIT + */ + +'use strict'; + +const {DeflateRaw} = require('zlib'); + +const crc32 = require('crc-32'); + +class DeflateCRC32Stream extends DeflateRaw { + constructor(options) { + super(options); + + this.checksum = Buffer.allocUnsafe(4); + this.checksum.writeInt32BE(0, 0); + + this.rawSize = 0; + this.compressedSize = 0; + } + + push(chunk, encoding) { + if (chunk) { + this.compressedSize += chunk.length; + } + + return super.push(chunk, encoding); + } + + _transform(chunk, encoding, callback) { + if (chunk) { + this.checksum = crc32.buf(chunk, this.checksum) >>> 0; + this.rawSize += chunk.length; + } + + super._transform(chunk, encoding, callback) + } + + digest(encoding) { + const checksum = Buffer.allocUnsafe(4); + checksum.writeUInt32BE(this.checksum >>> 0, 0); + return encoding ? checksum.toString(encoding) : checksum; + } + + hex() { + return this.digest('hex').toUpperCase(); + } + + size(compressed = false) { + if (compressed) { + return this.compressedSize; + } else { + return this.rawSize; + } + } +} + +module.exports = DeflateCRC32Stream; diff --git a/backend/node_modules/crc32-stream/lib/index.js b/backend/node_modules/crc32-stream/lib/index.js new file mode 100644 index 00000000..8e154ecd --- /dev/null +++ b/backend/node_modules/crc32-stream/lib/index.js @@ -0,0 +1,14 @@ +/** + * node-crc32-stream + * + * Copyright (c) 2014 Chris Talkington, contributors. + * Licensed under the MIT license. 
+ * https://github.com/archiverjs/node-crc32-stream/blob/master/LICENSE-MIT + */ + +'use strict'; + +module.exports = { + CRC32Stream: require('./crc32-stream'), + DeflateCRC32Stream: require('./deflate-crc32-stream') +} diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/CONTRIBUTING.md b/backend/node_modules/crc32-stream/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 00000000..f478d58d --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/GOVERNANCE.md b/backend/node_modules/crc32-stream/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 00000000..16ffb93f --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. 
+ +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. 
WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/LICENSE b/backend/node_modules/crc32-stream/node_modules/readable-stream/LICENSE new file mode 100644 index 00000000..2873b3b2 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/README.md b/backend/node_modules/crc32-stream/node_modules/readable-stream/README.md new file mode 100644 index 00000000..19117c1a --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. 
+ +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/errors-browser.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/errors-browser.js new file mode 100644 index 00000000..fb8e73e1 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } 
else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". 
Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/errors.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/errors.js new file mode 100644 index 00000000..8471526d --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/experimentalWarning.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 00000000..78e84149 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? 
emitExperimentalWarning + : noop; diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_duplex.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000..19abfa60 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,126 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); +require('inherits')(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. 
+ var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_passthrough.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000..24a6bdde --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,37 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; +var Transform = require('./_stream_transform'); +require('inherits')(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_readable.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000..df1f608d --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1027 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
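The `_stream_passthrough.js` file vendored above is the minimal Transform: every written chunk is emitted unchanged. As a hedged illustration only (not part of this diff), a PassThrough is often placed in the middle of a pipeline to observe traffic without altering it; the byte counter below is invented and uses Node's built-in `stream` module, which `readable-stream` mirrors.

```js
// Illustrative only; not part of the vendored files above.
// A PassThrough forwards every chunk as-is, so it can sit between a
// source and a destination to observe the data -- here, counting bytes.
const { PassThrough, Readable } = require('stream');

const meter = new PassThrough();
let bytes = 0;
meter.on('data', (chunk) => { bytes += chunk.length; });
meter.on('end', () => console.log(`saw ${bytes} bytes`));

Readable.from(['hello ', 'world\n']).pipe(meter).pipe(process.stdout);
```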
+ +'use strict'; + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = require('util'); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = require('./internal/streams/buffer_list'); +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
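The comment block above spells out the `push()` contract for the vendored Readable: `push(chunk)` fills the internal buffer, returns `false` once the `highWaterMark` is reached, and `push(null)` signals EOF, after which `_read()` is called again as the consumer drains the buffer. A minimal sketch of a custom Readable built on that contract (the counter and its limit are invented, and Node's built-in `stream` module stands in for the vendored copy):

```js
// Hedged sketch of the _read()/push() contract described above.
const { Readable } = require('stream');

class Counter extends Readable {
  constructor(limit) {
    super();
    this.current = 0;
    this.limit = limit;
  }
  _read() {
    if (this.current < this.limit) {
      // push() buffers the chunk; _read() is called again when the
      // consumer has drained the internal buffer below highWaterMark.
      this.push(`${this.current++}\n`);
    } else {
      this.push(null); // EOF: no more data will be produced
    }
  }
}

new Counter(5).pipe(process.stdout); // prints 0..4, one per line
```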
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
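The `pipe()` implementation above only forwards end-of-stream to the destination when `{ end: false }` was not passed (and never for `process.stdout`/`process.stderr`). A hedged sketch of what that option is for, with invented stream contents, using the equivalent built-in API rather than this vendored copy:

```js
// Illustrative only: { end: false } keeps the destination open so that
// several sources can be piped into it one after another.
const { Readable, PassThrough } = require('stream');

const dest = new PassThrough();
dest.pipe(process.stdout);

const first = Readable.from(['one\n']);
const second = Readable.from(['two\n']);

first.pipe(dest, { end: false });          // dest stays open when `first` ends
first.on('end', () => second.pipe(dest));  // default behaviour: dest ends with `second`
```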
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_transform.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000..1ccb7157 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,190 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = require('./_stream_duplex'); +require('inherits')(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
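The constructor above accepts `transform` and `flush` options (or `_transform`/`_flush` on a subclass) and wires them to the machinery just described. A minimal subclass sketch of those two hooks, with an invented upper-casing behaviour, using Node's built-in `stream` module which this vendored copy mirrors:

```js
// Hedged sketch of the _transform()/_flush() hooks; the upper-casing
// transform is made up for the example.
const { Transform, Readable } = require('stream');

class UpperCase extends Transform {
  _transform(chunk, encoding, callback) {
    // Hand the transformed data to the readable side and acknowledge the
    // written chunk in one call (equivalent to push() + callback()).
    callback(null, chunk.toString().toUpperCase());
  }
  _flush(callback) {
    // Runs once the writable side has ended -- a last chance to emit data.
    this.push('\n[flushed]\n');
    callback();
  }
}

Readable.from(['hello ', 'streams']).pipe(new UpperCase()).pipe(process.stdout);
```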
+ this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_writable.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000..292415e2 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,641 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +require('inherits')(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. 
options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. 
+ this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
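The `Writable` constructor above copies user-supplied `write`, `writev`, `destroy` and `final` options onto the instance. A hedged, consumer-side sketch of two of those hooks (the in-memory collector below is invented and uses the built-in `stream` module):

```js
// Illustrative sketch of the constructor options handled above.
const { Writable } = require('stream');

const chunks = [];
const collector = new Writable({
  write(chunk, encoding, callback) {
    chunks.push(chunk); // chunk arrives as a Buffer unless decodeStrings: false
    callback();         // signal that this chunk has been fully handled
  },
  final(callback) {
    // Runs after end(), before 'finish' is emitted.
    console.log(Buffer.concat(chunks).toString());
    callback();
  }
});

collector.write('hello ');
collector.end('world'); // triggers final() and then the 'finish' event
```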
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. 
+ state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 00000000..742c5a46 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,180 @@ +'use strict'; + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var finished = require('./end-of-stream'); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/buffer_list.js 
b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 00000000..69bda497 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,183 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = require('buffer'), + Buffer = _require.Buffer; +var _require2 = require('util'), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/destroy.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 00000000..31a17c4d --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,96 @@ +'use strict'; + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the 
same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 00000000..59c671b5 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + 
stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/from-browser.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 00000000..a4ce56f3 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/from.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 00000000..0a34ee92 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,52 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/pipeline.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 00000000..e6f39241 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). 
+ +'use strict'; + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/state.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 00000000..3fbf8927 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,22 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 
16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 00000000..9332a3fd --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/stream.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 00000000..ce2ad5b6 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/package.json b/backend/node_modules/crc32-stream/node_modules/readable-stream/package.json new file mode 100644 index 00000000..ade59e71 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.2", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/readable-browser.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/readable-browser.js new file mode 100644 index 00000000..adbf60de --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = 
exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/backend/node_modules/crc32-stream/node_modules/readable-stream/readable.js b/backend/node_modules/crc32-stream/node_modules/readable-stream/readable.js new file mode 100644 index 00000000..9e0ca120 --- /dev/null +++ b/backend/node_modules/crc32-stream/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/backend/node_modules/crc32-stream/package.json b/backend/node_modules/crc32-stream/package.json new file mode 100644 index 00000000..be1c0c18 --- /dev/null +++ b/backend/node_modules/crc32-stream/package.json @@ -0,0 +1,45 @@ +{ + "name": "crc32-stream", + "version": "5.0.1", + "description": "a streaming CRC32 checksumer", + "homepage": "https://github.com/archiverjs/node-crc32-stream", + "author": { + "name": "Chris Talkington", + "url": "http://christalkington.com/" + }, + "repository": { + "type": "git", + "url": "https://github.com/archiverjs/node-crc32-stream.git" + }, + "bugs": { + "url": "https://github.com/archiverjs/node-crc32-stream/issues" + }, + "license": "MIT", + "main": "lib/index.js", + "files": [ + "lib" + ], + "engines": { + "node": ">= 12.0.0" + }, + "scripts": { + "test": "mocha --reporter dot" + }, + "dependencies": { + "crc-32": "^1.2.0", + "readable-stream": "^3.4.0" + }, + "devDependencies": { + "chai": "4.4.1", + "mocha": "9.2.2" + }, + "keywords": [ + "crc32-stream", + "crc32", + "stream", + "checksum" + ], + "publishConfig": { + "registry": "https://registry.npmjs.org/" + } +} diff --git a/backend/node_modules/events-universal/LICENSE b/backend/node_modules/events-universal/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/backend/node_modules/events-universal/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/node_modules/events-universal/README.md b/backend/node_modules/events-universal/README.md new file mode 100644 index 00000000..147c4ad2 --- /dev/null +++ b/backend/node_modules/events-universal/README.md @@ -0,0 +1,17 @@ +# events-universal + +Universal wrapper for the Node.js `events` module. + +``` +npm i events-universal +``` + +## Usage + +```js +const EventEmitter = require('events-universal') +``` + +## License + +Apache-2.0 diff --git a/backend/node_modules/events-universal/bare.js b/backend/node_modules/events-universal/bare.js new file mode 100644 index 00000000..35e6abd5 --- /dev/null +++ b/backend/node_modules/events-universal/bare.js @@ -0,0 +1 @@ +module.exports = require('bare-events') diff --git a/backend/node_modules/events-universal/default.js b/backend/node_modules/events-universal/default.js new file mode 100644 index 00000000..6f3af97c --- /dev/null +++ b/backend/node_modules/events-universal/default.js @@ -0,0 +1 @@ +module.exports = require('events') diff --git a/backend/node_modules/events-universal/index.js b/backend/node_modules/events-universal/index.js new file mode 100644 index 00000000..35e6abd5 --- /dev/null +++ b/backend/node_modules/events-universal/index.js @@ -0,0 +1 @@ +module.exports = require('bare-events') diff --git a/backend/node_modules/events-universal/package.json b/backend/node_modules/events-universal/package.json new file mode 100644 index 00000000..0e973f39 --- /dev/null +++ b/backend/node_modules/events-universal/package.json @@ -0,0 +1,39 @@ +{ + "name": "events-universal", + "version": "1.0.1", + "description": "Universal wrapper for the Node.js events module", + "exports": { + "./package": "./package.json", + ".": { + "bare": "./bare.js", + "react-native": "./react-native.js", + "default": "./default.js" + } + }, + "files": [ + "index.js", + "default.js", + "bare.js", + "react-native.js" + ], + "scripts": { + "test": "prettier . --check" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/holepunchto/events-universal.git" + }, + "author": "Holepunch", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/holepunchto/events-universal/issues" + }, + "homepage": "https://github.com/holepunchto/events-universal#readme", + "dependencies": { + "bare-events": "^2.7.0" + }, + "devDependencies": { + "prettier": "^3.4.2", + "prettier-config-holepunch": "^1.0.0" + } +} diff --git a/backend/node_modules/events-universal/react-native.js b/backend/node_modules/events-universal/react-native.js new file mode 100644 index 00000000..35e6abd5 --- /dev/null +++ b/backend/node_modules/events-universal/react-native.js @@ -0,0 +1 @@ +module.exports = require('bare-events') diff --git a/backend/node_modules/fast-fifo/LICENSE b/backend/node_modules/fast-fifo/LICENSE new file mode 100644 index 00000000..c728dc70 --- /dev/null +++ b/backend/node_modules/fast-fifo/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/backend/node_modules/fast-fifo/README.md b/backend/node_modules/fast-fifo/README.md new file mode 100644 index 00000000..bb75095e --- /dev/null +++ b/backend/node_modules/fast-fifo/README.md @@ -0,0 +1,78 @@ +# fast-fifo + +A fast fifo implementation similar to the one powering nextTick in Node.js core + +``` +npm install fast-fifo +``` + +Uses a linked list of growing fixed sized arrays to implement the FIFO to avoid +allocating a wrapper object for each item. + +## Usage + +``` js +const FIFO = require('fast-fifo') + +const q = new FIFO() + +q.push('hello') +q.push('world') + +q.shift() // returns hello +q.shift() // returns world +``` + +## API + +#### `q = new FIFO()` + +Create a new FIFO. + +#### `q.push(value)` + +Push a value to the FIFO. `value` can be anything other than undefined. + +#### `value = q.shift()` + +Return the oldest value from the FIFO. + +#### `q.clear()` + +Remove all values from the FIFO. + +#### `bool = q.isEmpty()` + +Returns `true` if the FIFO is empty and false otherwise. + +#### `value = q.peek()` + +Return the oldest value from the FIFO without shifting it out. + +#### `len = q.length` + +Get the number of entries remaining in the FIFO. + +## Benchmarks + +Included in bench.js is a simple benchmark that benchmarks this against a simple +linked list based FIFO. 
+ +On my machine the benchmark looks like this: + +``` +fifo bulk push and shift: 2881.508ms +fifo individual push and shift: 3248.437ms +fast-fifo bulk push and shift: 1606.972ms +fast-fifo individual push and shift: 1328.064ms +fifo bulk push and shift: 3266.902ms +fifo individual push and shift: 3320.944ms +fast-fifo bulk push and shift: 1858.307ms +fast-fifo individual push and shift: 1516.983ms +``` + +YMMV + +## License + +MIT diff --git a/backend/node_modules/fast-fifo/fixed-size.js b/backend/node_modules/fast-fifo/fixed-size.js new file mode 100644 index 00000000..b25be618 --- /dev/null +++ b/backend/node_modules/fast-fifo/fixed-size.js @@ -0,0 +1,39 @@ +module.exports = class FixedFIFO { + constructor (hwm) { + if (!(hwm > 0) || ((hwm - 1) & hwm) !== 0) throw new Error('Max size for a FixedFIFO should be a power of two') + this.buffer = new Array(hwm) + this.mask = hwm - 1 + this.top = 0 + this.btm = 0 + this.next = null + } + + clear () { + this.top = this.btm = 0 + this.next = null + this.buffer.fill(undefined) + } + + push (data) { + if (this.buffer[this.top] !== undefined) return false + this.buffer[this.top] = data + this.top = (this.top + 1) & this.mask + return true + } + + shift () { + const last = this.buffer[this.btm] + if (last === undefined) return undefined + this.buffer[this.btm] = undefined + this.btm = (this.btm + 1) & this.mask + return last + } + + peek () { + return this.buffer[this.btm] + } + + isEmpty () { + return this.buffer[this.btm] === undefined + } +} diff --git a/backend/node_modules/fast-fifo/index.js b/backend/node_modules/fast-fifo/index.js new file mode 100644 index 00000000..ae1fb237 --- /dev/null +++ b/backend/node_modules/fast-fifo/index.js @@ -0,0 +1,48 @@ +const FixedFIFO = require('./fixed-size') + +module.exports = class FastFIFO { + constructor (hwm) { + this.hwm = hwm || 16 + this.head = new FixedFIFO(this.hwm) + this.tail = this.head + this.length = 0 + } + + clear () { + this.head = this.tail + this.head.clear() + this.length = 0 + } + + push (val) { + this.length++ + if (!this.head.push(val)) { + const prev = this.head + this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length) + this.head.push(val) + } + } + + shift () { + if (this.length !== 0) this.length-- + const val = this.tail.shift() + if (val === undefined && this.tail.next) { + const next = this.tail.next + this.tail.next = null + this.tail = next + return this.tail.shift() + } + + return val + } + + peek () { + const val = this.tail.peek() + if (val === undefined && this.tail.next) return this.tail.next.peek() + return val + } + + isEmpty () { + return this.length === 0 + } +} diff --git a/backend/node_modules/fast-fifo/package.json b/backend/node_modules/fast-fifo/package.json new file mode 100644 index 00000000..6ef65913 --- /dev/null +++ b/backend/node_modules/fast-fifo/package.json @@ -0,0 +1,28 @@ +{ + "name": "fast-fifo", + "version": "1.3.2", + "description": "A fast fifo implementation similar to the one powering nextTick in Node.js core", + "main": "index.js", + "files": [ + "./index.js", + "./fixed-size.js" + ], + "dependencies": {}, + "devDependencies": { + "standard": "^17.1.0", + "brittle": "^3.3.2" + }, + "scripts": { + "test": "standard && brittle test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/fast-fifo.git" + }, + "author": "Mathias Buus (@mafintosh)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/fast-fifo/issues" + }, + "homepage": "https://github.com/mafintosh/fast-fifo" +} diff --git 
a/backend/node_modules/fs.realpath/LICENSE b/backend/node_modules/fs.realpath/LICENSE new file mode 100644 index 00000000..5bd884c2 --- /dev/null +++ b/backend/node_modules/fs.realpath/LICENSE @@ -0,0 +1,43 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---- + +This library bundles a version of the `fs.realpath` and `fs.realpathSync` +methods from Node.js v0.10 under the terms of the Node.js MIT license. + +Node's license follows, also included at the header of `old.js` which contains +the licensed code: + + Copyright Joyent, Inc. and other Node contributors. + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. diff --git a/backend/node_modules/fs.realpath/README.md b/backend/node_modules/fs.realpath/README.md new file mode 100644 index 00000000..a42ceac6 --- /dev/null +++ b/backend/node_modules/fs.realpath/README.md @@ -0,0 +1,33 @@ +# fs.realpath + +A backwards-compatible fs.realpath for Node v6 and above + +In Node v6, the JavaScript implementation of fs.realpath was replaced +with a faster (but less resilient) native implementation. That raises +new and platform-specific errors and cannot handle long or excessively +symlink-looping paths. + +This module handles those cases by detecting the new errors and +falling back to the JavaScript implementation. On versions of Node +prior to v6, it has no effect. + +## USAGE + +```js +var rp = require('fs.realpath') + +// async version +rp.realpath(someLongAndLoopingPath, function (er, real) { + // the ELOOP was handled, but it was a bit slower +}) + +// sync version +var real = rp.realpathSync(someLongAndLoopingPath) + +// monkeypatch at your own risk! 
+// This replaces the fs.realpath/fs.realpathSync builtins +rp.monkeypatch() + +// un-do the monkeypatching +rp.unmonkeypatch() +``` diff --git a/backend/node_modules/fs.realpath/index.js b/backend/node_modules/fs.realpath/index.js new file mode 100644 index 00000000..b09c7c7e --- /dev/null +++ b/backend/node_modules/fs.realpath/index.js @@ -0,0 +1,66 @@ +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = require('fs') +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = require('./old.js') + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { + cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er + } + } +} + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} diff --git a/backend/node_modules/fs.realpath/old.js b/backend/node_modules/fs.realpath/old.js new file mode 100644 index 00000000..b40305e7 --- /dev/null +++ b/backend/node_modules/fs.realpath/old.js @@ -0,0 +1,303 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var pathModule = require('path'); +var isWindows = process.platform === 'win32'; +var fs = require('fs'); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. 
+ var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; + + return callback; + + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); + } + } + + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } + } + } +} + +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); +} + +var normalize = pathModule.normalize; + +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} + +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. +if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} + +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + continue; + } + + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } + + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. 
+ if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; + } + + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } + + if (cache) cache[original] = p; + + return p; +}; + + +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } + + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); + } + + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } + + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. + return gotResolvedLink(cache[base]); + } + + return fs.lstat(base, gotStat); + } + + function gotStat(err, stat) { + if (err) return cb(err); + + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); + } + + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. 
+ if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); + } + } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; diff --git a/backend/node_modules/fs.realpath/package.json b/backend/node_modules/fs.realpath/package.json new file mode 100644 index 00000000..3edc57d2 --- /dev/null +++ b/backend/node_modules/fs.realpath/package.json @@ -0,0 +1,26 @@ +{ + "name": "fs.realpath", + "version": "1.0.0", + "description": "Use node's fs.realpath, but fall back to the JS implementation if the native one fails", + "main": "index.js", + "dependencies": {}, + "devDependencies": {}, + "scripts": { + "test": "tap test/*.js --cov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/fs.realpath.git" + }, + "keywords": [ + "realpath", + "fs", + "polyfill" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "old.js", + "index.js" + ] +} diff --git a/backend/node_modules/graceful-fs/LICENSE b/backend/node_modules/graceful-fs/LICENSE new file mode 100644 index 00000000..e906a25a --- /dev/null +++ b/backend/node_modules/graceful-fs/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/backend/node_modules/graceful-fs/README.md b/backend/node_modules/graceful-fs/README.md new file mode 100644 index 00000000..82d6e4da --- /dev/null +++ b/backend/node_modules/graceful-fs/README.md @@ -0,0 +1,143 @@ +# graceful-fs + +graceful-fs functions as a drop-in replacement for the fs module, +making various improvements. + +The improvements are meant to normalize behavior across different +platforms and environments, and to make filesystem access more +resilient to errors. + +## Improvements over [fs module](https://nodejs.org/api/fs.html) + +* Queues up `open` and `readdir` calls, and retries them once + something closes if there is an EMFILE error from too many file + descriptors. +* fixes `lchmod` for Node versions prior to 0.6.2. +* implements `fs.lutimes` if possible. Otherwise it becomes a noop. +* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or + `lchown` if the user isn't root. +* makes `lchmod` and `lchown` become noops, if not available. 
+* retries reading a file if `read` results in EAGAIN error. + +On Windows, it retries renaming a file for up to one second if `EACCESS` +or `EPERM` error occurs, likely because antivirus software has locked +the directory. + +## USAGE + +```javascript +// use just like fs +var fs = require('graceful-fs') + +// now go and do stuff with it... +fs.readFile('some-file-or-whatever', (err, data) => { + // Do stuff here. +}) +``` + +## Sync methods + +This module cannot intercept or handle `EMFILE` or `ENFILE` errors from sync +methods. If you use sync methods which open file descriptors then you are +responsible for dealing with any errors. + +This is a known limitation, not a bug. + +## Global Patching + +If you want to patch the global fs module (or any other fs-like +module) you can do this: + +```javascript +// Make sure to read the caveat below. +var realFs = require('fs') +var gracefulFs = require('graceful-fs') +gracefulFs.gracefulify(realFs) +``` + +This should only ever be done at the top-level application layer, in +order to delay on EMFILE errors from any fs-using dependencies. You +should **not** do this in a library, because it can cause unexpected +delays in other parts of the program. + +## Changes + +This module is fairly stable at this point, and used by a lot of +things. That being said, because it implements a subtle behavior +change in a core part of the node API, even modest changes can be +extremely breaking, and the versioning is thus biased towards +bumping the major when in doubt. + +The main change between major versions has been switching between +providing a fully-patched `fs` module vs monkey-patching the node core +builtin, and the approach by which a non-monkey-patched `fs` was +created. + +The goal is to trade `EMFILE` errors for slower fs operations. So, if +you try to open a zillion files, rather than crashing, `open` +operations will be queued up and wait for something else to `close`. + +There are advantages to each approach. Monkey-patching the fs means +that no `EMFILE` errors can possibly occur anywhere in your +application, because everything is using the same core `fs` module, +which is patched. However, it can also obviously cause undesirable +side-effects, especially if the module is loaded multiple times. + +Implementing a separate-but-identical patched `fs` module is more +surgical (and doesn't run the risk of patching multiple times), but +also imposes the challenge of keeping in sync with the core module. + +The current approach loads the `fs` module, and then creates a +lookalike object that has all the same methods, except a few that are +patched. It is safe to use in all versions of Node from 0.8 through +7.0. + +### v4 + +* Do not monkey-patch the fs module. This module may now be used as a + drop-in dep, and users can opt into monkey-patching the fs builtin + if their app requires it. + +### v3 + +* Monkey-patch fs, because the eval approach no longer works on recent + node. +* fixed possible type-error throw if rename fails on windows +* verify that we *never* get EMFILE errors +* Ignore ENOSYS from chmod/chown +* clarify that graceful-fs must be used as a drop-in + +### v2.1.0 + +* Use eval rather than monkey-patching fs. +* readdir: Always sort the results +* win32: requeue a file if error has an OK status + +### v2.0 + +* A return to monkey patching +* wrap process.cwd + +### v1.1 + +* wrap readFile +* Wrap fs.writeFile. 
+* readdir protection +* Don't clobber the fs builtin +* Handle fs.read EAGAIN errors by trying again +* Expose the curOpen counter +* No-op lchown/lchmod if not implemented +* fs.rename patch only for win32 +* Patch fs.rename to handle AV software on Windows +* Close #4 Chown should not fail on einval or eperm if non-root +* Fix isaacs/fstream#1 Only wrap fs one time +* Fix #3 Start at 1024 max files, then back off on EMFILE +* lutimes that doens't blow up on Linux +* A full on-rewrite using a queue instead of just swallowing the EMFILE error +* Wrap Read/Write streams as well + +### 1.0 + +* Update engines for node 0.6 +* Be lstat-graceful on Windows +* first diff --git a/backend/node_modules/graceful-fs/clone.js b/backend/node_modules/graceful-fs/clone.js new file mode 100644 index 00000000..dff3cc8c --- /dev/null +++ b/backend/node_modules/graceful-fs/clone.js @@ -0,0 +1,23 @@ +'use strict' + +module.exports = clone + +var getPrototypeOf = Object.getPrototypeOf || function (obj) { + return obj.__proto__ +} + +function clone (obj) { + if (obj === null || typeof obj !== 'object') + return obj + + if (obj instanceof Object) + var copy = { __proto__: getPrototypeOf(obj) } + else + var copy = Object.create(null) + + Object.getOwnPropertyNames(obj).forEach(function (key) { + Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key)) + }) + + return copy +} diff --git a/backend/node_modules/graceful-fs/graceful-fs.js b/backend/node_modules/graceful-fs/graceful-fs.js new file mode 100644 index 00000000..8d5b89e4 --- /dev/null +++ b/backend/node_modules/graceful-fs/graceful-fs.js @@ -0,0 +1,448 @@ +var fs = require('fs') +var polyfills = require('./polyfills.js') +var legacy = require('./legacy-streams.js') +var clone = require('./clone.js') + +var util = require('util') + +/* istanbul ignore next - node 0.x polyfill */ +var gracefulQueue +var previousSymbol + +/* istanbul ignore else - node 0.x polyfill */ +if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { + gracefulQueue = Symbol.for('graceful-fs.queue') + // This is used in testing by future versions + previousSymbol = Symbol.for('graceful-fs.previous') +} else { + gracefulQueue = '___graceful-fs.queue' + previousSymbol = '___graceful-fs.previous' +} + +function noop () {} + +function publishQueue(context, queue) { + Object.defineProperty(context, gracefulQueue, { + get: function() { + return queue + } + }) +} + +var debug = noop +if (util.debuglog) + debug = util.debuglog('gfs4') +else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) + debug = function() { + var m = util.format.apply(util, arguments) + m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') + console.error(m) + } + +// Once time initialization +if (!fs[gracefulQueue]) { + // This queue can be shared by multiple loaded instances + var queue = global[gracefulQueue] || [] + publishQueue(fs, queue) + + // Patch fs.close/closeSync to shared queue version, because we need + // to retry() whenever a close happens *anywhere* in the program. + // This is essential when multiple graceful-fs instances are + // in play at the same time. 
+ fs.close = (function (fs$close) { + function close (fd, cb) { + return fs$close.call(fs, fd, function (err) { + // This function uses the graceful-fs shared queue + if (!err) { + resetQueue() + } + + if (typeof cb === 'function') + cb.apply(this, arguments) + }) + } + + Object.defineProperty(close, previousSymbol, { + value: fs$close + }) + return close + })(fs.close) + + fs.closeSync = (function (fs$closeSync) { + function closeSync (fd) { + // This function uses the graceful-fs shared queue + fs$closeSync.apply(fs, arguments) + resetQueue() + } + + Object.defineProperty(closeSync, previousSymbol, { + value: fs$closeSync + }) + return closeSync + })(fs.closeSync) + + if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug(fs[gracefulQueue]) + require('assert').equal(fs[gracefulQueue].length, 0) + }) + } +} + +if (!global[gracefulQueue]) { + publishQueue(global, fs[gracefulQueue]); +} + +module.exports = patch(clone(fs)) +if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { + module.exports = patch(fs) + fs.__patched = true; +} + +function patch (fs) { + // Everything that references the open() function needs to be in here + polyfills(fs) + fs.gracefulify = patch + + fs.createReadStream = createReadStream + fs.createWriteStream = createWriteStream + var fs$readFile = fs.readFile + fs.readFile = readFile + function readFile (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$readFile(path, options, cb) + + function go$readFile (path, options, cb, startTime) { + return fs$readFile(path, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$writeFile = fs.writeFile + fs.writeFile = writeFile + function writeFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$writeFile(path, data, options, cb) + + function go$writeFile (path, data, options, cb, startTime) { + return fs$writeFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$appendFile = fs.appendFile + if (fs$appendFile) + fs.appendFile = appendFile + function appendFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$appendFile(path, data, options, cb) + + function go$appendFile (path, data, options, cb, startTime) { + return fs$appendFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$copyFile = fs.copyFile + if (fs$copyFile) + fs.copyFile = copyFile + function copyFile (src, dest, flags, cb) { + if (typeof flags === 'function') { + cb = flags + flags = 0 + } + return go$copyFile(src, dest, flags, cb) + + function go$copyFile (src, dest, flags, cb, startTime) { + return fs$copyFile(src, dest, flags, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$copyFile, [src, dest, 
flags, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$readdir = fs.readdir + fs.readdir = readdir + var noReaddirOptionVersions = /^v[0-5]\./ + function readdir (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + var go$readdir = noReaddirOptionVersions.test(process.version) + ? function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, fs$readdirCallback( + path, options, cb, startTime + )) + } + : function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, options, fs$readdirCallback( + path, options, cb, startTime + )) + } + + return go$readdir(path, options, cb) + + function fs$readdirCallback (path, options, cb, startTime) { + return function (err, files) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([ + go$readdir, + [path, options, cb], + err, + startTime || Date.now(), + Date.now() + ]) + else { + if (files && files.sort) + files.sort() + + if (typeof cb === 'function') + cb.call(this, err, files) + } + } + } + } + + if (process.version.substr(0, 4) === 'v0.8') { + var legStreams = legacy(fs) + ReadStream = legStreams.ReadStream + WriteStream = legStreams.WriteStream + } + + var fs$ReadStream = fs.ReadStream + if (fs$ReadStream) { + ReadStream.prototype = Object.create(fs$ReadStream.prototype) + ReadStream.prototype.open = ReadStream$open + } + + var fs$WriteStream = fs.WriteStream + if (fs$WriteStream) { + WriteStream.prototype = Object.create(fs$WriteStream.prototype) + WriteStream.prototype.open = WriteStream$open + } + + Object.defineProperty(fs, 'ReadStream', { + get: function () { + return ReadStream + }, + set: function (val) { + ReadStream = val + }, + enumerable: true, + configurable: true + }) + Object.defineProperty(fs, 'WriteStream', { + get: function () { + return WriteStream + }, + set: function (val) { + WriteStream = val + }, + enumerable: true, + configurable: true + }) + + // legacy names + var FileReadStream = ReadStream + Object.defineProperty(fs, 'FileReadStream', { + get: function () { + return FileReadStream + }, + set: function (val) { + FileReadStream = val + }, + enumerable: true, + configurable: true + }) + var FileWriteStream = WriteStream + Object.defineProperty(fs, 'FileWriteStream', { + get: function () { + return FileWriteStream + }, + set: function (val) { + FileWriteStream = val + }, + enumerable: true, + configurable: true + }) + + function ReadStream (path, options) { + if (this instanceof ReadStream) + return fs$ReadStream.apply(this, arguments), this + else + return ReadStream.apply(Object.create(ReadStream.prototype), arguments) + } + + function ReadStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + if (that.autoClose) + that.destroy() + + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + that.read() + } + }) + } + + function WriteStream (path, options) { + if (this instanceof WriteStream) + return fs$WriteStream.apply(this, arguments), this + else + return WriteStream.apply(Object.create(WriteStream.prototype), arguments) + } + + function WriteStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + that.destroy() + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + } + }) + } + + function createReadStream (path, options) { + return new fs.ReadStream(path, options) + } + + 
function createWriteStream (path, options) { + return new fs.WriteStream(path, options) + } + + var fs$open = fs.open + fs.open = open + function open (path, flags, mode, cb) { + if (typeof mode === 'function') + cb = mode, mode = null + + return go$open(path, flags, mode, cb) + + function go$open (path, flags, mode, cb, startTime) { + return fs$open(path, flags, mode, function (err, fd) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + return fs +} + +function enqueue (elem) { + debug('ENQUEUE', elem[0].name, elem[1]) + fs[gracefulQueue].push(elem) + retry() +} + +// keep track of the timeout between retry() calls +var retryTimer + +// reset the startTime and lastTime to now +// this resets the start of the 60 second overall timeout as well as the +// delay between attempts so that we'll retry these jobs sooner +function resetQueue () { + var now = Date.now() + for (var i = 0; i < fs[gracefulQueue].length; ++i) { + // entries that are only a length of 2 are from an older version, don't + // bother modifying those since they'll be retried anyway. + if (fs[gracefulQueue][i].length > 2) { + fs[gracefulQueue][i][3] = now // startTime + fs[gracefulQueue][i][4] = now // lastTime + } + } + // call retry to make sure we're actively processing the queue + retry() +} + +function retry () { + // clear the timer and remove it to help prevent unintended concurrency + clearTimeout(retryTimer) + retryTimer = undefined + + if (fs[gracefulQueue].length === 0) + return + + var elem = fs[gracefulQueue].shift() + var fn = elem[0] + var args = elem[1] + // these items may be unset if they were added by an older graceful-fs + var err = elem[2] + var startTime = elem[3] + var lastTime = elem[4] + + // if we don't have a startTime we have no way of knowing if we've waited + // long enough, so go ahead and retry this item now + if (startTime === undefined) { + debug('RETRY', fn.name, args) + fn.apply(null, args) + } else if (Date.now() - startTime >= 60000) { + // it's been more than 60 seconds total, bail now + debug('TIMEOUT', fn.name, args) + var cb = args.pop() + if (typeof cb === 'function') + cb.call(null, err) + } else { + // the amount of time between the last attempt and right now + var sinceAttempt = Date.now() - lastTime + // the amount of time between when we first tried, and when we last tried + // rounded up to at least 1 + var sinceStart = Math.max(lastTime - startTime, 1) + // backoff. 
wait longer than the total time we've been retrying, but only + // up to a maximum of 100ms + var desiredDelay = Math.min(sinceStart * 1.2, 100) + // it's been long enough since the last retry, do it again + if (sinceAttempt >= desiredDelay) { + debug('RETRY', fn.name, args) + fn.apply(null, args.concat([startTime])) + } else { + // if we can't do this job yet, push it to the end of the queue + // and let the next iteration check again + fs[gracefulQueue].push(elem) + } + } + + // schedule our next run if one isn't already scheduled + if (retryTimer === undefined) { + retryTimer = setTimeout(retry, 0) + } +} diff --git a/backend/node_modules/graceful-fs/legacy-streams.js b/backend/node_modules/graceful-fs/legacy-streams.js new file mode 100644 index 00000000..d617b50f --- /dev/null +++ b/backend/node_modules/graceful-fs/legacy-streams.js @@ -0,0 +1,118 @@ +var Stream = require('stream').Stream + +module.exports = legacy + +function legacy (fs) { + return { + ReadStream: ReadStream, + WriteStream: WriteStream + } + + function ReadStream (path, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path, options); + + Stream.call(this); + + var self = this; + + this.path = path; + this.fd = null; + this.readable = true; + this.paused = false; + + this.flags = 'r'; + this.mode = 438; /*=0666*/ + this.bufferSize = 64 * 1024; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.encoding) this.setEncoding(this.encoding); + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.end === undefined) { + this.end = Infinity; + } else if ('number' !== typeof this.end) { + throw TypeError('end must be a Number'); + } + + if (this.start > this.end) { + throw new Error('start must be <= end'); + } + + this.pos = this.start; + } + + if (this.fd !== null) { + process.nextTick(function() { + self._read(); + }); + return; + } + + fs.open(this.path, this.flags, this.mode, function (err, fd) { + if (err) { + self.emit('error', err); + self.readable = false; + return; + } + + self.fd = fd; + self.emit('open', fd); + self._read(); + }) + } + + function WriteStream (path, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path, options); + + Stream.call(this); + + this.path = path; + this.fd = null; + this.writable = true; + + this.flags = 'w'; + this.encoding = 'binary'; + this.mode = 438; /*=0666*/ + this.bytesWritten = 0; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.start < 0) { + throw new Error('start must be >= zero'); + } + + this.pos = this.start; + } + + this.busy = false; + this._queue = []; + + if (this.fd === null) { + this._open = fs.open; + this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); + this.flush(); + } + } +} diff --git a/backend/node_modules/graceful-fs/package.json b/backend/node_modules/graceful-fs/package.json new file mode 100644 index 00000000..87babf02 --- /dev/null +++ b/backend/node_modules/graceful-fs/package.json @@ -0,0 +1,53 @@ +{ + "name": 
"graceful-fs", + "description": "A drop-in replacement for fs, making various improvements.", + "version": "4.2.11", + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-graceful-fs" + }, + "main": "graceful-fs.js", + "directories": { + "test": "test" + }, + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "test": "nyc --silent node test.js | tap -c -", + "posttest": "nyc report" + }, + "keywords": [ + "fs", + "module", + "reading", + "retry", + "retries", + "queue", + "error", + "errors", + "handling", + "EMFILE", + "EAGAIN", + "EINVAL", + "EPERM", + "EACCESS" + ], + "license": "ISC", + "devDependencies": { + "import-fresh": "^2.0.0", + "mkdirp": "^0.5.0", + "rimraf": "^2.2.8", + "tap": "^16.3.4" + }, + "files": [ + "fs.js", + "graceful-fs.js", + "legacy-streams.js", + "polyfills.js", + "clone.js" + ], + "tap": { + "reporter": "classic" + } +} diff --git a/backend/node_modules/graceful-fs/polyfills.js b/backend/node_modules/graceful-fs/polyfills.js new file mode 100644 index 00000000..453f1a9e --- /dev/null +++ b/backend/node_modules/graceful-fs/polyfills.js @@ -0,0 +1,355 @@ +var constants = require('constants') + +var origCwd = process.cwd +var cwd = null + +var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform + +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd +} +try { + process.cwd() +} catch (er) {} + +// This check is needed until node.js 12 is required +if (typeof process.chdir === 'function') { + var chdir = process.chdir + process.chdir = function (d) { + cwd = null + chdir.call(process, d) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) +} + +module.exports = patch + +function patch (fs) { + // (re-)implement some things that are known busted or missing. + + // lchmod, broken prior to 0.6.2 + // back-port the fix here. + if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + patchLchmod(fs) + } + + // lutimes implementation, or no-op + if (!fs.lutimes) { + patchLutimes(fs) + } + + // https://github.com/isaacs/node-graceful-fs/issues/4 + // Chown should not fail on einval or eperm if non-root. + // It should not fail on enosys ever, as this just indicates + // that a fs doesn't support the intended operation. 
+ + fs.chown = chownFix(fs.chown) + fs.fchown = chownFix(fs.fchown) + fs.lchown = chownFix(fs.lchown) + + fs.chmod = chmodFix(fs.chmod) + fs.fchmod = chmodFix(fs.fchmod) + fs.lchmod = chmodFix(fs.lchmod) + + fs.chownSync = chownFixSync(fs.chownSync) + fs.fchownSync = chownFixSync(fs.fchownSync) + fs.lchownSync = chownFixSync(fs.lchownSync) + + fs.chmodSync = chmodFixSync(fs.chmodSync) + fs.fchmodSync = chmodFixSync(fs.fchmodSync) + fs.lchmodSync = chmodFixSync(fs.lchmodSync) + + fs.stat = statFix(fs.stat) + fs.fstat = statFix(fs.fstat) + fs.lstat = statFix(fs.lstat) + + fs.statSync = statFixSync(fs.statSync) + fs.fstatSync = statFixSync(fs.fstatSync) + fs.lstatSync = statFixSync(fs.lstatSync) + + // if lchmod/lchown do not exist, then make them no-ops + if (fs.chmod && !fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + if (cb) process.nextTick(cb) + } + fs.lchmodSync = function () {} + } + if (fs.chown && !fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + if (cb) process.nextTick(cb) + } + fs.lchownSync = function () {} + } + + // on Windows, A/V software can lock the directory, causing this + // to fail with an EACCES or EPERM if the directory contains newly + // created files. Try again on failure, for up to 60 seconds. + + // Set the timeout this long because some Windows Anti-Virus, such as Parity + // bit9, may lock files for up to a minute, causing npm package install + // failures. Also, take care to yield the scheduler. Windows scheduling gives + // CPU to a busy looping process, which can cause the program causing the lock + // contention to be starved of CPU by node, so the contention doesn't resolve. + if (platform === "win32") { + fs.rename = typeof fs.rename !== 'function' ? fs.rename + : (function (fs$rename) { + function rename (from, to, cb) { + var start = Date.now() + var backoff = 0; + fs$rename(from, to, function CB (er) { + if (er + && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") + && Date.now() - start < 60000) { + setTimeout(function() { + fs.stat(to, function (stater, st) { + if (stater && stater.code === "ENOENT") + fs$rename(from, to, CB); + else + cb(er) + }) + }, backoff) + if (backoff < 100) + backoff += 10; + return; + } + if (cb) cb(er) + }) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) + return rename + })(fs.rename) + } + + // if read() returns EAGAIN, then just try it again. + fs.read = typeof fs.read !== 'function' ? fs.read + : (function (fs$read) { + function read (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) + } + } + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + + // This ensures `util.promisify` works as it does for native `fs.read`. + if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) + return read + })(fs.read) + + fs.readSync = typeof fs.readSync !== 'function' ? 
fs.readSync + : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { + var eagCounter = 0 + while (true) { + try { + return fs$readSync.call(fs, fd, buffer, offset, length, position) + } catch (er) { + if (er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + continue + } + throw er + } + } + }})(fs.readSync) + + function patchLchmod (fs) { + fs.lchmod = function (path, mode, callback) { + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + if (callback) callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + if (callback) callback(err || err2) + }) + }) + }) + } + + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) + + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + var threw = true + var ret + try { + ret = fs.fchmodSync(fd, mode) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + } + + function patchLutimes (fs) { + if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + if (er) { + if (cb) cb(er) + return + } + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + if (cb) cb(er || er2) + }) + }) + }) + } + + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + var ret + var threw = true + try { + ret = fs.futimesSync(fd, at, mt) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + + } else if (fs.futimes) { + fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } + fs.lutimesSync = function () {} + } + } + + function chmodFix (orig) { + if (!orig) return orig + return function (target, mode, cb) { + return orig.call(fs, target, mode, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chmodFixSync (orig) { + if (!orig) return orig + return function (target, mode) { + try { + return orig.call(fs, target, mode) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } + + + function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } + + function statFix (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + function callback (er, stats) { + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + if (cb) cb.apply(this, arguments) + } + return options ? 
orig.call(fs, target, options, callback) + : orig.call(fs, target, callback) + } + } + + function statFixSync (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options) { + var stats = options ? orig.call(fs, target, options) + : orig.call(fs, target) + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + return stats; + } + } + + // ENOSYS means that the fs doesn't support the op. Just ignore + // that, because it doesn't matter. + // + // if there's no getuid, or if getuid() is something other + // than 0, and the error is EINVAL or EPERM, then just ignore + // it. + // + // This specific case is a silent failure in cp, install, tar, + // and most other unix tools that manage permissions. + // + // When running as root, or if other types of errors are + // encountered, then it's strict. + function chownErOk (er) { + if (!er) + return true + + if (er.code === "ENOSYS") + return true + + var nonroot = !process.getuid || process.getuid() !== 0 + if (nonroot) { + if (er.code === "EINVAL" || er.code === "EPERM") + return true + } + + return false + } +} diff --git a/backend/node_modules/inflight/LICENSE b/backend/node_modules/inflight/LICENSE new file mode 100644 index 00000000..05eeeb88 --- /dev/null +++ b/backend/node_modules/inflight/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/backend/node_modules/inflight/README.md b/backend/node_modules/inflight/README.md new file mode 100644 index 00000000..6dc89291 --- /dev/null +++ b/backend/node_modules/inflight/README.md @@ -0,0 +1,37 @@ +# inflight + +Add callbacks to requests in flight to avoid async duplication + +## USAGE + +```javascript +var inflight = require('inflight') + +// some request that does some stuff +function req(key, callback) { + // key is any random string. like a url or filename or whatever. + // + // will return either a falsey value, indicating that the + // request for this key is already in flight, or a new callback + // which when called will call all callbacks passed to inflightk + // with the same key + callback = inflight(key, callback) + + // If we got a falsey value back, then there's already a req going + if (!callback) return + + // this is where you'd fetch the url or whatever + // callback is also once()-ified, so it can safely be assigned + // to multiple events etc. First call wins. 
+ setTimeout(function() { + callback(null, key) + }, 100) +} + +// only assigns a single setTimeout +// when it dings, all cbs get called +req('foo', cb1) +req('foo', cb2) +req('foo', cb3) +req('foo', cb4) +``` diff --git a/backend/node_modules/inflight/inflight.js b/backend/node_modules/inflight/inflight.js new file mode 100644 index 00000000..48202b3c --- /dev/null +++ b/backend/node_modules/inflight/inflight.js @@ -0,0 +1,54 @@ +var wrappy = require('wrappy') +var reqs = Object.create(null) +var once = require('once') + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. + // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. + cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} diff --git a/backend/node_modules/inflight/package.json b/backend/node_modules/inflight/package.json new file mode 100644 index 00000000..6084d350 --- /dev/null +++ b/backend/node_modules/inflight/package.json @@ -0,0 +1,29 @@ +{ + "name": "inflight", + "version": "1.0.6", + "description": "Add callbacks to requests in flight to avoid async duplication", + "main": "inflight.js", + "files": [ + "inflight.js" + ], + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.1.2" + }, + "scripts": { + "test": "tap test.js --100" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/inflight.git" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "bugs": { + "url": "https://github.com/isaacs/inflight/issues" + }, + "homepage": "https://github.com/isaacs/inflight", + "license": "ISC" +} diff --git a/backend/node_modules/isarray/.npmignore b/backend/node_modules/isarray/.npmignore new file mode 100644 index 00000000..3c3629e6 --- /dev/null +++ b/backend/node_modules/isarray/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/backend/node_modules/isarray/.travis.yml b/backend/node_modules/isarray/.travis.yml new file mode 100644 index 00000000..cc4dba29 --- /dev/null +++ b/backend/node_modules/isarray/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/backend/node_modules/isarray/Makefile b/backend/node_modules/isarray/Makefile new file mode 100644 index 00000000..787d56e1 --- /dev/null +++ b/backend/node_modules/isarray/Makefile @@ -0,0 +1,6 @@ + +test: + @node_modules/.bin/tape test.js + +.PHONY: test + diff --git a/backend/node_modules/isarray/README.md b/backend/node_modules/isarray/README.md new file mode 100644 index 00000000..16d2c59c --- /dev/null +++ b/backend/node_modules/isarray/README.md @@ -0,0 +1,60 @@ + +# isarray + +`Array#isArray` for older browsers. + +[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) +[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) + +[![browser support](https://ci.testling.com/juliangruber/isarray.png) +](https://ci.testling.com/juliangruber/isarray) + +## Usage + +```js +var isArray = require('isarray'); + +console.log(isArray([])); // => true +console.log(isArray({})); // => false +``` + +## Installation + +With [npm](http://npmjs.org) do + +```bash +$ npm install isarray +``` + +Then bundle for the browser with +[browserify](https://github.com/substack/browserify). + +With [component](http://component.io) do + +```bash +$ component install juliangruber/isarray +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/backend/node_modules/isarray/component.json b/backend/node_modules/isarray/component.json new file mode 100644 index 00000000..9e31b683 --- /dev/null +++ b/backend/node_modules/isarray/component.json @@ -0,0 +1,19 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : "juliangruber/isarray", + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : [ + "index.js" + ], + "dependencies" : {}, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/backend/node_modules/isarray/index.js b/backend/node_modules/isarray/index.js new file mode 100644 index 00000000..a57f6349 --- /dev/null +++ b/backend/node_modules/isarray/index.js @@ -0,0 +1,5 @@ +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; diff --git a/backend/node_modules/isarray/package.json b/backend/node_modules/isarray/package.json new file mode 100644 index 00000000..1a4317a9 --- /dev/null +++ b/backend/node_modules/isarray/package.json @@ -0,0 +1,45 @@ +{ + "name": "isarray", + "description": "Array#isArray for older browsers", + "version": "1.0.0", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/isarray.git" + }, + "homepage": "https://github.com/juliangruber/isarray", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "tape": "~2.13.4" + }, + "keywords": [ + "browser", + "isarray", + "array" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "scripts": { + "test": "tape test.js" + } +} diff --git a/backend/node_modules/isarray/test.js b/backend/node_modules/isarray/test.js new file mode 100644 index 00000000..e0c3444d --- /dev/null +++ b/backend/node_modules/isarray/test.js @@ -0,0 +1,20 @@ +var isArray = require('./'); +var test = require('tape'); + +test('is array', function(t){ + t.ok(isArray([])); + t.notOk(isArray({})); + t.notOk(isArray(null)); + t.notOk(isArray(false)); + + var obj = {}; + obj[0] = true; + t.notOk(isArray(obj)); + + var arr = []; + arr.foo = 'bar'; + t.ok(isArray(arr)); + + t.end(); +}); + diff --git a/backend/node_modules/lazystream/LICENSE b/backend/node_modules/lazystream/LICENSE new file mode 100644 index 00000000..982db139 --- /dev/null +++ b/backend/node_modules/lazystream/LICENSE @@ -0,0 +1,23 @@ +Copyright (c) 2013 J. Pommerening, contributors. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + diff --git a/backend/node_modules/lazystream/README.md b/backend/node_modules/lazystream/README.md new file mode 100644 index 00000000..847f821e --- /dev/null +++ b/backend/node_modules/lazystream/README.md @@ -0,0 +1,114 @@ +# Lazy Streams + +> *Create streams lazily when they are read from or written to.* +> `lazystream: 1.0.1` + +## Why? + +Sometimes you feel the itch to open *all the files* at once. You want to pass a bunch of streams around, so the consumer does not need to worry where the data comes from. +From a software design point-of-view this sounds entirely reasonable. Then there is that neat little function `fs.createReadStream()` that opens a file and gives you a nice `fs.ReadStream` to pass around, so you use what the mighty creator deities of node bestowed upon you. + +> `Error: EMFILE, too many open files` +> ─ *node* + +This package provides two classes based on the node's Streams3 API (courtesy of `readable-stream` to ensure a stable version). + +## Class: lazystream.Readable + +A wrapper for readable streams. Extends [`stream.PassThrough`](http://nodejs.org/api/stream.html#stream_class_stream_passthrough). + +### new lazystream.Readable(fn [, options]) + +* `fn` *{Function}* + The function that the lazy stream will call to obtain the stream to actually read from. +* `options` *{Object}* + Options for the underlying `PassThrough` stream, accessible by `fn`. + +Creates a new readable stream. Once the stream is accessed (for example when you call its `read()` method, or attach a `data`-event listener) the `fn` function is called with the outer `lazystream.Readable` instance bound to `this`. + +If you pass an `options` object to the constuctor, you can access it in your `fn` function. + +```javascript +new lazystream.Readable(function (options) { + return fs.createReadStream('/dev/urandom'); +}); +``` + +## Class: lazystream.Writable + +A wrapper for writable streams. Extends [`stream.PassThrough`](http://nodejs.org/api/stream.html#stream_class_stream_passthrough). + +### new lazystream.Writable(fn [, options]) + +* `fn` *{Function}* + The function that the lazy stream will call to obtain the stream to actually write to. +* `options` *{Object}* + Options for the underlying `PassThrough` stream, accessible by `fn`. + +Creates a new writable stream. Just like the one above but for writable streams. + +```javascript +new lazystream.Writable(function () { + return fs.createWriteStream('/dev/null'); +}); +``` + +## Install + +```console +$ npm install lazystream --save +lazystream@1.0.1 node_modules/lazystream +└── readable-stream@2.0.5 +``` + +## Changelog + +### v1.0.1 + +- [#3](https://github.com/jpommerening/node-lazystream/issues/3): (finally) fixed a long-standing publishing error + +### v1.0.0 + +- [#2](https://github.com/jpommerening/node-lazystream/issues/2): [unconditionally](https://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html) use `readable-stream` _2.x_. 
+ +### v0.2.0 + +- [#1](https://github.com/jpommerening/node-lazystream/pull/1): error events are now propagated + +### v0.1.0 + +- _(this was the first release)_ + +## Contributing + +Fork it, branch it, send me a pull request. We'll work out the rest together. + +## Credits + +[Chris Talkington](https://github.com/ctalkington) and his [node-archiver](https://github.com/ctalkington/node-archiver) for providing a use-case. + +## [License](LICENSE) + +Copyright (c) 2013 J. Pommerening, contributors. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + diff --git a/backend/node_modules/lazystream/lib/lazystream.js b/backend/node_modules/lazystream/lib/lazystream.js new file mode 100644 index 00000000..d9f61709 --- /dev/null +++ b/backend/node_modules/lazystream/lib/lazystream.js @@ -0,0 +1,54 @@ +var util = require('util'); +var PassThrough = require('readable-stream/passthrough'); + +module.exports = { + Readable: Readable, + Writable: Writable +}; + +util.inherits(Readable, PassThrough); +util.inherits(Writable, PassThrough); + +// Patch the given method of instance so that the callback +// is executed once, before the actual method is called the +// first time. 
+function beforeFirstCall(instance, method, callback) { + instance[method] = function() { + delete instance[method]; + callback.apply(this, arguments); + return this[method].apply(this, arguments); + }; +} + +function Readable(fn, options) { + if (!(this instanceof Readable)) + return new Readable(fn, options); + + PassThrough.call(this, options); + + beforeFirstCall(this, '_read', function() { + var source = fn.call(this, options); + var emit = this.emit.bind(this, 'error'); + source.on('error', emit); + source.pipe(this); + }); + + this.emit('readable'); +} + +function Writable(fn, options) { + if (!(this instanceof Writable)) + return new Writable(fn, options); + + PassThrough.call(this, options); + + beforeFirstCall(this, '_write', function() { + var destination = fn.call(this, options); + var emit = this.emit.bind(this, 'error'); + destination.on('error', emit); + this.pipe(destination); + }); + + this.emit('writable'); +} + diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/.travis.yml b/backend/node_modules/lazystream/node_modules/readable-stream/.travis.yml new file mode 100644 index 00000000..f62cdac0 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/.travis.yml @@ -0,0 +1,34 @@ +sudo: false +language: node_js +before_install: + - (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: NPM_LEGACY=true + - node_js: '0.10' + env: NPM_LEGACY=true + - node_js: '0.11' + env: NPM_LEGACY=true + - node_js: '0.12' + env: NPM_LEGACY=true + - node_js: 1 + env: NPM_LEGACY=true + - node_js: 2 + env: NPM_LEGACY=true + - node_js: 3 + env: NPM_LEGACY=true + - node_js: 4 + - node_js: 5 + - node_js: 6 + - node_js: 7 + - node_js: 8 + - node_js: 9 +script: "npm run test" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/CONTRIBUTING.md b/backend/node_modules/lazystream/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 00000000..f478d58d --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. 
+ +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/GOVERNANCE.md b/backend/node_modules/lazystream/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 00000000..16ffb93f --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. 
+ +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. 
diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/LICENSE b/backend/node_modules/lazystream/node_modules/readable-stream/LICENSE new file mode 100644 index 00000000..2873b3b2 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/README.md b/backend/node_modules/lazystream/node_modules/readable-stream/README.md new file mode 100644 index 00000000..f1c5a931 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.17.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.17.0/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/backend/node_modules/lazystream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 00000000..83275f19 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? +* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. 
Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/duplex-browser.js b/backend/node_modules/lazystream/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 00000000..f8b2db83 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/duplex.js b/backend/node_modules/lazystream/node_modules/readable-stream/duplex.js new file mode 100644 index 00000000..46924cbf --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_duplex.js b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000..57003c32 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_passthrough.js b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000..612edb4d --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_readable.js b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000..3af95cb2 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { hasUnpiped: false }); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_transform.js b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000..fcfc105a --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. 
For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
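// Illustrative sketch of the contract described above — not part of the
// vendored readable-stream source. An implementation class (or the
// options.transform shortcut used here) pushes zero or more output chunks,
// then invokes the callback.
var NodeTransform = require('stream').Transform;
var upcase = new NodeTransform({
  transform: function (chunk, encoding, cb) {
    this.push(chunk.toString().toUpperCase()); // zero or more push() calls
    cb();                                      // signal that this chunk is done
  }
});
process.stdin.pipe(upcase).pipe(process.stdout);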
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_writable.js b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000..e1e897ff --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,685 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
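// Illustrative sketch of the contract in the header comment above — not part
// of the vendored readable-stream source. Supplying write() (i.e. _write) is
// enough; buffering and the 'drain' event are handled by the base class.
var NodeWritable = require('stream').Writable;
var sink = new NodeWritable({
  write: function (chunk, encoding, cb) {
    setImmediate(function () { cb(); }); // async "persist", then signal completion
  }
});
sink.write(Buffer.from('hello '));
sink.end('world');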
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. 
+ // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. 
+ + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/BufferList.js b/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 00000000..5e080976 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,78 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length 
=== 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/destroy.js b/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 00000000..85a82140 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,84 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + pna.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, this, err); + } + } + + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + pna.nextTick(emitErrorNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, _this, err); + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git 
a/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 00000000..9332a3fd --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/stream.js b/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 00000000..ce2ad5b6 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/package.json b/backend/node_modules/lazystream/node_modules/readable-stream/package.json new file mode 100644 index 00000000..514c178e --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/package.json @@ -0,0 +1,52 @@ +{ + "name": "readable-stream", + "version": "2.3.8", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "scripts": { + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/passthrough.js b/backend/node_modules/lazystream/node_modules/readable-stream/passthrough.js new file mode 100644 index 00000000..ffd791d7 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/readable-browser.js b/backend/node_modules/lazystream/node_modules/readable-stream/readable-browser.js new file mode 100644 index 00000000..e5037259 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git 
a/backend/node_modules/lazystream/node_modules/readable-stream/readable.js b/backend/node_modules/lazystream/node_modules/readable-stream/readable.js new file mode 100644 index 00000000..ec89ec53 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/transform.js b/backend/node_modules/lazystream/node_modules/readable-stream/transform.js new file mode 100644 index 00000000..b1baba26 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/writable-browser.js b/backend/node_modules/lazystream/node_modules/readable-stream/writable-browser.js new file mode 100644 index 00000000..ebdde6a8 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/backend/node_modules/lazystream/node_modules/readable-stream/writable.js b/backend/node_modules/lazystream/node_modules/readable-stream/writable.js new file mode 100644 index 00000000..3211a6f8 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/backend/node_modules/lazystream/node_modules/safe-buffer/LICENSE b/backend/node_modules/lazystream/node_modules/safe-buffer/LICENSE new file mode 100644 index 00000000..0c068cee --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/backend/node_modules/lazystream/node_modules/safe-buffer/README.md b/backend/node_modules/lazystream/node_modules/safe-buffer/README.md new file mode 100644 index 00000000..e9a81afd --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. 
+ +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
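The general shape of that kind of fix is to validate the argument type before it can ever reach the `Buffer` constructor; roughly (a sketch of the idea, not the actual patch):

```js
function toBuffer (value) {
  if (Buffer.isBuffer(value)) return value
  if (typeof value === 'string') return new Buffer(value)
  // numbers (and everything else) never reach `new Buffer(number)`
  throw new TypeError('expected a string or Buffer, got ' + typeof value)
}
```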
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/backend/node_modules/lazystream/node_modules/safe-buffer/index.d.ts b/backend/node_modules/lazystream/node_modules/safe-buffer/index.d.ts new file mode 100644 index 00000000..e9fed809 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: 
boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. 
+ * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/backend/node_modules/lazystream/node_modules/safe-buffer/index.js b/backend/node_modules/lazystream/node_modules/safe-buffer/index.js new file mode 100644 index 00000000..22438dab --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/backend/node_modules/lazystream/node_modules/safe-buffer/package.json b/backend/node_modules/lazystream/node_modules/safe-buffer/package.json new file 
mode 100644 index 00000000..623fbc3f --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.1.2", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/backend/node_modules/lazystream/node_modules/string_decoder/.travis.yml b/backend/node_modules/lazystream/node_modules/string_decoder/.travis.yml new file mode 100644 index 00000000..3347a725 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 + env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/backend/node_modules/lazystream/node_modules/string_decoder/LICENSE b/backend/node_modules/lazystream/node_modules/string_decoder/LICENSE new file mode 100644 index 00000000..778edb20 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/backend/node_modules/lazystream/node_modules/string_decoder/README.md b/backend/node_modules/lazystream/node_modules/string_decoder/README.md new file mode 100644 index 00000000..5fd58315 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. 
diff --git a/backend/node_modules/lazystream/node_modules/string_decoder/lib/string_decoder.js b/backend/node_modules/lazystream/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 00000000..2e89e63f --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. 
+exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. 
+function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. +function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} \ No newline at end of file diff --git a/backend/node_modules/lazystream/node_modules/string_decoder/package.json b/backend/node_modules/lazystream/node_modules/string_decoder/package.json new file mode 100644 index 00000000..518c3eb9 --- /dev/null +++ b/backend/node_modules/lazystream/node_modules/string_decoder/package.json @@ -0,0 +1,31 @@ +{ + "name": "string_decoder", + "version": "1.1.1", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/backend/node_modules/lazystream/package.json b/backend/node_modules/lazystream/package.json new file mode 100644 index 00000000..94565e4e --- /dev/null +++ b/backend/node_modules/lazystream/package.json @@ -0,0 +1,46 @@ +{ + "name": "lazystream", + "version": "1.0.1", + "description": "Open Node Streams on demand.", + "homepage": "https://github.com/jpommerening/node-lazystream", + "author": { + "name": "Jonas Pommerening", + "email": "jonas.pommerening@gmail.com", + "url": "https://npmjs.org/~jpommerening" + }, + "contributors": [ + "Mario Casciaro " + ], + "repository": { + "type": "git", + "url": "https://github.com/jpommerening/node-lazystream.git" + }, + "bugs": { + "url": "https://github.com/jpommerening/node-lazystream/issues" + }, + "license": "MIT", + "main": "lib/lazystream.js", + "engines": { + "node": ">= 0.6.3" + }, + "scripts": { + "test": "nodeunit test/readable_test.js test/writable_test.js test/pipe_test.js test/fs_test.js" + }, + "files": [ + "lib/lazystream.js", + "test/*.js", + "test/*.md" + ], + "dependencies": { + "readable-stream": "^2.0.5" + }, + "devDependencies": { + "nodeunit": "^0.9.1" + }, + "keywords": [ + "emfile", + "lazy", + "streams", + "stream" + ] +} diff --git a/backend/node_modules/lazystream/test/data.md b/backend/node_modules/lazystream/test/data.md new file mode 100644 index 00000000..fc482220 --- /dev/null +++ b/backend/node_modules/lazystream/test/data.md @@ -0,0 +1,13 @@ +> Never mind, hey, this is really exciting, so much to find out about, so much to +> look forward to, I'm quite dizzy with anticipation . . . Or is it the wind? +> +> There really is a lot of that now, isn't there? And wow! Hey! What's this thing +> suddenly coming toward me very fast? Very, very fast. So big and flat and round, +> it needs a big wide-sounding name like . . . ow . . . ound . . . round . . . +> ground! That's it! That's a good name- ground! +> +> I wonder if it will be friends with me? +> +> Hello Ground! + +And the rest, after a sudden wet thud, was silence. 
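The tests that follow exercise the core idea from lazystream's package description above ("Open Node Streams on demand"): the wrapped factory is not called until the stream is actually read or written, so file descriptors stay closed until they are needed. A minimal usage sketch (assuming a local file named `example.txt`; not taken from the package's own docs):

```js
var lazystream = require('lazystream')
var fs = require('fs')

// fs.createReadStream is deferred until the first read, so queuing many of
// these does not open many file descriptors up front (helps avoid EMFILE).
var lazy = new lazystream.Readable(function () {
  return fs.createReadStream('example.txt')
})

lazy.pipe(process.stdout)
```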
diff --git a/backend/node_modules/lazystream/test/fs_test.js b/backend/node_modules/lazystream/test/fs_test.js new file mode 100644 index 00000000..149b1c4a --- /dev/null +++ b/backend/node_modules/lazystream/test/fs_test.js @@ -0,0 +1,69 @@ + +var stream = require('../lib/lazystream'); +var fs = require('fs'); +var tmpDir = 'test/tmp/'; +var readFile = 'test/data.md'; +var writeFile = tmpDir + 'data.md'; + +exports.fs = { + readwrite: function(test) { + var readfd, writefd; + + var readable = new stream.Readable(function() { + return fs.createReadStream(readFile) + .on('open', function(fd) { + readfd = fd; + }) + .on('close', function() { + readfd = undefined; + step(); + }); + }); + + var writable = new stream.Writable(function() { + return fs.createWriteStream(writeFile) + .on('open', function(fd) { + writefd = fd; + }) + .on('close', function() { + writefd = undefined; + step(); + }); + }); + + test.expect(3); + + test.equal(readfd, undefined, 'Input file should not be opened until read'); + test.equal(writefd, undefined, 'Output file should not be opened until write'); + + if (!fs.existsSync(tmpDir)) { + fs.mkdirSync(tmpDir); + } + if (fs.existsSync(writeFile)) { + fs.unlinkSync(writeFile); + } + + readable.on('end', function() { step(); }); + writable.on('end', function() { step(); }); + + var steps = 0; + function step() { + steps += 1; + if (steps == 4) { + var input = fs.readFileSync(readFile); + var output = fs.readFileSync(writeFile); + + test.ok(input >= output && input <= output, 'Should be equal'); + + fs.unlinkSync(writeFile); + fs.rmdirSync(tmpDir); + + test.done(); + } + }; + + readable.pipe(writable); + } +}; + + diff --git a/backend/node_modules/lazystream/test/helper.js b/backend/node_modules/lazystream/test/helper.js new file mode 100644 index 00000000..9d41191d --- /dev/null +++ b/backend/node_modules/lazystream/test/helper.js @@ -0,0 +1,39 @@ + +var _Readable = require('readable-stream/readable'); +var _Writable = require('readable-stream/writable'); +var util = require('util'); + +module.exports = { + DummyReadable: DummyReadable, + DummyWritable: DummyWritable +}; + +function DummyReadable(strings) { + _Readable.call(this); + this.strings = strings; + this.emit('readable'); +} + +util.inherits(DummyReadable, _Readable); + +DummyReadable.prototype._read = function _read(n) { + if (this.strings.length) { + this.push(new Buffer(this.strings.shift())); + } else { + this.push(null); + } +}; + +function DummyWritable(strings) { + _Writable.call(this); + this.strings = strings; + this.emit('writable'); +} + +util.inherits(DummyWritable, _Writable); + +DummyWritable.prototype._write = function _write(chunk, encoding, callback) { + this.strings.push(chunk.toString()); + if (callback) callback(); +}; + diff --git a/backend/node_modules/lazystream/test/pipe_test.js b/backend/node_modules/lazystream/test/pipe_test.js new file mode 100644 index 00000000..7129e359 --- /dev/null +++ b/backend/node_modules/lazystream/test/pipe_test.js @@ -0,0 +1,36 @@ + +var stream = require('../lib/lazystream'); +var helper = require('./helper'); + +exports.pipe = { + readwrite: function(test) { + var expected = [ 'line1\n', 'line2\n' ]; + var actual = []; + var readableInstantiated = false; + var writableInstantiated = false; + + test.expect(3); + + var readable = new stream.Readable(function() { + readableInstantiated = true; + return new helper.DummyReadable([].concat(expected)); + }); + + var writable = new stream.Writable(function() { + writableInstantiated = true; + return new 
helper.DummyWritable(actual); + }); + + test.equal(readableInstantiated, false, 'DummyReadable should only be instantiated when it is needed'); + test.equal(writableInstantiated, false, 'DummyWritable should only be instantiated when it is needed'); + + writable.on('end', function() { + test.equal(actual.join(''), expected.join(''), 'Piping on demand streams should keep data intact'); + test.done(); + }); + + readable.pipe(writable); + } +}; + + diff --git a/backend/node_modules/lazystream/test/readable_test.js b/backend/node_modules/lazystream/test/readable_test.js new file mode 100644 index 00000000..9ae06368 --- /dev/null +++ b/backend/node_modules/lazystream/test/readable_test.js @@ -0,0 +1,90 @@ + +var Readable = require('../lib/lazystream').Readable; +var DummyReadable = require('./helper').DummyReadable; + +exports.readable = { + dummy: function(test) { + var expected = [ 'line1\n', 'line2\n' ]; + var actual = []; + + test.expect(1); + + new DummyReadable([].concat(expected)) + .on('data', function(chunk) { + actual.push(chunk.toString()); + }) + .on('end', function() { + test.equal(actual.join(''), expected.join(''), 'DummyReadable should produce the data it was created with'); + test.done(); + }); + }, + options: function(test) { + test.expect(3); + + var readable = new Readable(function(options) { + test.ok(this instanceof Readable, "Readable should bind itself to callback's this"); + test.equal(options.encoding, "utf-8", "Readable should make options accessible to callback"); + this.ok = true; + return new DummyReadable(["test"]); + }, {encoding: "utf-8"}); + + readable.read(4); + + test.ok(readable.ok); + + test.done(); + }, + streams2: function(test) { + var expected = [ 'line1\n', 'line2\n' ]; + var actual = []; + var instantiated = false; + + test.expect(2); + + var readable = new Readable(function() { + instantiated = true; + return new DummyReadable([].concat(expected)); + }); + + test.equal(instantiated, false, 'DummyReadable should only be instantiated when it is needed'); + + readable.on('readable', function() { + var chunk; + while ((chunk = readable.read())) { + actual.push(chunk.toString()); + } + }); + readable.on('end', function() { + test.equal(actual.join(''), expected.join(''), 'Readable should not change the data of the underlying stream'); + test.done(); + }); + + readable.read(0); + }, + resume: function(test) { + var expected = [ 'line1\n', 'line2\n' ]; + var actual = []; + var instantiated = false; + + test.expect(2); + + var readable = new Readable(function() { + instantiated = true; + return new DummyReadable([].concat(expected)); + }); + + readable.pause(); + + readable.on('data', function(chunk) { + actual.push(chunk.toString()); + }); + readable.on('end', function() { + test.equal(actual.join(''), expected.join(''), 'Readable should not change the data of the underlying stream'); + test.done(); + }); + + test.equal(instantiated, false, 'DummyReadable should only be instantiated when it is needed'); + + readable.resume(); + } +}; diff --git a/backend/node_modules/lazystream/test/writable_test.js b/backend/node_modules/lazystream/test/writable_test.js new file mode 100644 index 00000000..a6638456 --- /dev/null +++ b/backend/node_modules/lazystream/test/writable_test.js @@ -0,0 +1,59 @@ + +var Writable = require('../lib/lazystream').Writable; +var DummyWritable = require('./helper').DummyWritable; + +exports.writable = { + options: function(test) { + test.expect(3); + + var writable = new Writable(function(options) { + test.ok(this instanceof Writable, 
"Writable should bind itself to callback's this"); + test.equal(options.encoding, "utf-8", "Writable should make options accessible to callback"); + this.ok = true; + return new DummyWritable([]); + }, {encoding: "utf-8"}); + + writable.write("test"); + + test.ok(writable.ok); + + test.done(); + }, + dummy: function(test) { + var expected = [ 'line1\n', 'line2\n' ]; + var actual = []; + + test.expect(0); + + var dummy = new DummyWritable(actual); + + expected.forEach(function(item) { + dummy.write(new Buffer(item)); + }); + test.done(); + }, + streams2: function(test) { + var expected = [ 'line1\n', 'line2\n' ]; + var actual = []; + var instantiated = false; + + test.expect(2); + + var writable = new Writable(function() { + instantiated = true; + return new DummyWritable(actual); + }); + + test.equal(instantiated, false, 'DummyWritable should only be instantiated when it is needed'); + + writable.on('end', function() { + test.equal(actual.join(''), expected.join(''), 'Writable should not change the data of the underlying stream'); + test.done(); + }); + + expected.forEach(function(item) { + writable.write(new Buffer(item)); + }); + writable.end(); + } +}; diff --git a/backend/node_modules/lodash/LICENSE b/backend/node_modules/lodash/LICENSE new file mode 100644 index 00000000..77c42f14 --- /dev/null +++ b/backend/node_modules/lodash/LICENSE @@ -0,0 +1,47 @@ +Copyright OpenJS Foundation and other contributors + +Based on Underscore.js, copyright Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +This software consists of voluntary contributions made by many +individuals. For exact contribution history, see the revision history +available at https://github.com/lodash/lodash + +The following license applies to all parts of this software except as +documented below: + +==== + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +==== + +Copyright and related rights for sample code are waived via CC0. Sample +code is defined as all source code displayed within the prose of the +documentation. + +CC0: http://creativecommons.org/publicdomain/zero/1.0/ + +==== + +Files located in the node_modules and vendor directories are externally +maintained libraries used by this software which have their own +licenses; we recommend you read them, as their terms may differ from the +terms above. 
diff --git a/backend/node_modules/lodash/README.md b/backend/node_modules/lodash/README.md new file mode 100644 index 00000000..c64fce8c --- /dev/null +++ b/backend/node_modules/lodash/README.md @@ -0,0 +1,39 @@ +# lodash v4.17.23 + +The [Lodash](https://lodash.com/) library exported as [Node.js](https://nodejs.org/) modules. + +## Installation + +Using npm: +```shell +$ npm i -g npm +$ npm i --save lodash +``` + +In Node.js: +```js +// Load the full build. +var _ = require('lodash'); +// Load the core build. +var _ = require('lodash/core'); +// Load the FP build for immutable auto-curried iteratee-first data-last methods. +var fp = require('lodash/fp'); + +// Load method categories. +var array = require('lodash/array'); +var object = require('lodash/fp/object'); + +// Cherry-pick methods for smaller browserify/rollup/webpack bundles. +var at = require('lodash/at'); +var curryN = require('lodash/fp/curryN'); +``` + +See the [package source](https://github.com/lodash/lodash/tree/4.17.23-npm) for more details. + +**Note:**
+Install [n_](https://www.npmjs.com/package/n_) for Lodash use in the Node.js < 6 REPL. + +## Support + +Tested in Chrome 74-75, Firefox 66-67, IE 11, Edge 18, Safari 11-12, & Node.js 8-12.
+ */ +function baseIsEqual(value, other, bitmask, customizer, stack) { + if (value === other) { + return true; + } + if (value == null || other == null || (!isObjectLike(value) && !isObjectLike(other))) { + return value !== value && other !== other; + } + return baseIsEqualDeep(value, other, bitmask, customizer, baseIsEqual, stack); +} + +module.exports = baseIsEqual; diff --git a/backend/node_modules/lodash/_baseIsEqualDeep.js b/backend/node_modules/lodash/_baseIsEqualDeep.js new file mode 100644 index 00000000..e3cfd6a8 --- /dev/null +++ b/backend/node_modules/lodash/_baseIsEqualDeep.js @@ -0,0 +1,83 @@ +var Stack = require('./_Stack'), + equalArrays = require('./_equalArrays'), + equalByTag = require('./_equalByTag'), + equalObjects = require('./_equalObjects'), + getTag = require('./_getTag'), + isArray = require('./isArray'), + isBuffer = require('./isBuffer'), + isTypedArray = require('./isTypedArray'); + +/** Used to compose bitmasks for value comparisons. */ +var COMPARE_PARTIAL_FLAG = 1; + +/** `Object#toString` result references. */ +var argsTag = '[object Arguments]', + arrayTag = '[object Array]', + objectTag = '[object Object]'; + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * A specialized version of `baseIsEqual` for arrays and objects which performs + * deep comparisons and tracks traversed objects enabling objects with circular + * references to be compared. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} [stack] Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ +function baseIsEqualDeep(object, other, bitmask, customizer, equalFunc, stack) { + var objIsArr = isArray(object), + othIsArr = isArray(other), + objTag = objIsArr ? arrayTag : getTag(object), + othTag = othIsArr ? arrayTag : getTag(other); + + objTag = objTag == argsTag ? objectTag : objTag; + othTag = othTag == argsTag ? objectTag : othTag; + + var objIsObj = objTag == objectTag, + othIsObj = othTag == objectTag, + isSameTag = objTag == othTag; + + if (isSameTag && isBuffer(object)) { + if (!isBuffer(other)) { + return false; + } + objIsArr = true; + objIsObj = false; + } + if (isSameTag && !objIsObj) { + stack || (stack = new Stack); + return (objIsArr || isTypedArray(object)) + ? equalArrays(object, other, bitmask, customizer, equalFunc, stack) + : equalByTag(object, other, objTag, bitmask, customizer, equalFunc, stack); + } + if (!(bitmask & COMPARE_PARTIAL_FLAG)) { + var objIsWrapped = objIsObj && hasOwnProperty.call(object, '__wrapped__'), + othIsWrapped = othIsObj && hasOwnProperty.call(other, '__wrapped__'); + + if (objIsWrapped || othIsWrapped) { + var objUnwrapped = objIsWrapped ? object.value() : object, + othUnwrapped = othIsWrapped ? 
other.value() : other; + + stack || (stack = new Stack); + return equalFunc(objUnwrapped, othUnwrapped, bitmask, customizer, stack); + } + } + if (!isSameTag) { + return false; + } + stack || (stack = new Stack); + return equalObjects(object, other, bitmask, customizer, equalFunc, stack); +} + +module.exports = baseIsEqualDeep; diff --git a/backend/node_modules/lodash/_baseIsMap.js b/backend/node_modules/lodash/_baseIsMap.js new file mode 100644 index 00000000..02a4021c --- /dev/null +++ b/backend/node_modules/lodash/_baseIsMap.js @@ -0,0 +1,18 @@ +var getTag = require('./_getTag'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var mapTag = '[object Map]'; + +/** + * The base implementation of `_.isMap` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. + */ +function baseIsMap(value) { + return isObjectLike(value) && getTag(value) == mapTag; +} + +module.exports = baseIsMap; diff --git a/backend/node_modules/lodash/_baseIsMatch.js b/backend/node_modules/lodash/_baseIsMatch.js new file mode 100644 index 00000000..72494bed --- /dev/null +++ b/backend/node_modules/lodash/_baseIsMatch.js @@ -0,0 +1,62 @@ +var Stack = require('./_Stack'), + baseIsEqual = require('./_baseIsEqual'); + +/** Used to compose bitmasks for value comparisons. */ +var COMPARE_PARTIAL_FLAG = 1, + COMPARE_UNORDERED_FLAG = 2; + +/** + * The base implementation of `_.isMatch` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @param {Array} matchData The property names, values, and compare flags to match. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + */ +function baseIsMatch(object, source, matchData, customizer) { + var index = matchData.length, + length = index, + noCustomizer = !customizer; + + if (object == null) { + return !length; + } + object = Object(object); + while (index--) { + var data = matchData[index]; + if ((noCustomizer && data[2]) + ? data[1] !== object[data[0]] + : !(data[0] in object) + ) { + return false; + } + } + while (++index < length) { + data = matchData[index]; + var key = data[0], + objValue = object[key], + srcValue = data[1]; + + if (noCustomizer && data[2]) { + if (objValue === undefined && !(key in object)) { + return false; + } + } else { + var stack = new Stack; + if (customizer) { + var result = customizer(objValue, srcValue, key, object, source, stack); + } + if (!(result === undefined + ? baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG, customizer, stack) + : result + )) { + return false; + } + } + } + return true; +} + +module.exports = baseIsMatch; diff --git a/backend/node_modules/lodash/_baseIsNaN.js b/backend/node_modules/lodash/_baseIsNaN.js new file mode 100644 index 00000000..316f1eb1 --- /dev/null +++ b/backend/node_modules/lodash/_baseIsNaN.js @@ -0,0 +1,12 @@ +/** + * The base implementation of `_.isNaN` without support for number objects. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. 
+ */ +function baseIsNaN(value) { + return value !== value; +} + +module.exports = baseIsNaN; diff --git a/backend/node_modules/lodash/_baseIsNative.js b/backend/node_modules/lodash/_baseIsNative.js new file mode 100644 index 00000000..87023304 --- /dev/null +++ b/backend/node_modules/lodash/_baseIsNative.js @@ -0,0 +1,47 @@ +var isFunction = require('./isFunction'), + isMasked = require('./_isMasked'), + isObject = require('./isObject'), + toSource = require('./_toSource'); + +/** + * Used to match `RegExp` + * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). + */ +var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; + +/** Used to detect host constructors (Safari). */ +var reIsHostCtor = /^\[object .+?Constructor\]$/; + +/** Used for built-in method references. */ +var funcProto = Function.prototype, + objectProto = Object.prototype; + +/** Used to resolve the decompiled source of functions. */ +var funcToString = funcProto.toString; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** Used to detect if a method is native. */ +var reIsNative = RegExp('^' + + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') + .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' +); + +/** + * The base implementation of `_.isNative` without bad shim checks. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. + */ +function baseIsNative(value) { + if (!isObject(value) || isMasked(value)) { + return false; + } + var pattern = isFunction(value) ? reIsNative : reIsHostCtor; + return pattern.test(toSource(value)); +} + +module.exports = baseIsNative; diff --git a/backend/node_modules/lodash/_baseIsRegExp.js b/backend/node_modules/lodash/_baseIsRegExp.js new file mode 100644 index 00000000..6cd7c1ae --- /dev/null +++ b/backend/node_modules/lodash/_baseIsRegExp.js @@ -0,0 +1,18 @@ +var baseGetTag = require('./_baseGetTag'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var regexpTag = '[object RegExp]'; + +/** + * The base implementation of `_.isRegExp` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. + */ +function baseIsRegExp(value) { + return isObjectLike(value) && baseGetTag(value) == regexpTag; +} + +module.exports = baseIsRegExp; diff --git a/backend/node_modules/lodash/_baseIsSet.js b/backend/node_modules/lodash/_baseIsSet.js new file mode 100644 index 00000000..6dee3671 --- /dev/null +++ b/backend/node_modules/lodash/_baseIsSet.js @@ -0,0 +1,18 @@ +var getTag = require('./_getTag'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var setTag = '[object Set]'; + +/** + * The base implementation of `_.isSet` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. 
+ */ +function baseIsSet(value) { + return isObjectLike(value) && getTag(value) == setTag; +} + +module.exports = baseIsSet; diff --git a/backend/node_modules/lodash/_baseIsTypedArray.js b/backend/node_modules/lodash/_baseIsTypedArray.js new file mode 100644 index 00000000..1edb32ff --- /dev/null +++ b/backend/node_modules/lodash/_baseIsTypedArray.js @@ -0,0 +1,60 @@ +var baseGetTag = require('./_baseGetTag'), + isLength = require('./isLength'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var argsTag = '[object Arguments]', + arrayTag = '[object Array]', + boolTag = '[object Boolean]', + dateTag = '[object Date]', + errorTag = '[object Error]', + funcTag = '[object Function]', + mapTag = '[object Map]', + numberTag = '[object Number]', + objectTag = '[object Object]', + regexpTag = '[object RegExp]', + setTag = '[object Set]', + stringTag = '[object String]', + weakMapTag = '[object WeakMap]'; + +var arrayBufferTag = '[object ArrayBuffer]', + dataViewTag = '[object DataView]', + float32Tag = '[object Float32Array]', + float64Tag = '[object Float64Array]', + int8Tag = '[object Int8Array]', + int16Tag = '[object Int16Array]', + int32Tag = '[object Int32Array]', + uint8Tag = '[object Uint8Array]', + uint8ClampedTag = '[object Uint8ClampedArray]', + uint16Tag = '[object Uint16Array]', + uint32Tag = '[object Uint32Array]'; + +/** Used to identify `toStringTag` values of typed arrays. */ +var typedArrayTags = {}; +typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = +typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = +typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = +typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = +typedArrayTags[uint32Tag] = true; +typedArrayTags[argsTag] = typedArrayTags[arrayTag] = +typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = +typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = +typedArrayTags[errorTag] = typedArrayTags[funcTag] = +typedArrayTags[mapTag] = typedArrayTags[numberTag] = +typedArrayTags[objectTag] = typedArrayTags[regexpTag] = +typedArrayTags[setTag] = typedArrayTags[stringTag] = +typedArrayTags[weakMapTag] = false; + +/** + * The base implementation of `_.isTypedArray` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + */ +function baseIsTypedArray(value) { + return isObjectLike(value) && + isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; +} + +module.exports = baseIsTypedArray; diff --git a/backend/node_modules/lodash/_baseIteratee.js b/backend/node_modules/lodash/_baseIteratee.js new file mode 100644 index 00000000..995c2575 --- /dev/null +++ b/backend/node_modules/lodash/_baseIteratee.js @@ -0,0 +1,31 @@ +var baseMatches = require('./_baseMatches'), + baseMatchesProperty = require('./_baseMatchesProperty'), + identity = require('./identity'), + isArray = require('./isArray'), + property = require('./property'); + +/** + * The base implementation of `_.iteratee`. + * + * @private + * @param {*} [value=_.identity] The value to convert to an iteratee. + * @returns {Function} Returns the iteratee. + */ +function baseIteratee(value) { + // Don't store the `typeof` result in a variable to avoid a JIT bug in Safari 9. + // See https://bugs.webkit.org/show_bug.cgi?id=156034 for more details. 
+ if (typeof value == 'function') { + return value; + } + if (value == null) { + return identity; + } + if (typeof value == 'object') { + return isArray(value) + ? baseMatchesProperty(value[0], value[1]) + : baseMatches(value); + } + return property(value); +} + +module.exports = baseIteratee; diff --git a/backend/node_modules/lodash/_baseKeys.js b/backend/node_modules/lodash/_baseKeys.js new file mode 100644 index 00000000..45e9e6f3 --- /dev/null +++ b/backend/node_modules/lodash/_baseKeys.js @@ -0,0 +1,30 @@ +var isPrototype = require('./_isPrototype'), + nativeKeys = require('./_nativeKeys'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * The base implementation of `_.keys` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ +function baseKeys(object) { + if (!isPrototype(object)) { + return nativeKeys(object); + } + var result = []; + for (var key in Object(object)) { + if (hasOwnProperty.call(object, key) && key != 'constructor') { + result.push(key); + } + } + return result; +} + +module.exports = baseKeys; diff --git a/backend/node_modules/lodash/_baseKeysIn.js b/backend/node_modules/lodash/_baseKeysIn.js new file mode 100644 index 00000000..ea8a0a17 --- /dev/null +++ b/backend/node_modules/lodash/_baseKeysIn.js @@ -0,0 +1,33 @@ +var isObject = require('./isObject'), + isPrototype = require('./_isPrototype'), + nativeKeysIn = require('./_nativeKeysIn'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ +function baseKeysIn(object) { + if (!isObject(object)) { + return nativeKeysIn(object); + } + var isProto = isPrototype(object), + result = []; + + for (var key in object) { + if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) { + result.push(key); + } + } + return result; +} + +module.exports = baseKeysIn; diff --git a/backend/node_modules/lodash/_baseLodash.js b/backend/node_modules/lodash/_baseLodash.js new file mode 100644 index 00000000..f76c790e --- /dev/null +++ b/backend/node_modules/lodash/_baseLodash.js @@ -0,0 +1,10 @@ +/** + * The function whose prototype chain sequence wrappers inherit from. + * + * @private + */ +function baseLodash() { + // No operation performed. +} + +module.exports = baseLodash; diff --git a/backend/node_modules/lodash/_baseLt.js b/backend/node_modules/lodash/_baseLt.js new file mode 100644 index 00000000..8674d294 --- /dev/null +++ b/backend/node_modules/lodash/_baseLt.js @@ -0,0 +1,14 @@ +/** + * The base implementation of `_.lt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. 
+ */ +function baseLt(value, other) { + return value < other; +} + +module.exports = baseLt; diff --git a/backend/node_modules/lodash/_baseMap.js b/backend/node_modules/lodash/_baseMap.js new file mode 100644 index 00000000..0bf5cead --- /dev/null +++ b/backend/node_modules/lodash/_baseMap.js @@ -0,0 +1,22 @@ +var baseEach = require('./_baseEach'), + isArrayLike = require('./isArrayLike'); + +/** + * The base implementation of `_.map` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ +function baseMap(collection, iteratee) { + var index = -1, + result = isArrayLike(collection) ? Array(collection.length) : []; + + baseEach(collection, function(value, key, collection) { + result[++index] = iteratee(value, key, collection); + }); + return result; +} + +module.exports = baseMap; diff --git a/backend/node_modules/lodash/_baseMatches.js b/backend/node_modules/lodash/_baseMatches.js new file mode 100644 index 00000000..e56582ad --- /dev/null +++ b/backend/node_modules/lodash/_baseMatches.js @@ -0,0 +1,22 @@ +var baseIsMatch = require('./_baseIsMatch'), + getMatchData = require('./_getMatchData'), + matchesStrictComparable = require('./_matchesStrictComparable'); + +/** + * The base implementation of `_.matches` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property values to match. + * @returns {Function} Returns the new spec function. + */ +function baseMatches(source) { + var matchData = getMatchData(source); + if (matchData.length == 1 && matchData[0][2]) { + return matchesStrictComparable(matchData[0][0], matchData[0][1]); + } + return function(object) { + return object === source || baseIsMatch(object, source, matchData); + }; +} + +module.exports = baseMatches; diff --git a/backend/node_modules/lodash/_baseMatchesProperty.js b/backend/node_modules/lodash/_baseMatchesProperty.js new file mode 100644 index 00000000..24afd893 --- /dev/null +++ b/backend/node_modules/lodash/_baseMatchesProperty.js @@ -0,0 +1,33 @@ +var baseIsEqual = require('./_baseIsEqual'), + get = require('./get'), + hasIn = require('./hasIn'), + isKey = require('./_isKey'), + isStrictComparable = require('./_isStrictComparable'), + matchesStrictComparable = require('./_matchesStrictComparable'), + toKey = require('./_toKey'); + +/** Used to compose bitmasks for value comparisons. */ +var COMPARE_PARTIAL_FLAG = 1, + COMPARE_UNORDERED_FLAG = 2; + +/** + * The base implementation of `_.matchesProperty` which doesn't clone `srcValue`. + * + * @private + * @param {string} path The path of the property to get. + * @param {*} srcValue The value to match. + * @returns {Function} Returns the new spec function. + */ +function baseMatchesProperty(path, srcValue) { + if (isKey(path) && isStrictComparable(srcValue)) { + return matchesStrictComparable(toKey(path), srcValue); + } + return function(object) { + var objValue = get(object, path); + return (objValue === undefined && objValue === srcValue) + ? 
hasIn(object, path) + : baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG); + }; +} + +module.exports = baseMatchesProperty; diff --git a/backend/node_modules/lodash/_baseMean.js b/backend/node_modules/lodash/_baseMean.js new file mode 100644 index 00000000..fa9e00a0 --- /dev/null +++ b/backend/node_modules/lodash/_baseMean.js @@ -0,0 +1,20 @@ +var baseSum = require('./_baseSum'); + +/** Used as references for various `Number` constants. */ +var NAN = 0 / 0; + +/** + * The base implementation of `_.mean` and `_.meanBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the mean. + */ +function baseMean(array, iteratee) { + var length = array == null ? 0 : array.length; + return length ? (baseSum(array, iteratee) / length) : NAN; +} + +module.exports = baseMean; diff --git a/backend/node_modules/lodash/_baseMerge.js b/backend/node_modules/lodash/_baseMerge.js new file mode 100644 index 00000000..c98b5eb0 --- /dev/null +++ b/backend/node_modules/lodash/_baseMerge.js @@ -0,0 +1,42 @@ +var Stack = require('./_Stack'), + assignMergeValue = require('./_assignMergeValue'), + baseFor = require('./_baseFor'), + baseMergeDeep = require('./_baseMergeDeep'), + isObject = require('./isObject'), + keysIn = require('./keysIn'), + safeGet = require('./_safeGet'); + +/** + * The base implementation of `_.merge` without support for multiple sources. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {number} srcIndex The index of `source`. + * @param {Function} [customizer] The function to customize merged values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + */ +function baseMerge(object, source, srcIndex, customizer, stack) { + if (object === source) { + return; + } + baseFor(source, function(srcValue, key) { + stack || (stack = new Stack); + if (isObject(srcValue)) { + baseMergeDeep(object, source, key, srcIndex, baseMerge, customizer, stack); + } + else { + var newValue = customizer + ? customizer(safeGet(object, key), srcValue, (key + ''), object, source, stack) + : undefined; + + if (newValue === undefined) { + newValue = srcValue; + } + assignMergeValue(object, key, newValue); + } + }, keysIn); +} + +module.exports = baseMerge; diff --git a/backend/node_modules/lodash/_baseMergeDeep.js b/backend/node_modules/lodash/_baseMergeDeep.js new file mode 100644 index 00000000..4679e8dc --- /dev/null +++ b/backend/node_modules/lodash/_baseMergeDeep.js @@ -0,0 +1,94 @@ +var assignMergeValue = require('./_assignMergeValue'), + cloneBuffer = require('./_cloneBuffer'), + cloneTypedArray = require('./_cloneTypedArray'), + copyArray = require('./_copyArray'), + initCloneObject = require('./_initCloneObject'), + isArguments = require('./isArguments'), + isArray = require('./isArray'), + isArrayLikeObject = require('./isArrayLikeObject'), + isBuffer = require('./isBuffer'), + isFunction = require('./isFunction'), + isObject = require('./isObject'), + isPlainObject = require('./isPlainObject'), + isTypedArray = require('./isTypedArray'), + safeGet = require('./_safeGet'), + toPlainObject = require('./toPlainObject'); + +/** + * A specialized version of `baseMerge` for arrays and objects which performs + * deep merges and tracks traversed objects enabling objects with circular + * references to be merged. 
+ * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {string} key The key of the value to merge. + * @param {number} srcIndex The index of `source`. + * @param {Function} mergeFunc The function to merge values. + * @param {Function} [customizer] The function to customize assigned values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + */ +function baseMergeDeep(object, source, key, srcIndex, mergeFunc, customizer, stack) { + var objValue = safeGet(object, key), + srcValue = safeGet(source, key), + stacked = stack.get(srcValue); + + if (stacked) { + assignMergeValue(object, key, stacked); + return; + } + var newValue = customizer + ? customizer(objValue, srcValue, (key + ''), object, source, stack) + : undefined; + + var isCommon = newValue === undefined; + + if (isCommon) { + var isArr = isArray(srcValue), + isBuff = !isArr && isBuffer(srcValue), + isTyped = !isArr && !isBuff && isTypedArray(srcValue); + + newValue = srcValue; + if (isArr || isBuff || isTyped) { + if (isArray(objValue)) { + newValue = objValue; + } + else if (isArrayLikeObject(objValue)) { + newValue = copyArray(objValue); + } + else if (isBuff) { + isCommon = false; + newValue = cloneBuffer(srcValue, true); + } + else if (isTyped) { + isCommon = false; + newValue = cloneTypedArray(srcValue, true); + } + else { + newValue = []; + } + } + else if (isPlainObject(srcValue) || isArguments(srcValue)) { + newValue = objValue; + if (isArguments(objValue)) { + newValue = toPlainObject(objValue); + } + else if (!isObject(objValue) || isFunction(objValue)) { + newValue = initCloneObject(srcValue); + } + } + else { + isCommon = false; + } + } + if (isCommon) { + // Recursively merge objects and arrays (susceptible to call stack limits). + stack.set(srcValue, newValue); + mergeFunc(newValue, srcValue, srcIndex, customizer, stack); + stack['delete'](srcValue); + } + assignMergeValue(object, key, newValue); +} + +module.exports = baseMergeDeep; diff --git a/backend/node_modules/lodash/_baseNth.js b/backend/node_modules/lodash/_baseNth.js new file mode 100644 index 00000000..0403c2a3 --- /dev/null +++ b/backend/node_modules/lodash/_baseNth.js @@ -0,0 +1,20 @@ +var isIndex = require('./_isIndex'); + +/** + * The base implementation of `_.nth` which doesn't coerce arguments. + * + * @private + * @param {Array} array The array to query. + * @param {number} n The index of the element to return. + * @returns {*} Returns the nth element of `array`. + */ +function baseNth(array, n) { + var length = array.length; + if (!length) { + return; + } + n += n < 0 ? length : 0; + return isIndex(n, length) ? array[n] : undefined; +} + +module.exports = baseNth; diff --git a/backend/node_modules/lodash/_baseOrderBy.js b/backend/node_modules/lodash/_baseOrderBy.js new file mode 100644 index 00000000..775a0174 --- /dev/null +++ b/backend/node_modules/lodash/_baseOrderBy.js @@ -0,0 +1,49 @@ +var arrayMap = require('./_arrayMap'), + baseGet = require('./_baseGet'), + baseIteratee = require('./_baseIteratee'), + baseMap = require('./_baseMap'), + baseSortBy = require('./_baseSortBy'), + baseUnary = require('./_baseUnary'), + compareMultiple = require('./_compareMultiple'), + identity = require('./identity'), + isArray = require('./isArray'); + +/** + * The base implementation of `_.orderBy` without param guards. + * + * @private + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function[]|Object[]|string[]} iteratees The iteratees to sort by. + * @param {string[]} orders The sort orders of `iteratees`. + * @returns {Array} Returns the new sorted array. + */ +function baseOrderBy(collection, iteratees, orders) { + if (iteratees.length) { + iteratees = arrayMap(iteratees, function(iteratee) { + if (isArray(iteratee)) { + return function(value) { + return baseGet(value, iteratee.length === 1 ? iteratee[0] : iteratee); + } + } + return iteratee; + }); + } else { + iteratees = [identity]; + } + + var index = -1; + iteratees = arrayMap(iteratees, baseUnary(baseIteratee)); + + var result = baseMap(collection, function(value, key, collection) { + var criteria = arrayMap(iteratees, function(iteratee) { + return iteratee(value); + }); + return { 'criteria': criteria, 'index': ++index, 'value': value }; + }); + + return baseSortBy(result, function(object, other) { + return compareMultiple(object, other, orders); + }); +} + +module.exports = baseOrderBy; diff --git a/backend/node_modules/lodash/_basePick.js b/backend/node_modules/lodash/_basePick.js new file mode 100644 index 00000000..09b458a6 --- /dev/null +++ b/backend/node_modules/lodash/_basePick.js @@ -0,0 +1,19 @@ +var basePickBy = require('./_basePickBy'), + hasIn = require('./hasIn'); + +/** + * The base implementation of `_.pick` without support for individual + * property identifiers. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @returns {Object} Returns the new object. + */ +function basePick(object, paths) { + return basePickBy(object, paths, function(value, path) { + return hasIn(object, path); + }); +} + +module.exports = basePick; diff --git a/backend/node_modules/lodash/_basePickBy.js b/backend/node_modules/lodash/_basePickBy.js new file mode 100644 index 00000000..85be68c8 --- /dev/null +++ b/backend/node_modules/lodash/_basePickBy.js @@ -0,0 +1,30 @@ +var baseGet = require('./_baseGet'), + baseSet = require('./_baseSet'), + castPath = require('./_castPath'); + +/** + * The base implementation of `_.pickBy` without support for iteratee shorthands. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @param {Function} predicate The function invoked per property. + * @returns {Object} Returns the new object. + */ +function basePickBy(object, paths, predicate) { + var index = -1, + length = paths.length, + result = {}; + + while (++index < length) { + var path = paths[index], + value = baseGet(object, path); + + if (predicate(value, path)) { + baseSet(result, castPath(path, object), value); + } + } + return result; +} + +module.exports = basePickBy; diff --git a/backend/node_modules/lodash/_baseProperty.js b/backend/node_modules/lodash/_baseProperty.js new file mode 100644 index 00000000..496281ec --- /dev/null +++ b/backend/node_modules/lodash/_baseProperty.js @@ -0,0 +1,14 @@ +/** + * The base implementation of `_.property` without support for deep paths. + * + * @private + * @param {string} key The key of the property to get. + * @returns {Function} Returns the new accessor function. + */ +function baseProperty(key) { + return function(object) { + return object == null ? 
undefined : object[key]; + }; +} + +module.exports = baseProperty; diff --git a/backend/node_modules/lodash/_basePropertyDeep.js b/backend/node_modules/lodash/_basePropertyDeep.js new file mode 100644 index 00000000..1e5aae50 --- /dev/null +++ b/backend/node_modules/lodash/_basePropertyDeep.js @@ -0,0 +1,16 @@ +var baseGet = require('./_baseGet'); + +/** + * A specialized version of `baseProperty` which supports deep paths. + * + * @private + * @param {Array|string} path The path of the property to get. + * @returns {Function} Returns the new accessor function. + */ +function basePropertyDeep(path) { + return function(object) { + return baseGet(object, path); + }; +} + +module.exports = basePropertyDeep; diff --git a/backend/node_modules/lodash/_basePropertyOf.js b/backend/node_modules/lodash/_basePropertyOf.js new file mode 100644 index 00000000..46173999 --- /dev/null +++ b/backend/node_modules/lodash/_basePropertyOf.js @@ -0,0 +1,14 @@ +/** + * The base implementation of `_.propertyOf` without support for deep paths. + * + * @private + * @param {Object} object The object to query. + * @returns {Function} Returns the new accessor function. + */ +function basePropertyOf(object) { + return function(key) { + return object == null ? undefined : object[key]; + }; +} + +module.exports = basePropertyOf; diff --git a/backend/node_modules/lodash/_basePullAll.js b/backend/node_modules/lodash/_basePullAll.js new file mode 100644 index 00000000..305720ed --- /dev/null +++ b/backend/node_modules/lodash/_basePullAll.js @@ -0,0 +1,51 @@ +var arrayMap = require('./_arrayMap'), + baseIndexOf = require('./_baseIndexOf'), + baseIndexOfWith = require('./_baseIndexOfWith'), + baseUnary = require('./_baseUnary'), + copyArray = require('./_copyArray'); + +/** Used for built-in method references. */ +var arrayProto = Array.prototype; + +/** Built-in value references. */ +var splice = arrayProto.splice; + +/** + * The base implementation of `_.pullAllBy` without support for iteratee + * shorthands. + * + * @private + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns `array`. + */ +function basePullAll(array, values, iteratee, comparator) { + var indexOf = comparator ? baseIndexOfWith : baseIndexOf, + index = -1, + length = values.length, + seen = array; + + if (array === values) { + values = copyArray(values); + } + if (iteratee) { + seen = arrayMap(array, baseUnary(iteratee)); + } + while (++index < length) { + var fromIndex = 0, + value = values[index], + computed = iteratee ? iteratee(value) : value; + + while ((fromIndex = indexOf(seen, computed, fromIndex, comparator)) > -1) { + if (seen !== array) { + splice.call(seen, fromIndex, 1); + } + splice.call(array, fromIndex, 1); + } + } + return array; +} + +module.exports = basePullAll; diff --git a/backend/node_modules/lodash/_basePullAt.js b/backend/node_modules/lodash/_basePullAt.js new file mode 100644 index 00000000..c3e9e710 --- /dev/null +++ b/backend/node_modules/lodash/_basePullAt.js @@ -0,0 +1,37 @@ +var baseUnset = require('./_baseUnset'), + isIndex = require('./_isIndex'); + +/** Used for built-in method references. */ +var arrayProto = Array.prototype; + +/** Built-in value references. 
*/ +var splice = arrayProto.splice; + +/** + * The base implementation of `_.pullAt` without support for individual + * indexes or capturing the removed elements. + * + * @private + * @param {Array} array The array to modify. + * @param {number[]} indexes The indexes of elements to remove. + * @returns {Array} Returns `array`. + */ +function basePullAt(array, indexes) { + var length = array ? indexes.length : 0, + lastIndex = length - 1; + + while (length--) { + var index = indexes[length]; + if (length == lastIndex || index !== previous) { + var previous = index; + if (isIndex(index)) { + splice.call(array, index, 1); + } else { + baseUnset(array, index); + } + } + } + return array; +} + +module.exports = basePullAt; diff --git a/backend/node_modules/lodash/_baseRandom.js b/backend/node_modules/lodash/_baseRandom.js new file mode 100644 index 00000000..94f76a76 --- /dev/null +++ b/backend/node_modules/lodash/_baseRandom.js @@ -0,0 +1,18 @@ +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeFloor = Math.floor, + nativeRandom = Math.random; + +/** + * The base implementation of `_.random` without support for returning + * floating-point numbers. + * + * @private + * @param {number} lower The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the random number. + */ +function baseRandom(lower, upper) { + return lower + nativeFloor(nativeRandom() * (upper - lower + 1)); +} + +module.exports = baseRandom; diff --git a/backend/node_modules/lodash/_baseRange.js b/backend/node_modules/lodash/_baseRange.js new file mode 100644 index 00000000..0fb8e419 --- /dev/null +++ b/backend/node_modules/lodash/_baseRange.js @@ -0,0 +1,28 @@ +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeCeil = Math.ceil, + nativeMax = Math.max; + +/** + * The base implementation of `_.range` and `_.rangeRight` which doesn't + * coerce arguments. + * + * @private + * @param {number} start The start of the range. + * @param {number} end The end of the range. + * @param {number} step The value to increment or decrement by. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the range of numbers. + */ +function baseRange(start, end, step, fromRight) { + var index = -1, + length = nativeMax(nativeCeil((end - start) / (step || 1)), 0), + result = Array(length); + + while (length--) { + result[fromRight ? length : ++index] = start; + start += step; + } + return result; +} + +module.exports = baseRange; diff --git a/backend/node_modules/lodash/_baseReduce.js b/backend/node_modules/lodash/_baseReduce.js new file mode 100644 index 00000000..5a1f8b57 --- /dev/null +++ b/backend/node_modules/lodash/_baseReduce.js @@ -0,0 +1,23 @@ +/** + * The base implementation of `_.reduce` and `_.reduceRight`, without support + * for iteratee shorthands, which iterates over `collection` using `eachFunc`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} accumulator The initial value. + * @param {boolean} initAccum Specify using the first or last element of + * `collection` as the initial value. + * @param {Function} eachFunc The function to iterate over `collection`. + * @returns {*} Returns the accumulated value. 
+ */ +function baseReduce(collection, iteratee, accumulator, initAccum, eachFunc) { + eachFunc(collection, function(value, index, collection) { + accumulator = initAccum + ? (initAccum = false, value) + : iteratee(accumulator, value, index, collection); + }); + return accumulator; +} + +module.exports = baseReduce; diff --git a/backend/node_modules/lodash/_baseRepeat.js b/backend/node_modules/lodash/_baseRepeat.js new file mode 100644 index 00000000..ee44c31a --- /dev/null +++ b/backend/node_modules/lodash/_baseRepeat.js @@ -0,0 +1,35 @@ +/** Used as references for various `Number` constants. */ +var MAX_SAFE_INTEGER = 9007199254740991; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeFloor = Math.floor; + +/** + * The base implementation of `_.repeat` which doesn't coerce arguments. + * + * @private + * @param {string} string The string to repeat. + * @param {number} n The number of times to repeat the string. + * @returns {string} Returns the repeated string. + */ +function baseRepeat(string, n) { + var result = ''; + if (!string || n < 1 || n > MAX_SAFE_INTEGER) { + return result; + } + // Leverage the exponentiation by squaring algorithm for a faster repeat. + // See https://en.wikipedia.org/wiki/Exponentiation_by_squaring for more details. + do { + if (n % 2) { + result += string; + } + n = nativeFloor(n / 2); + if (n) { + string += string; + } + } while (n); + + return result; +} + +module.exports = baseRepeat; diff --git a/backend/node_modules/lodash/_baseRest.js b/backend/node_modules/lodash/_baseRest.js new file mode 100644 index 00000000..d0dc4bdd --- /dev/null +++ b/backend/node_modules/lodash/_baseRest.js @@ -0,0 +1,17 @@ +var identity = require('./identity'), + overRest = require('./_overRest'), + setToString = require('./_setToString'); + +/** + * The base implementation of `_.rest` which doesn't validate or coerce arguments. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + */ +function baseRest(func, start) { + return setToString(overRest(func, start, identity), func + ''); +} + +module.exports = baseRest; diff --git a/backend/node_modules/lodash/_baseSample.js b/backend/node_modules/lodash/_baseSample.js new file mode 100644 index 00000000..58582b91 --- /dev/null +++ b/backend/node_modules/lodash/_baseSample.js @@ -0,0 +1,15 @@ +var arraySample = require('./_arraySample'), + values = require('./values'); + +/** + * The base implementation of `_.sample`. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @returns {*} Returns the random element. + */ +function baseSample(collection) { + return arraySample(values(collection)); +} + +module.exports = baseSample; diff --git a/backend/node_modules/lodash/_baseSampleSize.js b/backend/node_modules/lodash/_baseSampleSize.js new file mode 100644 index 00000000..5c90ec51 --- /dev/null +++ b/backend/node_modules/lodash/_baseSampleSize.js @@ -0,0 +1,18 @@ +var baseClamp = require('./_baseClamp'), + shuffleSelf = require('./_shuffleSelf'), + values = require('./values'); + +/** + * The base implementation of `_.sampleSize` without param guards. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @param {number} n The number of elements to sample. + * @returns {Array} Returns the random elements. 
+ */ +function baseSampleSize(collection, n) { + var array = values(collection); + return shuffleSelf(array, baseClamp(n, 0, array.length)); +} + +module.exports = baseSampleSize; diff --git a/backend/node_modules/lodash/_baseSet.js b/backend/node_modules/lodash/_baseSet.js new file mode 100644 index 00000000..99f4fbf9 --- /dev/null +++ b/backend/node_modules/lodash/_baseSet.js @@ -0,0 +1,51 @@ +var assignValue = require('./_assignValue'), + castPath = require('./_castPath'), + isIndex = require('./_isIndex'), + isObject = require('./isObject'), + toKey = require('./_toKey'); + +/** + * The base implementation of `_.set`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. + */ +function baseSet(object, path, value, customizer) { + if (!isObject(object)) { + return object; + } + path = castPath(path, object); + + var index = -1, + length = path.length, + lastIndex = length - 1, + nested = object; + + while (nested != null && ++index < length) { + var key = toKey(path[index]), + newValue = value; + + if (key === '__proto__' || key === 'constructor' || key === 'prototype') { + return object; + } + + if (index != lastIndex) { + var objValue = nested[key]; + newValue = customizer ? customizer(objValue, key, nested) : undefined; + if (newValue === undefined) { + newValue = isObject(objValue) + ? objValue + : (isIndex(path[index + 1]) ? [] : {}); + } + } + assignValue(nested, key, newValue); + nested = nested[key]; + } + return object; +} + +module.exports = baseSet; diff --git a/backend/node_modules/lodash/_baseSetData.js b/backend/node_modules/lodash/_baseSetData.js new file mode 100644 index 00000000..c409947d --- /dev/null +++ b/backend/node_modules/lodash/_baseSetData.js @@ -0,0 +1,17 @@ +var identity = require('./identity'), + metaMap = require('./_metaMap'); + +/** + * The base implementation of `setData` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ +var baseSetData = !metaMap ? identity : function(func, data) { + metaMap.set(func, data); + return func; +}; + +module.exports = baseSetData; diff --git a/backend/node_modules/lodash/_baseSetToString.js b/backend/node_modules/lodash/_baseSetToString.js new file mode 100644 index 00000000..89eaca38 --- /dev/null +++ b/backend/node_modules/lodash/_baseSetToString.js @@ -0,0 +1,22 @@ +var constant = require('./constant'), + defineProperty = require('./_defineProperty'), + identity = require('./identity'); + +/** + * The base implementation of `setToString` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. + */ +var baseSetToString = !defineProperty ? 
identity : function(func, string) { + return defineProperty(func, 'toString', { + 'configurable': true, + 'enumerable': false, + 'value': constant(string), + 'writable': true + }); +}; + +module.exports = baseSetToString; diff --git a/backend/node_modules/lodash/_baseShuffle.js b/backend/node_modules/lodash/_baseShuffle.js new file mode 100644 index 00000000..023077ac --- /dev/null +++ b/backend/node_modules/lodash/_baseShuffle.js @@ -0,0 +1,15 @@ +var shuffleSelf = require('./_shuffleSelf'), + values = require('./values'); + +/** + * The base implementation of `_.shuffle`. + * + * @private + * @param {Array|Object} collection The collection to shuffle. + * @returns {Array} Returns the new shuffled array. + */ +function baseShuffle(collection) { + return shuffleSelf(values(collection)); +} + +module.exports = baseShuffle; diff --git a/backend/node_modules/lodash/_baseSlice.js b/backend/node_modules/lodash/_baseSlice.js new file mode 100644 index 00000000..786f6c99 --- /dev/null +++ b/backend/node_modules/lodash/_baseSlice.js @@ -0,0 +1,31 @@ +/** + * The base implementation of `_.slice` without an iteratee call guard. + * + * @private + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ +function baseSlice(array, start, end) { + var index = -1, + length = array.length; + + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = end > length ? length : end; + if (end < 0) { + end += length; + } + length = start > end ? 0 : ((end - start) >>> 0); + start >>>= 0; + + var result = Array(length); + while (++index < length) { + result[index] = array[index + start]; + } + return result; +} + +module.exports = baseSlice; diff --git a/backend/node_modules/lodash/_baseSome.js b/backend/node_modules/lodash/_baseSome.js new file mode 100644 index 00000000..58f3f447 --- /dev/null +++ b/backend/node_modules/lodash/_baseSome.js @@ -0,0 +1,22 @@ +var baseEach = require('./_baseEach'); + +/** + * The base implementation of `_.some` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + */ +function baseSome(collection, predicate) { + var result; + + baseEach(collection, function(value, index, collection) { + result = predicate(value, index, collection); + return !result; + }); + return !!result; +} + +module.exports = baseSome; diff --git a/backend/node_modules/lodash/_baseSortBy.js b/backend/node_modules/lodash/_baseSortBy.js new file mode 100644 index 00000000..a25c92ed --- /dev/null +++ b/backend/node_modules/lodash/_baseSortBy.js @@ -0,0 +1,21 @@ +/** + * The base implementation of `_.sortBy` which uses `comparer` to define the + * sort order of `array` and replaces criteria objects with their corresponding + * values. + * + * @private + * @param {Array} array The array to sort. + * @param {Function} comparer The function to define sort order. + * @returns {Array} Returns `array`. 
+ */ +function baseSortBy(array, comparer) { + var length = array.length; + + array.sort(comparer); + while (length--) { + array[length] = array[length].value; + } + return array; +} + +module.exports = baseSortBy; diff --git a/backend/node_modules/lodash/_baseSortedIndex.js b/backend/node_modules/lodash/_baseSortedIndex.js new file mode 100644 index 00000000..638c366c --- /dev/null +++ b/backend/node_modules/lodash/_baseSortedIndex.js @@ -0,0 +1,42 @@ +var baseSortedIndexBy = require('./_baseSortedIndexBy'), + identity = require('./identity'), + isSymbol = require('./isSymbol'); + +/** Used as references for the maximum length and index of an array. */ +var MAX_ARRAY_LENGTH = 4294967295, + HALF_MAX_ARRAY_LENGTH = MAX_ARRAY_LENGTH >>> 1; + +/** + * The base implementation of `_.sortedIndex` and `_.sortedLastIndex` which + * performs a binary search of `array` to determine the index at which `value` + * should be inserted into `array` in order to maintain its sort order. + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + */ +function baseSortedIndex(array, value, retHighest) { + var low = 0, + high = array == null ? low : array.length; + + if (typeof value == 'number' && value === value && high <= HALF_MAX_ARRAY_LENGTH) { + while (low < high) { + var mid = (low + high) >>> 1, + computed = array[mid]; + + if (computed !== null && !isSymbol(computed) && + (retHighest ? (computed <= value) : (computed < value))) { + low = mid + 1; + } else { + high = mid; + } + } + return high; + } + return baseSortedIndexBy(array, value, identity, retHighest); +} + +module.exports = baseSortedIndex; diff --git a/backend/node_modules/lodash/_baseSortedIndexBy.js b/backend/node_modules/lodash/_baseSortedIndexBy.js new file mode 100644 index 00000000..c247b377 --- /dev/null +++ b/backend/node_modules/lodash/_baseSortedIndexBy.js @@ -0,0 +1,67 @@ +var isSymbol = require('./isSymbol'); + +/** Used as references for the maximum length and index of an array. */ +var MAX_ARRAY_LENGTH = 4294967295, + MAX_ARRAY_INDEX = MAX_ARRAY_LENGTH - 1; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeFloor = Math.floor, + nativeMin = Math.min; + +/** + * The base implementation of `_.sortedIndexBy` and `_.sortedLastIndexBy` + * which invokes `iteratee` for `value` and each element of `array` to compute + * their sort ranking. The iteratee is invoked with one argument; (value). + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} iteratee The iteratee invoked per element. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + */ +function baseSortedIndexBy(array, value, iteratee, retHighest) { + var low = 0, + high = array == null ? 
0 : array.length; + if (high === 0) { + return 0; + } + + value = iteratee(value); + var valIsNaN = value !== value, + valIsNull = value === null, + valIsSymbol = isSymbol(value), + valIsUndefined = value === undefined; + + while (low < high) { + var mid = nativeFloor((low + high) / 2), + computed = iteratee(array[mid]), + othIsDefined = computed !== undefined, + othIsNull = computed === null, + othIsReflexive = computed === computed, + othIsSymbol = isSymbol(computed); + + if (valIsNaN) { + var setLow = retHighest || othIsReflexive; + } else if (valIsUndefined) { + setLow = othIsReflexive && (retHighest || othIsDefined); + } else if (valIsNull) { + setLow = othIsReflexive && othIsDefined && (retHighest || !othIsNull); + } else if (valIsSymbol) { + setLow = othIsReflexive && othIsDefined && !othIsNull && (retHighest || !othIsSymbol); + } else if (othIsNull || othIsSymbol) { + setLow = false; + } else { + setLow = retHighest ? (computed <= value) : (computed < value); + } + if (setLow) { + low = mid + 1; + } else { + high = mid; + } + } + return nativeMin(high, MAX_ARRAY_INDEX); +} + +module.exports = baseSortedIndexBy; diff --git a/backend/node_modules/lodash/_baseSortedUniq.js b/backend/node_modules/lodash/_baseSortedUniq.js new file mode 100644 index 00000000..802159a3 --- /dev/null +++ b/backend/node_modules/lodash/_baseSortedUniq.js @@ -0,0 +1,30 @@ +var eq = require('./eq'); + +/** + * The base implementation of `_.sortedUniq` and `_.sortedUniqBy` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + */ +function baseSortedUniq(array, iteratee) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + if (!index || !eq(computed, seen)) { + var seen = computed; + result[resIndex++] = value === 0 ? 0 : value; + } + } + return result; +} + +module.exports = baseSortedUniq; diff --git a/backend/node_modules/lodash/_baseSum.js b/backend/node_modules/lodash/_baseSum.js new file mode 100644 index 00000000..a9e84c13 --- /dev/null +++ b/backend/node_modules/lodash/_baseSum.js @@ -0,0 +1,24 @@ +/** + * The base implementation of `_.sum` and `_.sumBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the sum. + */ +function baseSum(array, iteratee) { + var result, + index = -1, + length = array.length; + + while (++index < length) { + var current = iteratee(array[index]); + if (current !== undefined) { + result = result === undefined ? current : (result + current); + } + } + return result; +} + +module.exports = baseSum; diff --git a/backend/node_modules/lodash/_baseTimes.js b/backend/node_modules/lodash/_baseTimes.js new file mode 100644 index 00000000..0603fc37 --- /dev/null +++ b/backend/node_modules/lodash/_baseTimes.js @@ -0,0 +1,20 @@ +/** + * The base implementation of `_.times` without support for iteratee shorthands + * or max array length checks. + * + * @private + * @param {number} n The number of times to invoke `iteratee`. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the array of results. 
+ */ +function baseTimes(n, iteratee) { + var index = -1, + result = Array(n); + + while (++index < n) { + result[index] = iteratee(index); + } + return result; +} + +module.exports = baseTimes; diff --git a/backend/node_modules/lodash/_baseToNumber.js b/backend/node_modules/lodash/_baseToNumber.js new file mode 100644 index 00000000..04859f39 --- /dev/null +++ b/backend/node_modules/lodash/_baseToNumber.js @@ -0,0 +1,24 @@ +var isSymbol = require('./isSymbol'); + +/** Used as references for various `Number` constants. */ +var NAN = 0 / 0; + +/** + * The base implementation of `_.toNumber` which doesn't ensure correct + * conversions of binary, hexadecimal, or octal string values. + * + * @private + * @param {*} value The value to process. + * @returns {number} Returns the number. + */ +function baseToNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + return +value; +} + +module.exports = baseToNumber; diff --git a/backend/node_modules/lodash/_baseToPairs.js b/backend/node_modules/lodash/_baseToPairs.js new file mode 100644 index 00000000..bff19912 --- /dev/null +++ b/backend/node_modules/lodash/_baseToPairs.js @@ -0,0 +1,18 @@ +var arrayMap = require('./_arrayMap'); + +/** + * The base implementation of `_.toPairs` and `_.toPairsIn` which creates an array + * of key-value pairs for `object` corresponding to the property names of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the key-value pairs. + */ +function baseToPairs(object, props) { + return arrayMap(props, function(key) { + return [key, object[key]]; + }); +} + +module.exports = baseToPairs; diff --git a/backend/node_modules/lodash/_baseToString.js b/backend/node_modules/lodash/_baseToString.js new file mode 100644 index 00000000..ada6ad29 --- /dev/null +++ b/backend/node_modules/lodash/_baseToString.js @@ -0,0 +1,37 @@ +var Symbol = require('./_Symbol'), + arrayMap = require('./_arrayMap'), + isArray = require('./isArray'), + isSymbol = require('./isSymbol'); + +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0; + +/** Used to convert symbols to primitives and strings. */ +var symbolProto = Symbol ? Symbol.prototype : undefined, + symbolToString = symbolProto ? symbolProto.toString : undefined; + +/** + * The base implementation of `_.toString` which doesn't convert nullish + * values to empty strings. + * + * @private + * @param {*} value The value to process. + * @returns {string} Returns the string. + */ +function baseToString(value) { + // Exit early for strings to avoid a performance hit in some environments. + if (typeof value == 'string') { + return value; + } + if (isArray(value)) { + // Recursively convert values (susceptible to call stack limits). + return arrayMap(value, baseToString) + ''; + } + if (isSymbol(value)) { + return symbolToString ? symbolToString.call(value) : ''; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; +} + +module.exports = baseToString; diff --git a/backend/node_modules/lodash/_baseTrim.js b/backend/node_modules/lodash/_baseTrim.js new file mode 100644 index 00000000..3e2797d9 --- /dev/null +++ b/backend/node_modules/lodash/_baseTrim.js @@ -0,0 +1,19 @@ +var trimmedEndIndex = require('./_trimmedEndIndex'); + +/** Used to match leading whitespace. */ +var reTrimStart = /^\s+/; + +/** + * The base implementation of `_.trim`. 
+ * + * @private + * @param {string} string The string to trim. + * @returns {string} Returns the trimmed string. + */ +function baseTrim(string) { + return string + ? string.slice(0, trimmedEndIndex(string) + 1).replace(reTrimStart, '') + : string; +} + +module.exports = baseTrim; diff --git a/backend/node_modules/lodash/_baseUnary.js b/backend/node_modules/lodash/_baseUnary.js new file mode 100644 index 00000000..98639e92 --- /dev/null +++ b/backend/node_modules/lodash/_baseUnary.js @@ -0,0 +1,14 @@ +/** + * The base implementation of `_.unary` without support for storing metadata. + * + * @private + * @param {Function} func The function to cap arguments for. + * @returns {Function} Returns the new capped function. + */ +function baseUnary(func) { + return function(value) { + return func(value); + }; +} + +module.exports = baseUnary; diff --git a/backend/node_modules/lodash/_baseUniq.js b/backend/node_modules/lodash/_baseUniq.js new file mode 100644 index 00000000..aea459dc --- /dev/null +++ b/backend/node_modules/lodash/_baseUniq.js @@ -0,0 +1,72 @@ +var SetCache = require('./_SetCache'), + arrayIncludes = require('./_arrayIncludes'), + arrayIncludesWith = require('./_arrayIncludesWith'), + cacheHas = require('./_cacheHas'), + createSet = require('./_createSet'), + setToArray = require('./_setToArray'); + +/** Used as the size to enable large array optimizations. */ +var LARGE_ARRAY_SIZE = 200; + +/** + * The base implementation of `_.uniqBy` without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. + */ +function baseUniq(array, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + length = array.length, + isCommon = true, + result = [], + seen = result; + + if (comparator) { + isCommon = false; + includes = arrayIncludesWith; + } + else if (length >= LARGE_ARRAY_SIZE) { + var set = iteratee ? null : createSet(array); + if (set) { + return setToArray(set); + } + isCommon = false; + includes = cacheHas; + seen = new SetCache; + } + else { + seen = iteratee ? [] : result; + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? value : 0; + if (isCommon && computed === computed) { + var seenIndex = seen.length; + while (seenIndex--) { + if (seen[seenIndex] === computed) { + continue outer; + } + } + if (iteratee) { + seen.push(computed); + } + result.push(value); + } + else if (!includes(seen, computed, comparator)) { + if (seen !== result) { + seen.push(computed); + } + result.push(value); + } + } + return result; +} + +module.exports = baseUniq; diff --git a/backend/node_modules/lodash/_baseUnset.js b/backend/node_modules/lodash/_baseUnset.js new file mode 100644 index 00000000..05aa28f0 --- /dev/null +++ b/backend/node_modules/lodash/_baseUnset.js @@ -0,0 +1,65 @@ +var castPath = require('./_castPath'), + last = require('./last'), + parent = require('./_parent'), + toKey = require('./_toKey'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * The base implementation of `_.unset`. + * + * @private + * @param {Object} object The object to modify. 
+ * @param {Array|string} path The property path to unset. + * @returns {boolean} Returns `true` if the property is deleted, else `false`. + */ +function baseUnset(object, path) { + path = castPath(path, object); + + // Prevent prototype pollution, see: https://github.com/lodash/lodash/security/advisories/GHSA-xxjr-mmjv-4gpg + var index = -1, + length = path.length; + + if (!length) { + return true; + } + + var isRootPrimitive = object == null || (typeof object !== 'object' && typeof object !== 'function'); + + while (++index < length) { + var key = path[index]; + + // skip non-string keys (e.g., Symbols, numbers) + if (typeof key !== 'string') { + continue; + } + + // Always block "__proto__" anywhere in the path if it's not expected + if (key === '__proto__' && !hasOwnProperty.call(object, '__proto__')) { + return false; + } + + // Block "constructor.prototype" chains + if (key === 'constructor' && + (index + 1) < length && + typeof path[index + 1] === 'string' && + path[index + 1] === 'prototype') { + + // Allow ONLY when the path starts at a primitive root, e.g., _.unset(0, 'constructor.prototype.a') + if (isRootPrimitive && index === 0) { + continue; + } + + return false; + } + } + + var obj = parent(object, path); + return obj == null || delete obj[toKey(last(path))]; +} + +module.exports = baseUnset; diff --git a/backend/node_modules/lodash/_baseUpdate.js b/backend/node_modules/lodash/_baseUpdate.js new file mode 100644 index 00000000..92a62377 --- /dev/null +++ b/backend/node_modules/lodash/_baseUpdate.js @@ -0,0 +1,18 @@ +var baseGet = require('./_baseGet'), + baseSet = require('./_baseSet'); + +/** + * The base implementation of `_.update`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to update. + * @param {Function} updater The function to produce the updated value. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. + */ +function baseUpdate(object, path, updater, customizer) { + return baseSet(object, path, updater(baseGet(object, path)), customizer); +} + +module.exports = baseUpdate; diff --git a/backend/node_modules/lodash/_baseValues.js b/backend/node_modules/lodash/_baseValues.js new file mode 100644 index 00000000..b95faadc --- /dev/null +++ b/backend/node_modules/lodash/_baseValues.js @@ -0,0 +1,19 @@ +var arrayMap = require('./_arrayMap'); + +/** + * The base implementation of `_.values` and `_.valuesIn` which creates an + * array of `object` property values corresponding to the property names + * of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the array of property values. + */ +function baseValues(object, props) { + return arrayMap(props, function(key) { + return object[key]; + }); +} + +module.exports = baseValues; diff --git a/backend/node_modules/lodash/_baseWhile.js b/backend/node_modules/lodash/_baseWhile.js new file mode 100644 index 00000000..07eac61b --- /dev/null +++ b/backend/node_modules/lodash/_baseWhile.js @@ -0,0 +1,26 @@ +var baseSlice = require('./_baseSlice'); + +/** + * The base implementation of methods like `_.dropWhile` and `_.takeWhile` + * without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to query. + * @param {Function} predicate The function invoked per iteration. + * @param {boolean} [isDrop] Specify dropping elements instead of taking them. 
+ * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the slice of `array`. + */ +function baseWhile(array, predicate, isDrop, fromRight) { + var length = array.length, + index = fromRight ? length : -1; + + while ((fromRight ? index-- : ++index < length) && + predicate(array[index], index, array)) {} + + return isDrop + ? baseSlice(array, (fromRight ? 0 : index), (fromRight ? index + 1 : length)) + : baseSlice(array, (fromRight ? index + 1 : 0), (fromRight ? length : index)); +} + +module.exports = baseWhile; diff --git a/backend/node_modules/lodash/_baseWrapperValue.js b/backend/node_modules/lodash/_baseWrapperValue.js new file mode 100644 index 00000000..443e0df5 --- /dev/null +++ b/backend/node_modules/lodash/_baseWrapperValue.js @@ -0,0 +1,25 @@ +var LazyWrapper = require('./_LazyWrapper'), + arrayPush = require('./_arrayPush'), + arrayReduce = require('./_arrayReduce'); + +/** + * The base implementation of `wrapperValue` which returns the result of + * performing a sequence of actions on the unwrapped `value`, where each + * successive action is supplied the return value of the previous. + * + * @private + * @param {*} value The unwrapped value. + * @param {Array} actions Actions to perform to resolve the unwrapped value. + * @returns {*} Returns the resolved value. + */ +function baseWrapperValue(value, actions) { + var result = value; + if (result instanceof LazyWrapper) { + result = result.value(); + } + return arrayReduce(actions, function(result, action) { + return action.func.apply(action.thisArg, arrayPush([result], action.args)); + }, result); +} + +module.exports = baseWrapperValue; diff --git a/backend/node_modules/lodash/_baseXor.js b/backend/node_modules/lodash/_baseXor.js new file mode 100644 index 00000000..8e69338b --- /dev/null +++ b/backend/node_modules/lodash/_baseXor.js @@ -0,0 +1,36 @@ +var baseDifference = require('./_baseDifference'), + baseFlatten = require('./_baseFlatten'), + baseUniq = require('./_baseUniq'); + +/** + * The base implementation of methods like `_.xor`, without support for + * iteratee shorthands, that accepts an array of arrays to inspect. + * + * @private + * @param {Array} arrays The arrays to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of values. + */ +function baseXor(arrays, iteratee, comparator) { + var length = arrays.length; + if (length < 2) { + return length ? baseUniq(arrays[0]) : []; + } + var index = -1, + result = Array(length); + + while (++index < length) { + var array = arrays[index], + othIndex = -1; + + while (++othIndex < length) { + if (othIndex != index) { + result[index] = baseDifference(result[index] || array, arrays[othIndex], iteratee, comparator); + } + } + } + return baseUniq(baseFlatten(result, 1), iteratee, comparator); +} + +module.exports = baseXor; diff --git a/backend/node_modules/lodash/_baseZipObject.js b/backend/node_modules/lodash/_baseZipObject.js new file mode 100644 index 00000000..401f85be --- /dev/null +++ b/backend/node_modules/lodash/_baseZipObject.js @@ -0,0 +1,23 @@ +/** + * This base implementation of `_.zipObject` which assigns values using `assignFunc`. + * + * @private + * @param {Array} props The property identifiers. + * @param {Array} values The property values. + * @param {Function} assignFunc The function to assign values. + * @returns {Object} Returns the new object. 
+ */ +function baseZipObject(props, values, assignFunc) { + var index = -1, + length = props.length, + valsLength = values.length, + result = {}; + + while (++index < length) { + var value = index < valsLength ? values[index] : undefined; + assignFunc(result, props[index], value); + } + return result; +} + +module.exports = baseZipObject; diff --git a/backend/node_modules/lodash/_cacheHas.js b/backend/node_modules/lodash/_cacheHas.js new file mode 100644 index 00000000..2dec8926 --- /dev/null +++ b/backend/node_modules/lodash/_cacheHas.js @@ -0,0 +1,13 @@ +/** + * Checks if a `cache` value for `key` exists. + * + * @private + * @param {Object} cache The cache to query. + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ +function cacheHas(cache, key) { + return cache.has(key); +} + +module.exports = cacheHas; diff --git a/backend/node_modules/lodash/_castArrayLikeObject.js b/backend/node_modules/lodash/_castArrayLikeObject.js new file mode 100644 index 00000000..92c75fa1 --- /dev/null +++ b/backend/node_modules/lodash/_castArrayLikeObject.js @@ -0,0 +1,14 @@ +var isArrayLikeObject = require('./isArrayLikeObject'); + +/** + * Casts `value` to an empty array if it's not an array like object. + * + * @private + * @param {*} value The value to inspect. + * @returns {Array|Object} Returns the cast array-like object. + */ +function castArrayLikeObject(value) { + return isArrayLikeObject(value) ? value : []; +} + +module.exports = castArrayLikeObject; diff --git a/backend/node_modules/lodash/_castFunction.js b/backend/node_modules/lodash/_castFunction.js new file mode 100644 index 00000000..98c91ae6 --- /dev/null +++ b/backend/node_modules/lodash/_castFunction.js @@ -0,0 +1,14 @@ +var identity = require('./identity'); + +/** + * Casts `value` to `identity` if it's not a function. + * + * @private + * @param {*} value The value to inspect. + * @returns {Function} Returns cast function. + */ +function castFunction(value) { + return typeof value == 'function' ? value : identity; +} + +module.exports = castFunction; diff --git a/backend/node_modules/lodash/_castPath.js b/backend/node_modules/lodash/_castPath.js new file mode 100644 index 00000000..017e4c1b --- /dev/null +++ b/backend/node_modules/lodash/_castPath.js @@ -0,0 +1,21 @@ +var isArray = require('./isArray'), + isKey = require('./_isKey'), + stringToPath = require('./_stringToPath'), + toString = require('./toString'); + +/** + * Casts `value` to a path array if it's not one. + * + * @private + * @param {*} value The value to inspect. + * @param {Object} [object] The object to query keys on. + * @returns {Array} Returns the cast property path array. + */ +function castPath(value, object) { + if (isArray(value)) { + return value; + } + return isKey(value, object) ? [value] : stringToPath(toString(value)); +} + +module.exports = castPath; diff --git a/backend/node_modules/lodash/_castRest.js b/backend/node_modules/lodash/_castRest.js new file mode 100644 index 00000000..213c66f1 --- /dev/null +++ b/backend/node_modules/lodash/_castRest.js @@ -0,0 +1,14 @@ +var baseRest = require('./_baseRest'); + +/** + * A `baseRest` alias which can be replaced with `identity` by module + * replacement plugins. + * + * @private + * @type {Function} + * @param {Function} func The function to apply a rest parameter to. + * @returns {Function} Returns the new function. 
+ */ +var castRest = baseRest; + +module.exports = castRest; diff --git a/backend/node_modules/lodash/_castSlice.js b/backend/node_modules/lodash/_castSlice.js new file mode 100644 index 00000000..071faeba --- /dev/null +++ b/backend/node_modules/lodash/_castSlice.js @@ -0,0 +1,18 @@ +var baseSlice = require('./_baseSlice'); + +/** + * Casts `array` to a slice if it's needed. + * + * @private + * @param {Array} array The array to inspect. + * @param {number} start The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the cast slice. + */ +function castSlice(array, start, end) { + var length = array.length; + end = end === undefined ? length : end; + return (!start && end >= length) ? array : baseSlice(array, start, end); +} + +module.exports = castSlice; diff --git a/backend/node_modules/lodash/_charsEndIndex.js b/backend/node_modules/lodash/_charsEndIndex.js new file mode 100644 index 00000000..07908ff3 --- /dev/null +++ b/backend/node_modules/lodash/_charsEndIndex.js @@ -0,0 +1,19 @@ +var baseIndexOf = require('./_baseIndexOf'); + +/** + * Used by `_.trim` and `_.trimEnd` to get the index of the last string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the last unmatched string symbol. + */ +function charsEndIndex(strSymbols, chrSymbols) { + var index = strSymbols.length; + + while (index-- && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; +} + +module.exports = charsEndIndex; diff --git a/backend/node_modules/lodash/_charsStartIndex.js b/backend/node_modules/lodash/_charsStartIndex.js new file mode 100644 index 00000000..b17afd25 --- /dev/null +++ b/backend/node_modules/lodash/_charsStartIndex.js @@ -0,0 +1,20 @@ +var baseIndexOf = require('./_baseIndexOf'); + +/** + * Used by `_.trim` and `_.trimStart` to get the index of the first string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the first unmatched string symbol. + */ +function charsStartIndex(strSymbols, chrSymbols) { + var index = -1, + length = strSymbols.length; + + while (++index < length && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; +} + +module.exports = charsStartIndex; diff --git a/backend/node_modules/lodash/_cloneArrayBuffer.js b/backend/node_modules/lodash/_cloneArrayBuffer.js new file mode 100644 index 00000000..c3d8f6e3 --- /dev/null +++ b/backend/node_modules/lodash/_cloneArrayBuffer.js @@ -0,0 +1,16 @@ +var Uint8Array = require('./_Uint8Array'); + +/** + * Creates a clone of `arrayBuffer`. + * + * @private + * @param {ArrayBuffer} arrayBuffer The array buffer to clone. + * @returns {ArrayBuffer} Returns the cloned array buffer. 
+ */ +function cloneArrayBuffer(arrayBuffer) { + var result = new arrayBuffer.constructor(arrayBuffer.byteLength); + new Uint8Array(result).set(new Uint8Array(arrayBuffer)); + return result; +} + +module.exports = cloneArrayBuffer; diff --git a/backend/node_modules/lodash/_cloneBuffer.js b/backend/node_modules/lodash/_cloneBuffer.js new file mode 100644 index 00000000..27c48109 --- /dev/null +++ b/backend/node_modules/lodash/_cloneBuffer.js @@ -0,0 +1,35 @@ +var root = require('./_root'); + +/** Detect free variable `exports`. */ +var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports; + +/** Detect free variable `module`. */ +var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module; + +/** Detect the popular CommonJS extension `module.exports`. */ +var moduleExports = freeModule && freeModule.exports === freeExports; + +/** Built-in value references. */ +var Buffer = moduleExports ? root.Buffer : undefined, + allocUnsafe = Buffer ? Buffer.allocUnsafe : undefined; + +/** + * Creates a clone of `buffer`. + * + * @private + * @param {Buffer} buffer The buffer to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Buffer} Returns the cloned buffer. + */ +function cloneBuffer(buffer, isDeep) { + if (isDeep) { + return buffer.slice(); + } + var length = buffer.length, + result = allocUnsafe ? allocUnsafe(length) : new buffer.constructor(length); + + buffer.copy(result); + return result; +} + +module.exports = cloneBuffer; diff --git a/backend/node_modules/lodash/_cloneDataView.js b/backend/node_modules/lodash/_cloneDataView.js new file mode 100644 index 00000000..9c9b7b05 --- /dev/null +++ b/backend/node_modules/lodash/_cloneDataView.js @@ -0,0 +1,16 @@ +var cloneArrayBuffer = require('./_cloneArrayBuffer'); + +/** + * Creates a clone of `dataView`. + * + * @private + * @param {Object} dataView The data view to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned data view. + */ +function cloneDataView(dataView, isDeep) { + var buffer = isDeep ? cloneArrayBuffer(dataView.buffer) : dataView.buffer; + return new dataView.constructor(buffer, dataView.byteOffset, dataView.byteLength); +} + +module.exports = cloneDataView; diff --git a/backend/node_modules/lodash/_cloneRegExp.js b/backend/node_modules/lodash/_cloneRegExp.js new file mode 100644 index 00000000..64a30dfb --- /dev/null +++ b/backend/node_modules/lodash/_cloneRegExp.js @@ -0,0 +1,17 @@ +/** Used to match `RegExp` flags from their coerced string values. */ +var reFlags = /\w*$/; + +/** + * Creates a clone of `regexp`. + * + * @private + * @param {Object} regexp The regexp to clone. + * @returns {Object} Returns the cloned regexp. + */ +function cloneRegExp(regexp) { + var result = new regexp.constructor(regexp.source, reFlags.exec(regexp)); + result.lastIndex = regexp.lastIndex; + return result; +} + +module.exports = cloneRegExp; diff --git a/backend/node_modules/lodash/_cloneSymbol.js b/backend/node_modules/lodash/_cloneSymbol.js new file mode 100644 index 00000000..bede39f5 --- /dev/null +++ b/backend/node_modules/lodash/_cloneSymbol.js @@ -0,0 +1,18 @@ +var Symbol = require('./_Symbol'); + +/** Used to convert symbols to primitives and strings. */ +var symbolProto = Symbol ? Symbol.prototype : undefined, + symbolValueOf = symbolProto ? symbolProto.valueOf : undefined; + +/** + * Creates a clone of the `symbol` object. + * + * @private + * @param {Object} symbol The symbol object to clone. 
+ * @returns {Object} Returns the cloned symbol object. + */ +function cloneSymbol(symbol) { + return symbolValueOf ? Object(symbolValueOf.call(symbol)) : {}; +} + +module.exports = cloneSymbol; diff --git a/backend/node_modules/lodash/_cloneTypedArray.js b/backend/node_modules/lodash/_cloneTypedArray.js new file mode 100644 index 00000000..7aad84d4 --- /dev/null +++ b/backend/node_modules/lodash/_cloneTypedArray.js @@ -0,0 +1,16 @@ +var cloneArrayBuffer = require('./_cloneArrayBuffer'); + +/** + * Creates a clone of `typedArray`. + * + * @private + * @param {Object} typedArray The typed array to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned typed array. + */ +function cloneTypedArray(typedArray, isDeep) { + var buffer = isDeep ? cloneArrayBuffer(typedArray.buffer) : typedArray.buffer; + return new typedArray.constructor(buffer, typedArray.byteOffset, typedArray.length); +} + +module.exports = cloneTypedArray; diff --git a/backend/node_modules/lodash/_compareAscending.js b/backend/node_modules/lodash/_compareAscending.js new file mode 100644 index 00000000..8dc27910 --- /dev/null +++ b/backend/node_modules/lodash/_compareAscending.js @@ -0,0 +1,41 @@ +var isSymbol = require('./isSymbol'); + +/** + * Compares values to sort them in ascending order. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {number} Returns the sort order indicator for `value`. + */ +function compareAscending(value, other) { + if (value !== other) { + var valIsDefined = value !== undefined, + valIsNull = value === null, + valIsReflexive = value === value, + valIsSymbol = isSymbol(value); + + var othIsDefined = other !== undefined, + othIsNull = other === null, + othIsReflexive = other === other, + othIsSymbol = isSymbol(other); + + if ((!othIsNull && !othIsSymbol && !valIsSymbol && value > other) || + (valIsSymbol && othIsDefined && othIsReflexive && !othIsNull && !othIsSymbol) || + (valIsNull && othIsDefined && othIsReflexive) || + (!valIsDefined && othIsReflexive) || + !valIsReflexive) { + return 1; + } + if ((!valIsNull && !valIsSymbol && !othIsSymbol && value < other) || + (othIsSymbol && valIsDefined && valIsReflexive && !valIsNull && !valIsSymbol) || + (othIsNull && valIsDefined && valIsReflexive) || + (!othIsDefined && valIsReflexive) || + !othIsReflexive) { + return -1; + } + } + return 0; +} + +module.exports = compareAscending; diff --git a/backend/node_modules/lodash/_compareMultiple.js b/backend/node_modules/lodash/_compareMultiple.js new file mode 100644 index 00000000..ad61f0fb --- /dev/null +++ b/backend/node_modules/lodash/_compareMultiple.js @@ -0,0 +1,44 @@ +var compareAscending = require('./_compareAscending'); + +/** + * Used by `_.orderBy` to compare multiple properties of a value to another + * and stable sort them. + * + * If `orders` is unspecified, all values are sorted in ascending order. Otherwise, + * specify an order of "desc" for descending or "asc" for ascending sort order + * of corresponding values. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {boolean[]|string[]} orders The order to sort by for each property. + * @returns {number} Returns the sort order indicator for `object`. 
+ */ +function compareMultiple(object, other, orders) { + var index = -1, + objCriteria = object.criteria, + othCriteria = other.criteria, + length = objCriteria.length, + ordersLength = orders.length; + + while (++index < length) { + var result = compareAscending(objCriteria[index], othCriteria[index]); + if (result) { + if (index >= ordersLength) { + return result; + } + var order = orders[index]; + return result * (order == 'desc' ? -1 : 1); + } + } + // Fixes an `Array#sort` bug in the JS engine embedded in Adobe applications + // that causes it, under certain circumstances, to provide the same value for + // `object` and `other`. See https://github.com/jashkenas/underscore/pull/1247 + // for more details. + // + // This also ensures a stable sort in V8 and other engines. + // See https://bugs.chromium.org/p/v8/issues/detail?id=90 for more details. + return object.index - other.index; +} + +module.exports = compareMultiple; diff --git a/backend/node_modules/lodash/_composeArgs.js b/backend/node_modules/lodash/_composeArgs.js new file mode 100644 index 00000000..1ce40f4f --- /dev/null +++ b/backend/node_modules/lodash/_composeArgs.js @@ -0,0 +1,39 @@ +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max; + +/** + * Creates an array that is the composition of partially applied arguments, + * placeholders, and provided arguments into a single array of arguments. + * + * @private + * @param {Array} args The provided arguments. + * @param {Array} partials The arguments to prepend to those provided. + * @param {Array} holders The `partials` placeholder indexes. + * @params {boolean} [isCurried] Specify composing for a curried function. + * @returns {Array} Returns the new array of composed arguments. + */ +function composeArgs(args, partials, holders, isCurried) { + var argsIndex = -1, + argsLength = args.length, + holdersLength = holders.length, + leftIndex = -1, + leftLength = partials.length, + rangeLength = nativeMax(argsLength - holdersLength, 0), + result = Array(leftLength + rangeLength), + isUncurried = !isCurried; + + while (++leftIndex < leftLength) { + result[leftIndex] = partials[leftIndex]; + } + while (++argsIndex < holdersLength) { + if (isUncurried || argsIndex < argsLength) { + result[holders[argsIndex]] = args[argsIndex]; + } + } + while (rangeLength--) { + result[leftIndex++] = args[argsIndex++]; + } + return result; +} + +module.exports = composeArgs; diff --git a/backend/node_modules/lodash/_composeArgsRight.js b/backend/node_modules/lodash/_composeArgsRight.js new file mode 100644 index 00000000..8dc588d0 --- /dev/null +++ b/backend/node_modules/lodash/_composeArgsRight.js @@ -0,0 +1,41 @@ +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max; + +/** + * This function is like `composeArgs` except that the arguments composition + * is tailored for `_.partialRight`. + * + * @private + * @param {Array} args The provided arguments. + * @param {Array} partials The arguments to append to those provided. + * @param {Array} holders The `partials` placeholder indexes. + * @params {boolean} [isCurried] Specify composing for a curried function. + * @returns {Array} Returns the new array of composed arguments. 
+ */ +function composeArgsRight(args, partials, holders, isCurried) { + var argsIndex = -1, + argsLength = args.length, + holdersIndex = -1, + holdersLength = holders.length, + rightIndex = -1, + rightLength = partials.length, + rangeLength = nativeMax(argsLength - holdersLength, 0), + result = Array(rangeLength + rightLength), + isUncurried = !isCurried; + + while (++argsIndex < rangeLength) { + result[argsIndex] = args[argsIndex]; + } + var offset = argsIndex; + while (++rightIndex < rightLength) { + result[offset + rightIndex] = partials[rightIndex]; + } + while (++holdersIndex < holdersLength) { + if (isUncurried || argsIndex < argsLength) { + result[offset + holders[holdersIndex]] = args[argsIndex++]; + } + } + return result; +} + +module.exports = composeArgsRight; diff --git a/backend/node_modules/lodash/_copyArray.js b/backend/node_modules/lodash/_copyArray.js new file mode 100644 index 00000000..cd94d5d0 --- /dev/null +++ b/backend/node_modules/lodash/_copyArray.js @@ -0,0 +1,20 @@ +/** + * Copies the values of `source` to `array`. + * + * @private + * @param {Array} source The array to copy values from. + * @param {Array} [array=[]] The array to copy values to. + * @returns {Array} Returns `array`. + */ +function copyArray(source, array) { + var index = -1, + length = source.length; + + array || (array = Array(length)); + while (++index < length) { + array[index] = source[index]; + } + return array; +} + +module.exports = copyArray; diff --git a/backend/node_modules/lodash/_copyObject.js b/backend/node_modules/lodash/_copyObject.js new file mode 100644 index 00000000..2f2a5c23 --- /dev/null +++ b/backend/node_modules/lodash/_copyObject.js @@ -0,0 +1,40 @@ +var assignValue = require('./_assignValue'), + baseAssignValue = require('./_baseAssignValue'); + +/** + * Copies properties of `source` to `object`. + * + * @private + * @param {Object} source The object to copy properties from. + * @param {Array} props The property identifiers to copy. + * @param {Object} [object={}] The object to copy properties to. + * @param {Function} [customizer] The function to customize copied values. + * @returns {Object} Returns `object`. + */ +function copyObject(source, props, object, customizer) { + var isNew = !object; + object || (object = {}); + + var index = -1, + length = props.length; + + while (++index < length) { + var key = props[index]; + + var newValue = customizer + ? customizer(object[key], source[key], key, object, source) + : undefined; + + if (newValue === undefined) { + newValue = source[key]; + } + if (isNew) { + baseAssignValue(object, key, newValue); + } else { + assignValue(object, key, newValue); + } + } + return object; +} + +module.exports = copyObject; diff --git a/backend/node_modules/lodash/_copySymbols.js b/backend/node_modules/lodash/_copySymbols.js new file mode 100644 index 00000000..c35944ab --- /dev/null +++ b/backend/node_modules/lodash/_copySymbols.js @@ -0,0 +1,16 @@ +var copyObject = require('./_copyObject'), + getSymbols = require('./_getSymbols'); + +/** + * Copies own symbols of `source` to `object`. + * + * @private + * @param {Object} source The object to copy symbols from. + * @param {Object} [object={}] The object to copy symbols to. + * @returns {Object} Returns `object`. 
+ */ +function copySymbols(source, object) { + return copyObject(source, getSymbols(source), object); +} + +module.exports = copySymbols; diff --git a/backend/node_modules/lodash/_copySymbolsIn.js b/backend/node_modules/lodash/_copySymbolsIn.js new file mode 100644 index 00000000..fdf20a73 --- /dev/null +++ b/backend/node_modules/lodash/_copySymbolsIn.js @@ -0,0 +1,16 @@ +var copyObject = require('./_copyObject'), + getSymbolsIn = require('./_getSymbolsIn'); + +/** + * Copies own and inherited symbols of `source` to `object`. + * + * @private + * @param {Object} source The object to copy symbols from. + * @param {Object} [object={}] The object to copy symbols to. + * @returns {Object} Returns `object`. + */ +function copySymbolsIn(source, object) { + return copyObject(source, getSymbolsIn(source), object); +} + +module.exports = copySymbolsIn; diff --git a/backend/node_modules/lodash/_coreJsData.js b/backend/node_modules/lodash/_coreJsData.js new file mode 100644 index 00000000..f8e5b4e3 --- /dev/null +++ b/backend/node_modules/lodash/_coreJsData.js @@ -0,0 +1,6 @@ +var root = require('./_root'); + +/** Used to detect overreaching core-js shims. */ +var coreJsData = root['__core-js_shared__']; + +module.exports = coreJsData; diff --git a/backend/node_modules/lodash/_countHolders.js b/backend/node_modules/lodash/_countHolders.js new file mode 100644 index 00000000..718fcdaa --- /dev/null +++ b/backend/node_modules/lodash/_countHolders.js @@ -0,0 +1,21 @@ +/** + * Gets the number of `placeholder` occurrences in `array`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} placeholder The placeholder to search for. + * @returns {number} Returns the placeholder count. + */ +function countHolders(array, placeholder) { + var length = array.length, + result = 0; + + while (length--) { + if (array[length] === placeholder) { + ++result; + } + } + return result; +} + +module.exports = countHolders; diff --git a/backend/node_modules/lodash/_createAggregator.js b/backend/node_modules/lodash/_createAggregator.js new file mode 100644 index 00000000..0be42c41 --- /dev/null +++ b/backend/node_modules/lodash/_createAggregator.js @@ -0,0 +1,23 @@ +var arrayAggregator = require('./_arrayAggregator'), + baseAggregator = require('./_baseAggregator'), + baseIteratee = require('./_baseIteratee'), + isArray = require('./isArray'); + +/** + * Creates a function like `_.groupBy`. + * + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} [initializer] The accumulator object initializer. + * @returns {Function} Returns the new aggregator function. + */ +function createAggregator(setter, initializer) { + return function(collection, iteratee) { + var func = isArray(collection) ? arrayAggregator : baseAggregator, + accumulator = initializer ? initializer() : {}; + + return func(collection, setter, baseIteratee(iteratee, 2), accumulator); + }; +} + +module.exports = createAggregator; diff --git a/backend/node_modules/lodash/_createAssigner.js b/backend/node_modules/lodash/_createAssigner.js new file mode 100644 index 00000000..1f904c51 --- /dev/null +++ b/backend/node_modules/lodash/_createAssigner.js @@ -0,0 +1,37 @@ +var baseRest = require('./_baseRest'), + isIterateeCall = require('./_isIterateeCall'); + +/** + * Creates a function like `_.assign`. + * + * @private + * @param {Function} assigner The function to assign values. + * @returns {Function} Returns the new assigner function. 
+ */ +function createAssigner(assigner) { + return baseRest(function(object, sources) { + var index = -1, + length = sources.length, + customizer = length > 1 ? sources[length - 1] : undefined, + guard = length > 2 ? sources[2] : undefined; + + customizer = (assigner.length > 3 && typeof customizer == 'function') + ? (length--, customizer) + : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + customizer = length < 3 ? undefined : customizer; + length = 1; + } + object = Object(object); + while (++index < length) { + var source = sources[index]; + if (source) { + assigner(object, source, index, customizer); + } + } + return object; + }); +} + +module.exports = createAssigner; diff --git a/backend/node_modules/lodash/_createBaseEach.js b/backend/node_modules/lodash/_createBaseEach.js new file mode 100644 index 00000000..d24fdd1b --- /dev/null +++ b/backend/node_modules/lodash/_createBaseEach.js @@ -0,0 +1,32 @@ +var isArrayLike = require('./isArrayLike'); + +/** + * Creates a `baseEach` or `baseEachRight` function. + * + * @private + * @param {Function} eachFunc The function to iterate over a collection. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ +function createBaseEach(eachFunc, fromRight) { + return function(collection, iteratee) { + if (collection == null) { + return collection; + } + if (!isArrayLike(collection)) { + return eachFunc(collection, iteratee); + } + var length = collection.length, + index = fromRight ? length : -1, + iterable = Object(collection); + + while ((fromRight ? index-- : ++index < length)) { + if (iteratee(iterable[index], index, iterable) === false) { + break; + } + } + return collection; + }; +} + +module.exports = createBaseEach; diff --git a/backend/node_modules/lodash/_createBaseFor.js b/backend/node_modules/lodash/_createBaseFor.js new file mode 100644 index 00000000..94cbf297 --- /dev/null +++ b/backend/node_modules/lodash/_createBaseFor.js @@ -0,0 +1,25 @@ +/** + * Creates a base function for methods like `_.forIn` and `_.forOwn`. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ +function createBaseFor(fromRight) { + return function(object, iteratee, keysFunc) { + var index = -1, + iterable = Object(object), + props = keysFunc(object), + length = props.length; + + while (length--) { + var key = props[fromRight ? length : ++index]; + if (iteratee(iterable[key], key, iterable) === false) { + break; + } + } + return object; + }; +} + +module.exports = createBaseFor; diff --git a/backend/node_modules/lodash/_createBind.js b/backend/node_modules/lodash/_createBind.js new file mode 100644 index 00000000..07cb99f4 --- /dev/null +++ b/backend/node_modules/lodash/_createBind.js @@ -0,0 +1,28 @@ +var createCtor = require('./_createCtor'), + root = require('./_root'); + +/** Used to compose bitmasks for function metadata. */ +var WRAP_BIND_FLAG = 1; + +/** + * Creates a function that wraps `func` to invoke it with the optional `this` + * binding of `thisArg`. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} [thisArg] The `this` binding of `func`. + * @returns {Function} Returns the new wrapped function. 
+ */ +function createBind(func, bitmask, thisArg) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); + + function wrapper() { + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return fn.apply(isBind ? thisArg : this, arguments); + } + return wrapper; +} + +module.exports = createBind; diff --git a/backend/node_modules/lodash/_createCaseFirst.js b/backend/node_modules/lodash/_createCaseFirst.js new file mode 100644 index 00000000..fe8ea483 --- /dev/null +++ b/backend/node_modules/lodash/_createCaseFirst.js @@ -0,0 +1,33 @@ +var castSlice = require('./_castSlice'), + hasUnicode = require('./_hasUnicode'), + stringToArray = require('./_stringToArray'), + toString = require('./toString'); + +/** + * Creates a function like `_.lowerFirst`. + * + * @private + * @param {string} methodName The name of the `String` case method to use. + * @returns {Function} Returns the new case function. + */ +function createCaseFirst(methodName) { + return function(string) { + string = toString(string); + + var strSymbols = hasUnicode(string) + ? stringToArray(string) + : undefined; + + var chr = strSymbols + ? strSymbols[0] + : string.charAt(0); + + var trailing = strSymbols + ? castSlice(strSymbols, 1).join('') + : string.slice(1); + + return chr[methodName]() + trailing; + }; +} + +module.exports = createCaseFirst; diff --git a/backend/node_modules/lodash/_createCompounder.js b/backend/node_modules/lodash/_createCompounder.js new file mode 100644 index 00000000..8d4cee2c --- /dev/null +++ b/backend/node_modules/lodash/_createCompounder.js @@ -0,0 +1,24 @@ +var arrayReduce = require('./_arrayReduce'), + deburr = require('./deburr'), + words = require('./words'); + +/** Used to compose unicode capture groups. */ +var rsApos = "['\u2019]"; + +/** Used to match apostrophes. */ +var reApos = RegExp(rsApos, 'g'); + +/** + * Creates a function like `_.camelCase`. + * + * @private + * @param {Function} callback The function to combine each word. + * @returns {Function} Returns the new compounder function. + */ +function createCompounder(callback) { + return function(string) { + return arrayReduce(words(deburr(string).replace(reApos, '')), callback, ''); + }; +} + +module.exports = createCompounder; diff --git a/backend/node_modules/lodash/_createCtor.js b/backend/node_modules/lodash/_createCtor.js new file mode 100644 index 00000000..9047aa5f --- /dev/null +++ b/backend/node_modules/lodash/_createCtor.js @@ -0,0 +1,37 @@ +var baseCreate = require('./_baseCreate'), + isObject = require('./isObject'); + +/** + * Creates a function that produces an instance of `Ctor` regardless of + * whether it was invoked as part of a `new` expression or by `call` or `apply`. + * + * @private + * @param {Function} Ctor The constructor to wrap. + * @returns {Function} Returns the new wrapped function. + */ +function createCtor(Ctor) { + return function() { + // Use a `switch` statement to work with class constructors. See + // http://ecma-international.org/ecma-262/7.0/#sec-ecmascript-function-objects-call-thisargument-argumentslist + // for more details. 
+ var args = arguments; + switch (args.length) { + case 0: return new Ctor; + case 1: return new Ctor(args[0]); + case 2: return new Ctor(args[0], args[1]); + case 3: return new Ctor(args[0], args[1], args[2]); + case 4: return new Ctor(args[0], args[1], args[2], args[3]); + case 5: return new Ctor(args[0], args[1], args[2], args[3], args[4]); + case 6: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5]); + case 7: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5], args[6]); + } + var thisBinding = baseCreate(Ctor.prototype), + result = Ctor.apply(thisBinding, args); + + // Mimic the constructor's `return` behavior. + // See https://es5.github.io/#x13.2.2 for more details. + return isObject(result) ? result : thisBinding; + }; +} + +module.exports = createCtor; diff --git a/backend/node_modules/lodash/_createCurry.js b/backend/node_modules/lodash/_createCurry.js new file mode 100644 index 00000000..f06c2cdd --- /dev/null +++ b/backend/node_modules/lodash/_createCurry.js @@ -0,0 +1,46 @@ +var apply = require('./_apply'), + createCtor = require('./_createCtor'), + createHybrid = require('./_createHybrid'), + createRecurry = require('./_createRecurry'), + getHolder = require('./_getHolder'), + replaceHolders = require('./_replaceHolders'), + root = require('./_root'); + +/** + * Creates a function that wraps `func` to enable currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {number} arity The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ +function createCurry(func, bitmask, arity) { + var Ctor = createCtor(func); + + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length, + placeholder = getHolder(wrapper); + + while (index--) { + args[index] = arguments[index]; + } + var holders = (length < 3 && args[0] !== placeholder && args[length - 1] !== placeholder) + ? [] + : replaceHolders(args, placeholder); + + length -= holders.length; + if (length < arity) { + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, undefined, + args, holders, undefined, undefined, arity - length); + } + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return apply(fn, this, args); + } + return wrapper; +} + +module.exports = createCurry; diff --git a/backend/node_modules/lodash/_createFind.js b/backend/node_modules/lodash/_createFind.js new file mode 100644 index 00000000..8859ff89 --- /dev/null +++ b/backend/node_modules/lodash/_createFind.js @@ -0,0 +1,25 @@ +var baseIteratee = require('./_baseIteratee'), + isArrayLike = require('./isArrayLike'), + keys = require('./keys'); + +/** + * Creates a `_.find` or `_.findLast` function. + * + * @private + * @param {Function} findIndexFunc The function to find the collection index. + * @returns {Function} Returns the new find function. + */ +function createFind(findIndexFunc) { + return function(collection, predicate, fromIndex) { + var iterable = Object(collection); + if (!isArrayLike(collection)) { + var iteratee = baseIteratee(predicate, 3); + collection = keys(collection); + predicate = function(key) { return iteratee(iterable[key], key, iterable); }; + } + var index = findIndexFunc(collection, predicate, fromIndex); + return index > -1 ? iterable[iteratee ? 
collection[index] : index] : undefined; + }; +} + +module.exports = createFind; diff --git a/backend/node_modules/lodash/_createFlow.js b/backend/node_modules/lodash/_createFlow.js new file mode 100644 index 00000000..baaddbf5 --- /dev/null +++ b/backend/node_modules/lodash/_createFlow.js @@ -0,0 +1,78 @@ +var LodashWrapper = require('./_LodashWrapper'), + flatRest = require('./_flatRest'), + getData = require('./_getData'), + getFuncName = require('./_getFuncName'), + isArray = require('./isArray'), + isLaziable = require('./_isLaziable'); + +/** Error message constants. */ +var FUNC_ERROR_TEXT = 'Expected a function'; + +/** Used to compose bitmasks for function metadata. */ +var WRAP_CURRY_FLAG = 8, + WRAP_PARTIAL_FLAG = 32, + WRAP_ARY_FLAG = 128, + WRAP_REARG_FLAG = 256; + +/** + * Creates a `_.flow` or `_.flowRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new flow function. + */ +function createFlow(fromRight) { + return flatRest(function(funcs) { + var length = funcs.length, + index = length, + prereq = LodashWrapper.prototype.thru; + + if (fromRight) { + funcs.reverse(); + } + while (index--) { + var func = funcs[index]; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + if (prereq && !wrapper && getFuncName(func) == 'wrapper') { + var wrapper = new LodashWrapper([], true); + } + } + index = wrapper ? index : length; + while (++index < length) { + func = funcs[index]; + + var funcName = getFuncName(func), + data = funcName == 'wrapper' ? getData(func) : undefined; + + if (data && isLaziable(data[0]) && + data[1] == (WRAP_ARY_FLAG | WRAP_CURRY_FLAG | WRAP_PARTIAL_FLAG | WRAP_REARG_FLAG) && + !data[4].length && data[9] == 1 + ) { + wrapper = wrapper[getFuncName(data[0])].apply(wrapper, data[3]); + } else { + wrapper = (func.length == 1 && isLaziable(func)) + ? wrapper[funcName]() + : wrapper.thru(func); + } + } + return function() { + var args = arguments, + value = args[0]; + + if (wrapper && args.length == 1 && isArray(value)) { + return wrapper.plant(value).value(); + } + var index = 0, + result = length ? funcs[index].apply(this, args) : value; + + while (++index < length) { + result = funcs[index].call(this, result); + } + return result; + }; + }); +} + +module.exports = createFlow; diff --git a/backend/node_modules/lodash/_createHybrid.js b/backend/node_modules/lodash/_createHybrid.js new file mode 100644 index 00000000..b671bd11 --- /dev/null +++ b/backend/node_modules/lodash/_createHybrid.js @@ -0,0 +1,92 @@ +var composeArgs = require('./_composeArgs'), + composeArgsRight = require('./_composeArgsRight'), + countHolders = require('./_countHolders'), + createCtor = require('./_createCtor'), + createRecurry = require('./_createRecurry'), + getHolder = require('./_getHolder'), + reorder = require('./_reorder'), + replaceHolders = require('./_replaceHolders'), + root = require('./_root'); + +/** Used to compose bitmasks for function metadata. */ +var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_CURRY_FLAG = 8, + WRAP_CURRY_RIGHT_FLAG = 16, + WRAP_ARY_FLAG = 128, + WRAP_FLIP_FLAG = 512; + +/** + * Creates a function that wraps `func` to invoke it with optional `this` + * binding of `thisArg`, partial application, and currying. + * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} [thisArg] The `this` binding of `func`. 
+ * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [partialsRight] The arguments to append to those provided + * to the new function. + * @param {Array} [holdersRight] The `partialsRight` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ +function createHybrid(func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, argPos, ary, arity) { + var isAry = bitmask & WRAP_ARY_FLAG, + isBind = bitmask & WRAP_BIND_FLAG, + isBindKey = bitmask & WRAP_BIND_KEY_FLAG, + isCurried = bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG), + isFlip = bitmask & WRAP_FLIP_FLAG, + Ctor = isBindKey ? undefined : createCtor(func); + + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length; + + while (index--) { + args[index] = arguments[index]; + } + if (isCurried) { + var placeholder = getHolder(wrapper), + holdersCount = countHolders(args, placeholder); + } + if (partials) { + args = composeArgs(args, partials, holders, isCurried); + } + if (partialsRight) { + args = composeArgsRight(args, partialsRight, holdersRight, isCurried); + } + length -= holdersCount; + if (isCurried && length < arity) { + var newHolders = replaceHolders(args, placeholder); + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, thisArg, + args, newHolders, argPos, ary, arity - length + ); + } + var thisBinding = isBind ? thisArg : this, + fn = isBindKey ? thisBinding[func] : func; + + length = args.length; + if (argPos) { + args = reorder(args, argPos); + } else if (isFlip && length > 1) { + args.reverse(); + } + if (isAry && ary < length) { + args.length = ary; + } + if (this && this !== root && this instanceof wrapper) { + fn = Ctor || createCtor(fn); + } + return fn.apply(thisBinding, args); + } + return wrapper; +} + +module.exports = createHybrid; diff --git a/backend/node_modules/lodash/_createInverter.js b/backend/node_modules/lodash/_createInverter.js new file mode 100644 index 00000000..6c0c5629 --- /dev/null +++ b/backend/node_modules/lodash/_createInverter.js @@ -0,0 +1,17 @@ +var baseInverter = require('./_baseInverter'); + +/** + * Creates a function like `_.invertBy`. + * + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} toIteratee The function to resolve iteratees. + * @returns {Function} Returns the new inverter function. + */ +function createInverter(setter, toIteratee) { + return function(object, iteratee) { + return baseInverter(object, setter, toIteratee(iteratee), {}); + }; +} + +module.exports = createInverter; diff --git a/backend/node_modules/lodash/_createMathOperation.js b/backend/node_modules/lodash/_createMathOperation.js new file mode 100644 index 00000000..f1e238ac --- /dev/null +++ b/backend/node_modules/lodash/_createMathOperation.js @@ -0,0 +1,38 @@ +var baseToNumber = require('./_baseToNumber'), + baseToString = require('./_baseToString'); + +/** + * Creates a function that performs a mathematical operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @param {number} [defaultValue] The value used for `undefined` arguments. 
+ * @returns {Function} Returns the new mathematical operation function. + */ +function createMathOperation(operator, defaultValue) { + return function(value, other) { + var result; + if (value === undefined && other === undefined) { + return defaultValue; + } + if (value !== undefined) { + result = value; + } + if (other !== undefined) { + if (result === undefined) { + return other; + } + if (typeof value == 'string' || typeof other == 'string') { + value = baseToString(value); + other = baseToString(other); + } else { + value = baseToNumber(value); + other = baseToNumber(other); + } + result = operator(value, other); + } + return result; + }; +} + +module.exports = createMathOperation; diff --git a/backend/node_modules/lodash/_createOver.js b/backend/node_modules/lodash/_createOver.js new file mode 100644 index 00000000..3b945516 --- /dev/null +++ b/backend/node_modules/lodash/_createOver.js @@ -0,0 +1,27 @@ +var apply = require('./_apply'), + arrayMap = require('./_arrayMap'), + baseIteratee = require('./_baseIteratee'), + baseRest = require('./_baseRest'), + baseUnary = require('./_baseUnary'), + flatRest = require('./_flatRest'); + +/** + * Creates a function like `_.over`. + * + * @private + * @param {Function} arrayFunc The function to iterate over iteratees. + * @returns {Function} Returns the new over function. + */ +function createOver(arrayFunc) { + return flatRest(function(iteratees) { + iteratees = arrayMap(iteratees, baseUnary(baseIteratee)); + return baseRest(function(args) { + var thisArg = this; + return arrayFunc(iteratees, function(iteratee) { + return apply(iteratee, thisArg, args); + }); + }); + }); +} + +module.exports = createOver; diff --git a/backend/node_modules/lodash/_createPadding.js b/backend/node_modules/lodash/_createPadding.js new file mode 100644 index 00000000..2124612b --- /dev/null +++ b/backend/node_modules/lodash/_createPadding.js @@ -0,0 +1,33 @@ +var baseRepeat = require('./_baseRepeat'), + baseToString = require('./_baseToString'), + castSlice = require('./_castSlice'), + hasUnicode = require('./_hasUnicode'), + stringSize = require('./_stringSize'), + stringToArray = require('./_stringToArray'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeCeil = Math.ceil; + +/** + * Creates the padding for `string` based on `length`. The `chars` string + * is truncated if the number of characters exceeds `length`. + * + * @private + * @param {number} length The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padding for `string`. + */ +function createPadding(length, chars) { + chars = chars === undefined ? ' ' : baseToString(chars); + + var charsLength = chars.length; + if (charsLength < 2) { + return charsLength ? baseRepeat(chars, length) : chars; + } + var result = baseRepeat(chars, nativeCeil(length / stringSize(chars))); + return hasUnicode(chars) + ? castSlice(stringToArray(result), 0, length).join('') + : result.slice(0, length); +} + +module.exports = createPadding; diff --git a/backend/node_modules/lodash/_createPartial.js b/backend/node_modules/lodash/_createPartial.js new file mode 100644 index 00000000..e16c248b --- /dev/null +++ b/backend/node_modules/lodash/_createPartial.js @@ -0,0 +1,43 @@ +var apply = require('./_apply'), + createCtor = require('./_createCtor'), + root = require('./_root'); + +/** Used to compose bitmasks for function metadata. 
*/ +var WRAP_BIND_FLAG = 1; + +/** + * Creates a function that wraps `func` to invoke it with the `this` binding + * of `thisArg` and `partials` prepended to the arguments it receives. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} partials The arguments to prepend to those provided to + * the new function. + * @returns {Function} Returns the new wrapped function. + */ +function createPartial(func, bitmask, thisArg, partials) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); + + function wrapper() { + var argsIndex = -1, + argsLength = arguments.length, + leftIndex = -1, + leftLength = partials.length, + args = Array(leftLength + argsLength), + fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + + while (++leftIndex < leftLength) { + args[leftIndex] = partials[leftIndex]; + } + while (argsLength--) { + args[leftIndex++] = arguments[++argsIndex]; + } + return apply(fn, isBind ? thisArg : this, args); + } + return wrapper; +} + +module.exports = createPartial; diff --git a/backend/node_modules/lodash/_createRange.js b/backend/node_modules/lodash/_createRange.js new file mode 100644 index 00000000..9f52c779 --- /dev/null +++ b/backend/node_modules/lodash/_createRange.js @@ -0,0 +1,30 @@ +var baseRange = require('./_baseRange'), + isIterateeCall = require('./_isIterateeCall'), + toFinite = require('./toFinite'); + +/** + * Creates a `_.range` or `_.rangeRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new range function. + */ +function createRange(fromRight) { + return function(start, end, step) { + if (step && typeof step != 'number' && isIterateeCall(start, end, step)) { + end = step = undefined; + } + // Ensure the sign of `-0` is preserved. + start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); + } + step = step === undefined ? (start < end ? 1 : -1) : toFinite(step); + return baseRange(start, end, step, fromRight); + }; +} + +module.exports = createRange; diff --git a/backend/node_modules/lodash/_createRecurry.js b/backend/node_modules/lodash/_createRecurry.js new file mode 100644 index 00000000..eb29fb24 --- /dev/null +++ b/backend/node_modules/lodash/_createRecurry.js @@ -0,0 +1,56 @@ +var isLaziable = require('./_isLaziable'), + setData = require('./_setData'), + setWrapToString = require('./_setWrapToString'); + +/** Used to compose bitmasks for function metadata. */ +var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_CURRY_BOUND_FLAG = 4, + WRAP_CURRY_FLAG = 8, + WRAP_PARTIAL_FLAG = 32, + WRAP_PARTIAL_RIGHT_FLAG = 64; + +/** + * Creates a function that wraps `func` to continue currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {Function} wrapFunc The function to create the `func` wrapper. + * @param {*} placeholder The placeholder value. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. 
+ * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ +function createRecurry(func, bitmask, wrapFunc, placeholder, thisArg, partials, holders, argPos, ary, arity) { + var isCurry = bitmask & WRAP_CURRY_FLAG, + newHolders = isCurry ? holders : undefined, + newHoldersRight = isCurry ? undefined : holders, + newPartials = isCurry ? partials : undefined, + newPartialsRight = isCurry ? undefined : partials; + + bitmask |= (isCurry ? WRAP_PARTIAL_FLAG : WRAP_PARTIAL_RIGHT_FLAG); + bitmask &= ~(isCurry ? WRAP_PARTIAL_RIGHT_FLAG : WRAP_PARTIAL_FLAG); + + if (!(bitmask & WRAP_CURRY_BOUND_FLAG)) { + bitmask &= ~(WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG); + } + var newData = [ + func, bitmask, thisArg, newPartials, newHolders, newPartialsRight, + newHoldersRight, argPos, ary, arity + ]; + + var result = wrapFunc.apply(undefined, newData); + if (isLaziable(func)) { + setData(result, newData); + } + result.placeholder = placeholder; + return setWrapToString(result, func, bitmask); +} + +module.exports = createRecurry; diff --git a/backend/node_modules/lodash/_createRelationalOperation.js b/backend/node_modules/lodash/_createRelationalOperation.js new file mode 100644 index 00000000..a17c6b5e --- /dev/null +++ b/backend/node_modules/lodash/_createRelationalOperation.js @@ -0,0 +1,20 @@ +var toNumber = require('./toNumber'); + +/** + * Creates a function that performs a relational operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @returns {Function} Returns the new relational operation function. + */ +function createRelationalOperation(operator) { + return function(value, other) { + if (!(typeof value == 'string' && typeof other == 'string')) { + value = toNumber(value); + other = toNumber(other); + } + return operator(value, other); + }; +} + +module.exports = createRelationalOperation; diff --git a/backend/node_modules/lodash/_createRound.js b/backend/node_modules/lodash/_createRound.js new file mode 100644 index 00000000..88be5df3 --- /dev/null +++ b/backend/node_modules/lodash/_createRound.js @@ -0,0 +1,35 @@ +var root = require('./_root'), + toInteger = require('./toInteger'), + toNumber = require('./toNumber'), + toString = require('./toString'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeIsFinite = root.isFinite, + nativeMin = Math.min; + +/** + * Creates a function like `_.round`. + * + * @private + * @param {string} methodName The name of the `Math` method to use when rounding. + * @returns {Function} Returns the new round function. + */ +function createRound(methodName) { + var func = Math[methodName]; + return function(number, precision) { + number = toNumber(number); + precision = precision == null ? 0 : nativeMin(toInteger(precision), 292); + if (precision && nativeIsFinite(number)) { + // Shift with exponential notation to avoid floating-point issues. + // See [MDN](https://mdn.io/round#Examples) for more details. 
+ var pair = (toString(number) + 'e').split('e'), + value = func(pair[0] + 'e' + (+pair[1] + precision)); + + pair = (toString(value) + 'e').split('e'); + return +(pair[0] + 'e' + (+pair[1] - precision)); + } + return func(number); + }; +} + +module.exports = createRound; diff --git a/backend/node_modules/lodash/_createSet.js b/backend/node_modules/lodash/_createSet.js new file mode 100644 index 00000000..0f644eea --- /dev/null +++ b/backend/node_modules/lodash/_createSet.js @@ -0,0 +1,19 @@ +var Set = require('./_Set'), + noop = require('./noop'), + setToArray = require('./_setToArray'); + +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0; + +/** + * Creates a set object of `values`. + * + * @private + * @param {Array} values The values to add to the set. + * @returns {Object} Returns the new set. + */ +var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { + return new Set(values); +}; + +module.exports = createSet; diff --git a/backend/node_modules/lodash/_createToPairs.js b/backend/node_modules/lodash/_createToPairs.js new file mode 100644 index 00000000..568417af --- /dev/null +++ b/backend/node_modules/lodash/_createToPairs.js @@ -0,0 +1,30 @@ +var baseToPairs = require('./_baseToPairs'), + getTag = require('./_getTag'), + mapToArray = require('./_mapToArray'), + setToPairs = require('./_setToPairs'); + +/** `Object#toString` result references. */ +var mapTag = '[object Map]', + setTag = '[object Set]'; + +/** + * Creates a `_.toPairs` or `_.toPairsIn` function. + * + * @private + * @param {Function} keysFunc The function to get the keys of a given object. + * @returns {Function} Returns the new pairs function. + */ +function createToPairs(keysFunc) { + return function(object) { + var tag = getTag(object); + if (tag == mapTag) { + return mapToArray(object); + } + if (tag == setTag) { + return setToPairs(object); + } + return baseToPairs(object, keysFunc(object)); + }; +} + +module.exports = createToPairs; diff --git a/backend/node_modules/lodash/_createWrap.js b/backend/node_modules/lodash/_createWrap.js new file mode 100644 index 00000000..33f0633e --- /dev/null +++ b/backend/node_modules/lodash/_createWrap.js @@ -0,0 +1,106 @@ +var baseSetData = require('./_baseSetData'), + createBind = require('./_createBind'), + createCurry = require('./_createCurry'), + createHybrid = require('./_createHybrid'), + createPartial = require('./_createPartial'), + getData = require('./_getData'), + mergeData = require('./_mergeData'), + setData = require('./_setData'), + setWrapToString = require('./_setWrapToString'), + toInteger = require('./toInteger'); + +/** Error message constants. */ +var FUNC_ERROR_TEXT = 'Expected a function'; + +/** Used to compose bitmasks for function metadata. */ +var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_CURRY_FLAG = 8, + WRAP_CURRY_RIGHT_FLAG = 16, + WRAP_PARTIAL_FLAG = 32, + WRAP_PARTIAL_RIGHT_FLAG = 64; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max; + +/** + * Creates a function that either curries or invokes `func` with optional + * `this` binding and partially applied arguments. + * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. 
+ * 1 - `_.bind` + * 2 - `_.bindKey` + * 4 - `_.curry` or `_.curryRight` of a bound function + * 8 - `_.curry` + * 16 - `_.curryRight` + * 32 - `_.partial` + * 64 - `_.partialRight` + * 128 - `_.rearg` + * 256 - `_.ary` + * 512 - `_.flip` + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to be partially applied. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ +function createWrap(func, bitmask, thisArg, partials, holders, argPos, ary, arity) { + var isBindKey = bitmask & WRAP_BIND_KEY_FLAG; + if (!isBindKey && typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + var length = partials ? partials.length : 0; + if (!length) { + bitmask &= ~(WRAP_PARTIAL_FLAG | WRAP_PARTIAL_RIGHT_FLAG); + partials = holders = undefined; + } + ary = ary === undefined ? ary : nativeMax(toInteger(ary), 0); + arity = arity === undefined ? arity : toInteger(arity); + length -= holders ? holders.length : 0; + + if (bitmask & WRAP_PARTIAL_RIGHT_FLAG) { + var partialsRight = partials, + holdersRight = holders; + + partials = holders = undefined; + } + var data = isBindKey ? undefined : getData(func); + + var newData = [ + func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, + argPos, ary, arity + ]; + + if (data) { + mergeData(newData, data); + } + func = newData[0]; + bitmask = newData[1]; + thisArg = newData[2]; + partials = newData[3]; + holders = newData[4]; + arity = newData[9] = newData[9] === undefined + ? (isBindKey ? 0 : func.length) + : nativeMax(newData[9] - length, 0); + + if (!arity && bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG)) { + bitmask &= ~(WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG); + } + if (!bitmask || bitmask == WRAP_BIND_FLAG) { + var result = createBind(func, bitmask, thisArg); + } else if (bitmask == WRAP_CURRY_FLAG || bitmask == WRAP_CURRY_RIGHT_FLAG) { + result = createCurry(func, bitmask, arity); + } else if ((bitmask == WRAP_PARTIAL_FLAG || bitmask == (WRAP_BIND_FLAG | WRAP_PARTIAL_FLAG)) && !holders.length) { + result = createPartial(func, bitmask, thisArg, partials); + } else { + result = createHybrid.apply(undefined, newData); + } + var setter = data ? baseSetData : setData; + return setWrapToString(setter(result, newData), func, bitmask); +} + +module.exports = createWrap; diff --git a/backend/node_modules/lodash/_customDefaultsAssignIn.js b/backend/node_modules/lodash/_customDefaultsAssignIn.js new file mode 100644 index 00000000..1f49e6fc --- /dev/null +++ b/backend/node_modules/lodash/_customDefaultsAssignIn.js @@ -0,0 +1,29 @@ +var eq = require('./eq'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Used by `_.defaults` to customize its `_.assignIn` use to assign properties + * of source objects to the destination object for all destination properties + * that resolve to `undefined`. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to assign. + * @param {Object} object The parent object of `objValue`. + * @returns {*} Returns the value to assign. 
+ */ +function customDefaultsAssignIn(objValue, srcValue, key, object) { + if (objValue === undefined || + (eq(objValue, objectProto[key]) && !hasOwnProperty.call(object, key))) { + return srcValue; + } + return objValue; +} + +module.exports = customDefaultsAssignIn; diff --git a/backend/node_modules/lodash/_customDefaultsMerge.js b/backend/node_modules/lodash/_customDefaultsMerge.js new file mode 100644 index 00000000..4cab3175 --- /dev/null +++ b/backend/node_modules/lodash/_customDefaultsMerge.js @@ -0,0 +1,28 @@ +var baseMerge = require('./_baseMerge'), + isObject = require('./isObject'); + +/** + * Used by `_.defaultsDeep` to customize its `_.merge` use to merge source + * objects into destination objects that are passed thru. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to merge. + * @param {Object} object The parent object of `objValue`. + * @param {Object} source The parent object of `srcValue`. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + * @returns {*} Returns the value to assign. + */ +function customDefaultsMerge(objValue, srcValue, key, object, source, stack) { + if (isObject(objValue) && isObject(srcValue)) { + // Recursively merge objects and arrays (susceptible to call stack limits). + stack.set(srcValue, objValue); + baseMerge(objValue, srcValue, undefined, customDefaultsMerge, stack); + stack['delete'](srcValue); + } + return objValue; +} + +module.exports = customDefaultsMerge; diff --git a/backend/node_modules/lodash/_customOmitClone.js b/backend/node_modules/lodash/_customOmitClone.js new file mode 100644 index 00000000..968db2ef --- /dev/null +++ b/backend/node_modules/lodash/_customOmitClone.js @@ -0,0 +1,16 @@ +var isPlainObject = require('./isPlainObject'); + +/** + * Used by `_.omit` to customize its `_.cloneDeep` use to only clone plain + * objects. + * + * @private + * @param {*} value The value to inspect. + * @param {string} key The key of the property to inspect. + * @returns {*} Returns the uncloned value or `undefined` to defer cloning to `_.cloneDeep`. + */ +function customOmitClone(value) { + return isPlainObject(value) ? undefined : value; +} + +module.exports = customOmitClone; diff --git a/backend/node_modules/lodash/_deburrLetter.js b/backend/node_modules/lodash/_deburrLetter.js new file mode 100644 index 00000000..3e531edc --- /dev/null +++ b/backend/node_modules/lodash/_deburrLetter.js @@ -0,0 +1,71 @@ +var basePropertyOf = require('./_basePropertyOf'); + +/** Used to map Latin Unicode letters to basic Latin letters. */ +var deburredLetters = { + // Latin-1 Supplement block. 
+ '\xc0': 'A', '\xc1': 'A', '\xc2': 'A', '\xc3': 'A', '\xc4': 'A', '\xc5': 'A', + '\xe0': 'a', '\xe1': 'a', '\xe2': 'a', '\xe3': 'a', '\xe4': 'a', '\xe5': 'a', + '\xc7': 'C', '\xe7': 'c', + '\xd0': 'D', '\xf0': 'd', + '\xc8': 'E', '\xc9': 'E', '\xca': 'E', '\xcb': 'E', + '\xe8': 'e', '\xe9': 'e', '\xea': 'e', '\xeb': 'e', + '\xcc': 'I', '\xcd': 'I', '\xce': 'I', '\xcf': 'I', + '\xec': 'i', '\xed': 'i', '\xee': 'i', '\xef': 'i', + '\xd1': 'N', '\xf1': 'n', + '\xd2': 'O', '\xd3': 'O', '\xd4': 'O', '\xd5': 'O', '\xd6': 'O', '\xd8': 'O', + '\xf2': 'o', '\xf3': 'o', '\xf4': 'o', '\xf5': 'o', '\xf6': 'o', '\xf8': 'o', + '\xd9': 'U', '\xda': 'U', '\xdb': 'U', '\xdc': 'U', + '\xf9': 'u', '\xfa': 'u', '\xfb': 'u', '\xfc': 'u', + '\xdd': 'Y', '\xfd': 'y', '\xff': 'y', + '\xc6': 'Ae', '\xe6': 'ae', + '\xde': 'Th', '\xfe': 'th', + '\xdf': 'ss', + // Latin Extended-A block. + '\u0100': 'A', '\u0102': 'A', '\u0104': 'A', + '\u0101': 'a', '\u0103': 'a', '\u0105': 'a', + '\u0106': 'C', '\u0108': 'C', '\u010a': 'C', '\u010c': 'C', + '\u0107': 'c', '\u0109': 'c', '\u010b': 'c', '\u010d': 'c', + '\u010e': 'D', '\u0110': 'D', '\u010f': 'd', '\u0111': 'd', + '\u0112': 'E', '\u0114': 'E', '\u0116': 'E', '\u0118': 'E', '\u011a': 'E', + '\u0113': 'e', '\u0115': 'e', '\u0117': 'e', '\u0119': 'e', '\u011b': 'e', + '\u011c': 'G', '\u011e': 'G', '\u0120': 'G', '\u0122': 'G', + '\u011d': 'g', '\u011f': 'g', '\u0121': 'g', '\u0123': 'g', + '\u0124': 'H', '\u0126': 'H', '\u0125': 'h', '\u0127': 'h', + '\u0128': 'I', '\u012a': 'I', '\u012c': 'I', '\u012e': 'I', '\u0130': 'I', + '\u0129': 'i', '\u012b': 'i', '\u012d': 'i', '\u012f': 'i', '\u0131': 'i', + '\u0134': 'J', '\u0135': 'j', + '\u0136': 'K', '\u0137': 'k', '\u0138': 'k', + '\u0139': 'L', '\u013b': 'L', '\u013d': 'L', '\u013f': 'L', '\u0141': 'L', + '\u013a': 'l', '\u013c': 'l', '\u013e': 'l', '\u0140': 'l', '\u0142': 'l', + '\u0143': 'N', '\u0145': 'N', '\u0147': 'N', '\u014a': 'N', + '\u0144': 'n', '\u0146': 'n', '\u0148': 'n', '\u014b': 'n', + '\u014c': 'O', '\u014e': 'O', '\u0150': 'O', + '\u014d': 'o', '\u014f': 'o', '\u0151': 'o', + '\u0154': 'R', '\u0156': 'R', '\u0158': 'R', + '\u0155': 'r', '\u0157': 'r', '\u0159': 'r', + '\u015a': 'S', '\u015c': 'S', '\u015e': 'S', '\u0160': 'S', + '\u015b': 's', '\u015d': 's', '\u015f': 's', '\u0161': 's', + '\u0162': 'T', '\u0164': 'T', '\u0166': 'T', + '\u0163': 't', '\u0165': 't', '\u0167': 't', + '\u0168': 'U', '\u016a': 'U', '\u016c': 'U', '\u016e': 'U', '\u0170': 'U', '\u0172': 'U', + '\u0169': 'u', '\u016b': 'u', '\u016d': 'u', '\u016f': 'u', '\u0171': 'u', '\u0173': 'u', + '\u0174': 'W', '\u0175': 'w', + '\u0176': 'Y', '\u0177': 'y', '\u0178': 'Y', + '\u0179': 'Z', '\u017b': 'Z', '\u017d': 'Z', + '\u017a': 'z', '\u017c': 'z', '\u017e': 'z', + '\u0132': 'IJ', '\u0133': 'ij', + '\u0152': 'Oe', '\u0153': 'oe', + '\u0149': "'n", '\u017f': 's' +}; + +/** + * Used by `_.deburr` to convert Latin-1 Supplement and Latin Extended-A + * letters to basic Latin letters. + * + * @private + * @param {string} letter The matched letter to deburr. + * @returns {string} Returns the deburred letter. 
+ */ +var deburrLetter = basePropertyOf(deburredLetters); + +module.exports = deburrLetter; diff --git a/backend/node_modules/lodash/_defineProperty.js b/backend/node_modules/lodash/_defineProperty.js new file mode 100644 index 00000000..b6116d92 --- /dev/null +++ b/backend/node_modules/lodash/_defineProperty.js @@ -0,0 +1,11 @@ +var getNative = require('./_getNative'); + +var defineProperty = (function() { + try { + var func = getNative(Object, 'defineProperty'); + func({}, '', {}); + return func; + } catch (e) {} +}()); + +module.exports = defineProperty; diff --git a/backend/node_modules/lodash/_equalArrays.js b/backend/node_modules/lodash/_equalArrays.js new file mode 100644 index 00000000..824228c7 --- /dev/null +++ b/backend/node_modules/lodash/_equalArrays.js @@ -0,0 +1,84 @@ +var SetCache = require('./_SetCache'), + arraySome = require('./_arraySome'), + cacheHas = require('./_cacheHas'); + +/** Used to compose bitmasks for value comparisons. */ +var COMPARE_PARTIAL_FLAG = 1, + COMPARE_UNORDERED_FLAG = 2; + +/** + * A specialized version of `baseIsEqualDeep` for arrays with support for + * partial deep comparisons. + * + * @private + * @param {Array} array The array to compare. + * @param {Array} other The other array to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `array` and `other` objects. + * @returns {boolean} Returns `true` if the arrays are equivalent, else `false`. + */ +function equalArrays(array, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + arrLength = array.length, + othLength = other.length; + + if (arrLength != othLength && !(isPartial && othLength > arrLength)) { + return false; + } + // Check that cyclic values are equal. + var arrStacked = stack.get(array); + var othStacked = stack.get(other); + if (arrStacked && othStacked) { + return arrStacked == other && othStacked == array; + } + var index = -1, + result = true, + seen = (bitmask & COMPARE_UNORDERED_FLAG) ? new SetCache : undefined; + + stack.set(array, other); + stack.set(other, array); + + // Ignore non-index properties. + while (++index < arrLength) { + var arrValue = array[index], + othValue = other[index]; + + if (customizer) { + var compared = isPartial + ? customizer(othValue, arrValue, index, other, array, stack) + : customizer(arrValue, othValue, index, array, other, stack); + } + if (compared !== undefined) { + if (compared) { + continue; + } + result = false; + break; + } + // Recursively compare arrays (susceptible to call stack limits). 
+ if (seen) { + if (!arraySome(other, function(othValue, othIndex) { + if (!cacheHas(seen, othIndex) && + (arrValue === othValue || equalFunc(arrValue, othValue, bitmask, customizer, stack))) { + return seen.push(othIndex); + } + })) { + result = false; + break; + } + } else if (!( + arrValue === othValue || + equalFunc(arrValue, othValue, bitmask, customizer, stack) + )) { + result = false; + break; + } + } + stack['delete'](array); + stack['delete'](other); + return result; +} + +module.exports = equalArrays; diff --git a/backend/node_modules/lodash/_equalByTag.js b/backend/node_modules/lodash/_equalByTag.js new file mode 100644 index 00000000..71919e86 --- /dev/null +++ b/backend/node_modules/lodash/_equalByTag.js @@ -0,0 +1,112 @@ +var Symbol = require('./_Symbol'), + Uint8Array = require('./_Uint8Array'), + eq = require('./eq'), + equalArrays = require('./_equalArrays'), + mapToArray = require('./_mapToArray'), + setToArray = require('./_setToArray'); + +/** Used to compose bitmasks for value comparisons. */ +var COMPARE_PARTIAL_FLAG = 1, + COMPARE_UNORDERED_FLAG = 2; + +/** `Object#toString` result references. */ +var boolTag = '[object Boolean]', + dateTag = '[object Date]', + errorTag = '[object Error]', + mapTag = '[object Map]', + numberTag = '[object Number]', + regexpTag = '[object RegExp]', + setTag = '[object Set]', + stringTag = '[object String]', + symbolTag = '[object Symbol]'; + +var arrayBufferTag = '[object ArrayBuffer]', + dataViewTag = '[object DataView]'; + +/** Used to convert symbols to primitives and strings. */ +var symbolProto = Symbol ? Symbol.prototype : undefined, + symbolValueOf = symbolProto ? symbolProto.valueOf : undefined; + +/** + * A specialized version of `baseIsEqualDeep` for comparing objects of + * the same `toStringTag`. + * + * **Note:** This function only supports comparing values with tags of + * `Boolean`, `Date`, `Error`, `Number`, `RegExp`, or `String`. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {string} tag The `toStringTag` of the objects to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ +function equalByTag(object, other, tag, bitmask, customizer, equalFunc, stack) { + switch (tag) { + case dataViewTag: + if ((object.byteLength != other.byteLength) || + (object.byteOffset != other.byteOffset)) { + return false; + } + object = object.buffer; + other = other.buffer; + + case arrayBufferTag: + if ((object.byteLength != other.byteLength) || + !equalFunc(new Uint8Array(object), new Uint8Array(other))) { + return false; + } + return true; + + case boolTag: + case dateTag: + case numberTag: + // Coerce booleans to `1` or `0` and dates to milliseconds. + // Invalid dates are coerced to `NaN`. + return eq(+object, +other); + + case errorTag: + return object.name == other.name && object.message == other.message; + + case regexpTag: + case stringTag: + // Coerce regexes to strings and treat strings, primitives and objects, + // as equal. See http://www.ecma-international.org/ecma-262/7.0/#sec-regexp.prototype.tostring + // for more details. 
+ return object == (other + ''); + + case mapTag: + var convert = mapToArray; + + case setTag: + var isPartial = bitmask & COMPARE_PARTIAL_FLAG; + convert || (convert = setToArray); + + if (object.size != other.size && !isPartial) { + return false; + } + // Assume cyclic values are equal. + var stacked = stack.get(object); + if (stacked) { + return stacked == other; + } + bitmask |= COMPARE_UNORDERED_FLAG; + + // Recursively compare objects (susceptible to call stack limits). + stack.set(object, other); + var result = equalArrays(convert(object), convert(other), bitmask, customizer, equalFunc, stack); + stack['delete'](object); + return result; + + case symbolTag: + if (symbolValueOf) { + return symbolValueOf.call(object) == symbolValueOf.call(other); + } + } + return false; +} + +module.exports = equalByTag; diff --git a/backend/node_modules/lodash/_equalObjects.js b/backend/node_modules/lodash/_equalObjects.js new file mode 100644 index 00000000..cdaacd2d --- /dev/null +++ b/backend/node_modules/lodash/_equalObjects.js @@ -0,0 +1,90 @@ +var getAllKeys = require('./_getAllKeys'); + +/** Used to compose bitmasks for value comparisons. */ +var COMPARE_PARTIAL_FLAG = 1; + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * A specialized version of `baseIsEqualDeep` for objects with support for + * partial deep comparisons. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ +function equalObjects(object, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + objProps = getAllKeys(object), + objLength = objProps.length, + othProps = getAllKeys(other), + othLength = othProps.length; + + if (objLength != othLength && !isPartial) { + return false; + } + var index = objLength; + while (index--) { + var key = objProps[index]; + if (!(isPartial ? key in other : hasOwnProperty.call(other, key))) { + return false; + } + } + // Check that cyclic values are equal. + var objStacked = stack.get(object); + var othStacked = stack.get(other); + if (objStacked && othStacked) { + return objStacked == other && othStacked == object; + } + var result = true; + stack.set(object, other); + stack.set(other, object); + + var skipCtor = isPartial; + while (++index < objLength) { + key = objProps[index]; + var objValue = object[key], + othValue = other[key]; + + if (customizer) { + var compared = isPartial + ? customizer(othValue, objValue, key, other, object, stack) + : customizer(objValue, othValue, key, object, other, stack); + } + // Recursively compare objects (susceptible to call stack limits). + if (!(compared === undefined + ? 
(objValue === othValue || equalFunc(objValue, othValue, bitmask, customizer, stack))
+          : compared
+        )) {
+      result = false;
+      break;
+    }
+    skipCtor || (skipCtor = key == 'constructor');
+  }
+  if (result && !skipCtor) {
+    var objCtor = object.constructor,
+        othCtor = other.constructor;
+
+    // Non `Object` object instances with different constructors are not equal.
+    if (objCtor != othCtor &&
+        ('constructor' in object && 'constructor' in other) &&
+        !(typeof objCtor == 'function' && objCtor instanceof objCtor &&
+          typeof othCtor == 'function' && othCtor instanceof othCtor)) {
+      result = false;
+    }
+  }
+  stack['delete'](object);
+  stack['delete'](other);
+  return result;
+}
+
+module.exports = equalObjects;
diff --git a/backend/node_modules/lodash/_escapeHtmlChar.js b/backend/node_modules/lodash/_escapeHtmlChar.js
new file mode 100644
index 00000000..7ca68ee6
--- /dev/null
+++ b/backend/node_modules/lodash/_escapeHtmlChar.js
@@ -0,0 +1,21 @@
+var basePropertyOf = require('./_basePropertyOf');
+
+/** Used to map characters to HTML entities. */
+var htmlEscapes = {
+  '&': '&amp;',
+  '<': '&lt;',
+  '>': '&gt;',
+  '"': '&quot;',
+  "'": '&#39;'
+};
+
+/**
+ * Used by `_.escape` to convert characters to HTML entities.
+ *
+ * @private
+ * @param {string} chr The matched character to escape.
+ * @returns {string} Returns the escaped character.
+ */
+var escapeHtmlChar = basePropertyOf(htmlEscapes);
+
+module.exports = escapeHtmlChar;
diff --git a/backend/node_modules/lodash/_escapeStringChar.js b/backend/node_modules/lodash/_escapeStringChar.js
new file mode 100644
index 00000000..44eca96c
--- /dev/null
+++ b/backend/node_modules/lodash/_escapeStringChar.js
@@ -0,0 +1,22 @@
+/** Used to escape characters for inclusion in compiled string literals. */
+var stringEscapes = {
+  '\\': '\\',
+  "'": "'",
+  '\n': 'n',
+  '\r': 'r',
+  '\u2028': 'u2028',
+  '\u2029': 'u2029'
+};
+
+/**
+ * Used by `_.template` to escape characters for inclusion in compiled string literals.
+ *
+ * @private
+ * @param {string} chr The matched character to escape.
+ * @returns {string} Returns the escaped character.
+ */
+function escapeStringChar(chr) {
+  return '\\' + stringEscapes[chr];
+}
+
+module.exports = escapeStringChar;
diff --git a/backend/node_modules/lodash/_flatRest.js b/backend/node_modules/lodash/_flatRest.js
new file mode 100644
index 00000000..94ab6cca
--- /dev/null
+++ b/backend/node_modules/lodash/_flatRest.js
@@ -0,0 +1,16 @@
+var flatten = require('./flatten'),
+    overRest = require('./_overRest'),
+    setToString = require('./_setToString');
+
+/**
+ * A specialized version of `baseRest` which flattens the rest array.
+ *
+ * @private
+ * @param {Function} func The function to apply a rest parameter to.
+ * @returns {Function} Returns the new function.
+ */
+function flatRest(func) {
+  return setToString(overRest(func, undefined, flatten), func + '');
+}
+
+module.exports = flatRest;
diff --git a/backend/node_modules/lodash/_freeGlobal.js b/backend/node_modules/lodash/_freeGlobal.js
new file mode 100644
index 00000000..bbec998f
--- /dev/null
+++ b/backend/node_modules/lodash/_freeGlobal.js
@@ -0,0 +1,4 @@
+/** Detect free variable `global` from Node.js.
*/ +var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; + +module.exports = freeGlobal; diff --git a/backend/node_modules/lodash/_getAllKeys.js b/backend/node_modules/lodash/_getAllKeys.js new file mode 100644 index 00000000..a9ce6995 --- /dev/null +++ b/backend/node_modules/lodash/_getAllKeys.js @@ -0,0 +1,16 @@ +var baseGetAllKeys = require('./_baseGetAllKeys'), + getSymbols = require('./_getSymbols'), + keys = require('./keys'); + +/** + * Creates an array of own enumerable property names and symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ +function getAllKeys(object) { + return baseGetAllKeys(object, keys, getSymbols); +} + +module.exports = getAllKeys; diff --git a/backend/node_modules/lodash/_getAllKeysIn.js b/backend/node_modules/lodash/_getAllKeysIn.js new file mode 100644 index 00000000..1b466784 --- /dev/null +++ b/backend/node_modules/lodash/_getAllKeysIn.js @@ -0,0 +1,17 @@ +var baseGetAllKeys = require('./_baseGetAllKeys'), + getSymbolsIn = require('./_getSymbolsIn'), + keysIn = require('./keysIn'); + +/** + * Creates an array of own and inherited enumerable property names and + * symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ +function getAllKeysIn(object) { + return baseGetAllKeys(object, keysIn, getSymbolsIn); +} + +module.exports = getAllKeysIn; diff --git a/backend/node_modules/lodash/_getData.js b/backend/node_modules/lodash/_getData.js new file mode 100644 index 00000000..a1fe7b77 --- /dev/null +++ b/backend/node_modules/lodash/_getData.js @@ -0,0 +1,15 @@ +var metaMap = require('./_metaMap'), + noop = require('./noop'); + +/** + * Gets metadata for `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {*} Returns the metadata for `func`. + */ +var getData = !metaMap ? noop : function(func) { + return metaMap.get(func); +}; + +module.exports = getData; diff --git a/backend/node_modules/lodash/_getFuncName.js b/backend/node_modules/lodash/_getFuncName.js new file mode 100644 index 00000000..21e15b33 --- /dev/null +++ b/backend/node_modules/lodash/_getFuncName.js @@ -0,0 +1,31 @@ +var realNames = require('./_realNames'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Gets the name of `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {string} Returns the function name. + */ +function getFuncName(func) { + var result = (func.name + ''), + array = realNames[result], + length = hasOwnProperty.call(realNames, result) ? array.length : 0; + + while (length--) { + var data = array[length], + otherFunc = data.func; + if (otherFunc == null || otherFunc == func) { + return data.name; + } + } + return result; +} + +module.exports = getFuncName; diff --git a/backend/node_modules/lodash/_getHolder.js b/backend/node_modules/lodash/_getHolder.js new file mode 100644 index 00000000..65e94b5c --- /dev/null +++ b/backend/node_modules/lodash/_getHolder.js @@ -0,0 +1,13 @@ +/** + * Gets the argument placeholder value for `func`. + * + * @private + * @param {Function} func The function to inspect. + * @returns {*} Returns the placeholder value. 
+ */ +function getHolder(func) { + var object = func; + return object.placeholder; +} + +module.exports = getHolder; diff --git a/backend/node_modules/lodash/_getMapData.js b/backend/node_modules/lodash/_getMapData.js new file mode 100644 index 00000000..17f63032 --- /dev/null +++ b/backend/node_modules/lodash/_getMapData.js @@ -0,0 +1,18 @@ +var isKeyable = require('./_isKeyable'); + +/** + * Gets the data for `map`. + * + * @private + * @param {Object} map The map to query. + * @param {string} key The reference key. + * @returns {*} Returns the map data. + */ +function getMapData(map, key) { + var data = map.__data__; + return isKeyable(key) + ? data[typeof key == 'string' ? 'string' : 'hash'] + : data.map; +} + +module.exports = getMapData; diff --git a/backend/node_modules/lodash/_getMatchData.js b/backend/node_modules/lodash/_getMatchData.js new file mode 100644 index 00000000..2cc70f91 --- /dev/null +++ b/backend/node_modules/lodash/_getMatchData.js @@ -0,0 +1,24 @@ +var isStrictComparable = require('./_isStrictComparable'), + keys = require('./keys'); + +/** + * Gets the property names, values, and compare flags of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the match data of `object`. + */ +function getMatchData(object) { + var result = keys(object), + length = result.length; + + while (length--) { + var key = result[length], + value = object[key]; + + result[length] = [key, value, isStrictComparable(value)]; + } + return result; +} + +module.exports = getMatchData; diff --git a/backend/node_modules/lodash/_getNative.js b/backend/node_modules/lodash/_getNative.js new file mode 100644 index 00000000..97a622b8 --- /dev/null +++ b/backend/node_modules/lodash/_getNative.js @@ -0,0 +1,17 @@ +var baseIsNative = require('./_baseIsNative'), + getValue = require('./_getValue'); + +/** + * Gets the native function at `key` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the method to get. + * @returns {*} Returns the function if it's native, else `undefined`. + */ +function getNative(object, key) { + var value = getValue(object, key); + return baseIsNative(value) ? value : undefined; +} + +module.exports = getNative; diff --git a/backend/node_modules/lodash/_getPrototype.js b/backend/node_modules/lodash/_getPrototype.js new file mode 100644 index 00000000..e8086121 --- /dev/null +++ b/backend/node_modules/lodash/_getPrototype.js @@ -0,0 +1,6 @@ +var overArg = require('./_overArg'); + +/** Built-in value references. */ +var getPrototype = overArg(Object.getPrototypeOf, Object); + +module.exports = getPrototype; diff --git a/backend/node_modules/lodash/_getRawTag.js b/backend/node_modules/lodash/_getRawTag.js new file mode 100644 index 00000000..49a95c9c --- /dev/null +++ b/backend/node_modules/lodash/_getRawTag.js @@ -0,0 +1,46 @@ +var Symbol = require('./_Symbol'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var nativeObjectToString = objectProto.toString; + +/** Built-in value references. */ +var symToStringTag = Symbol ? Symbol.toStringTag : undefined; + +/** + * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. 
+ * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the raw `toStringTag`. + */ +function getRawTag(value) { + var isOwn = hasOwnProperty.call(value, symToStringTag), + tag = value[symToStringTag]; + + try { + value[symToStringTag] = undefined; + var unmasked = true; + } catch (e) {} + + var result = nativeObjectToString.call(value); + if (unmasked) { + if (isOwn) { + value[symToStringTag] = tag; + } else { + delete value[symToStringTag]; + } + } + return result; +} + +module.exports = getRawTag; diff --git a/backend/node_modules/lodash/_getSymbols.js b/backend/node_modules/lodash/_getSymbols.js new file mode 100644 index 00000000..7d6eafeb --- /dev/null +++ b/backend/node_modules/lodash/_getSymbols.js @@ -0,0 +1,30 @@ +var arrayFilter = require('./_arrayFilter'), + stubArray = require('./stubArray'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Built-in value references. */ +var propertyIsEnumerable = objectProto.propertyIsEnumerable; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeGetSymbols = Object.getOwnPropertySymbols; + +/** + * Creates an array of the own enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ +var getSymbols = !nativeGetSymbols ? stubArray : function(object) { + if (object == null) { + return []; + } + object = Object(object); + return arrayFilter(nativeGetSymbols(object), function(symbol) { + return propertyIsEnumerable.call(object, symbol); + }); +}; + +module.exports = getSymbols; diff --git a/backend/node_modules/lodash/_getSymbolsIn.js b/backend/node_modules/lodash/_getSymbolsIn.js new file mode 100644 index 00000000..cec0855a --- /dev/null +++ b/backend/node_modules/lodash/_getSymbolsIn.js @@ -0,0 +1,25 @@ +var arrayPush = require('./_arrayPush'), + getPrototype = require('./_getPrototype'), + getSymbols = require('./_getSymbols'), + stubArray = require('./stubArray'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeGetSymbols = Object.getOwnPropertySymbols; + +/** + * Creates an array of the own and inherited enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ +var getSymbolsIn = !nativeGetSymbols ? stubArray : function(object) { + var result = []; + while (object) { + arrayPush(result, getSymbols(object)); + object = getPrototype(object); + } + return result; +}; + +module.exports = getSymbolsIn; diff --git a/backend/node_modules/lodash/_getTag.js b/backend/node_modules/lodash/_getTag.js new file mode 100644 index 00000000..deaf89d5 --- /dev/null +++ b/backend/node_modules/lodash/_getTag.js @@ -0,0 +1,58 @@ +var DataView = require('./_DataView'), + Map = require('./_Map'), + Promise = require('./_Promise'), + Set = require('./_Set'), + WeakMap = require('./_WeakMap'), + baseGetTag = require('./_baseGetTag'), + toSource = require('./_toSource'); + +/** `Object#toString` result references. */ +var mapTag = '[object Map]', + objectTag = '[object Object]', + promiseTag = '[object Promise]', + setTag = '[object Set]', + weakMapTag = '[object WeakMap]'; + +var dataViewTag = '[object DataView]'; + +/** Used to detect maps, sets, and weakmaps. 
*/ +var dataViewCtorString = toSource(DataView), + mapCtorString = toSource(Map), + promiseCtorString = toSource(Promise), + setCtorString = toSource(Set), + weakMapCtorString = toSource(WeakMap); + +/** + * Gets the `toStringTag` of `value`. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ +var getTag = baseGetTag; + +// Fallback for data views, maps, sets, and weak maps in IE 11 and promises in Node.js < 6. +if ((DataView && getTag(new DataView(new ArrayBuffer(1))) != dataViewTag) || + (Map && getTag(new Map) != mapTag) || + (Promise && getTag(Promise.resolve()) != promiseTag) || + (Set && getTag(new Set) != setTag) || + (WeakMap && getTag(new WeakMap) != weakMapTag)) { + getTag = function(value) { + var result = baseGetTag(value), + Ctor = result == objectTag ? value.constructor : undefined, + ctorString = Ctor ? toSource(Ctor) : ''; + + if (ctorString) { + switch (ctorString) { + case dataViewCtorString: return dataViewTag; + case mapCtorString: return mapTag; + case promiseCtorString: return promiseTag; + case setCtorString: return setTag; + case weakMapCtorString: return weakMapTag; + } + } + return result; + }; +} + +module.exports = getTag; diff --git a/backend/node_modules/lodash/_getValue.js b/backend/node_modules/lodash/_getValue.js new file mode 100644 index 00000000..5f7d7736 --- /dev/null +++ b/backend/node_modules/lodash/_getValue.js @@ -0,0 +1,13 @@ +/** + * Gets the value at `key` of `object`. + * + * @private + * @param {Object} [object] The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ +function getValue(object, key) { + return object == null ? undefined : object[key]; +} + +module.exports = getValue; diff --git a/backend/node_modules/lodash/_getView.js b/backend/node_modules/lodash/_getView.js new file mode 100644 index 00000000..df1e5d44 --- /dev/null +++ b/backend/node_modules/lodash/_getView.js @@ -0,0 +1,33 @@ +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max, + nativeMin = Math.min; + +/** + * Gets the view, applying any `transforms` to the `start` and `end` positions. + * + * @private + * @param {number} start The start of the view. + * @param {number} end The end of the view. + * @param {Array} transforms The transformations to apply to the view. + * @returns {Object} Returns an object containing the `start` and `end` + * positions of the view. + */ +function getView(start, end, transforms) { + var index = -1, + length = transforms.length; + + while (++index < length) { + var data = transforms[index], + size = data.size; + + switch (data.type) { + case 'drop': start += size; break; + case 'dropRight': end -= size; break; + case 'take': end = nativeMin(end, start + size); break; + case 'takeRight': start = nativeMax(start, end - size); break; + } + } + return { 'start': start, 'end': end }; +} + +module.exports = getView; diff --git a/backend/node_modules/lodash/_getWrapDetails.js b/backend/node_modules/lodash/_getWrapDetails.js new file mode 100644 index 00000000..3bcc6e48 --- /dev/null +++ b/backend/node_modules/lodash/_getWrapDetails.js @@ -0,0 +1,17 @@ +/** Used to match wrap detail comments. */ +var reWrapDetails = /\{\n\/\* \[wrapped with (.+)\] \*/, + reSplitDetails = /,? & /; + +/** + * Extracts wrapper details from the `source` body comment. + * + * @private + * @param {string} source The source to inspect. 
+ * @returns {Array} Returns the wrapper details. + */ +function getWrapDetails(source) { + var match = source.match(reWrapDetails); + return match ? match[1].split(reSplitDetails) : []; +} + +module.exports = getWrapDetails; diff --git a/backend/node_modules/lodash/_hasPath.js b/backend/node_modules/lodash/_hasPath.js new file mode 100644 index 00000000..93dbde15 --- /dev/null +++ b/backend/node_modules/lodash/_hasPath.js @@ -0,0 +1,39 @@ +var castPath = require('./_castPath'), + isArguments = require('./isArguments'), + isArray = require('./isArray'), + isIndex = require('./_isIndex'), + isLength = require('./isLength'), + toKey = require('./_toKey'); + +/** + * Checks if `path` exists on `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @param {Function} hasFunc The function to check properties. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + */ +function hasPath(object, path, hasFunc) { + path = castPath(path, object); + + var index = -1, + length = path.length, + result = false; + + while (++index < length) { + var key = toKey(path[index]); + if (!(result = object != null && hasFunc(object, key))) { + break; + } + object = object[key]; + } + if (result || ++index != length) { + return result; + } + length = object == null ? 0 : object.length; + return !!length && isLength(length) && isIndex(key, length) && + (isArray(object) || isArguments(object)); +} + +module.exports = hasPath; diff --git a/backend/node_modules/lodash/_hasUnicode.js b/backend/node_modules/lodash/_hasUnicode.js new file mode 100644 index 00000000..cb6ca15f --- /dev/null +++ b/backend/node_modules/lodash/_hasUnicode.js @@ -0,0 +1,26 @@ +/** Used to compose unicode character classes. */ +var rsAstralRange = '\\ud800-\\udfff', + rsComboMarksRange = '\\u0300-\\u036f', + reComboHalfMarksRange = '\\ufe20-\\ufe2f', + rsComboSymbolsRange = '\\u20d0-\\u20ff', + rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange, + rsVarRange = '\\ufe0e\\ufe0f'; + +/** Used to compose unicode capture groups. */ +var rsZWJ = '\\u200d'; + +/** Used to detect strings with [zero-width joiners or code points from the astral planes](http://eev.ee/blog/2015/09/12/dark-corners-of-unicode/). */ +var reHasUnicode = RegExp('[' + rsZWJ + rsAstralRange + rsComboRange + rsVarRange + ']'); + +/** + * Checks if `string` contains Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a symbol is found, else `false`. + */ +function hasUnicode(string) { + return reHasUnicode.test(string); +} + +module.exports = hasUnicode; diff --git a/backend/node_modules/lodash/_hasUnicodeWord.js b/backend/node_modules/lodash/_hasUnicodeWord.js new file mode 100644 index 00000000..95d52c44 --- /dev/null +++ b/backend/node_modules/lodash/_hasUnicodeWord.js @@ -0,0 +1,15 @@ +/** Used to detect strings that need a more robust regexp to match words. */ +var reHasUnicodeWord = /[a-z][A-Z]|[A-Z]{2}[a-z]|[0-9][a-zA-Z]|[a-zA-Z][0-9]|[^a-zA-Z0-9 ]/; + +/** + * Checks if `string` contains a word composed of Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a word is found, else `false`. 
+ */ +function hasUnicodeWord(string) { + return reHasUnicodeWord.test(string); +} + +module.exports = hasUnicodeWord; diff --git a/backend/node_modules/lodash/_hashClear.js b/backend/node_modules/lodash/_hashClear.js new file mode 100644 index 00000000..5d4b70cc --- /dev/null +++ b/backend/node_modules/lodash/_hashClear.js @@ -0,0 +1,15 @@ +var nativeCreate = require('./_nativeCreate'); + +/** + * Removes all key-value entries from the hash. + * + * @private + * @name clear + * @memberOf Hash + */ +function hashClear() { + this.__data__ = nativeCreate ? nativeCreate(null) : {}; + this.size = 0; +} + +module.exports = hashClear; diff --git a/backend/node_modules/lodash/_hashDelete.js b/backend/node_modules/lodash/_hashDelete.js new file mode 100644 index 00000000..ea9dabf1 --- /dev/null +++ b/backend/node_modules/lodash/_hashDelete.js @@ -0,0 +1,17 @@ +/** + * Removes `key` and its value from the hash. + * + * @private + * @name delete + * @memberOf Hash + * @param {Object} hash The hash to modify. + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ +function hashDelete(key) { + var result = this.has(key) && delete this.__data__[key]; + this.size -= result ? 1 : 0; + return result; +} + +module.exports = hashDelete; diff --git a/backend/node_modules/lodash/_hashGet.js b/backend/node_modules/lodash/_hashGet.js new file mode 100644 index 00000000..1fc2f34b --- /dev/null +++ b/backend/node_modules/lodash/_hashGet.js @@ -0,0 +1,30 @@ +var nativeCreate = require('./_nativeCreate'); + +/** Used to stand-in for `undefined` hash values. */ +var HASH_UNDEFINED = '__lodash_hash_undefined__'; + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Gets the hash value for `key`. + * + * @private + * @name get + * @memberOf Hash + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ +function hashGet(key) { + var data = this.__data__; + if (nativeCreate) { + var result = data[key]; + return result === HASH_UNDEFINED ? undefined : result; + } + return hasOwnProperty.call(data, key) ? data[key] : undefined; +} + +module.exports = hashGet; diff --git a/backend/node_modules/lodash/_hashHas.js b/backend/node_modules/lodash/_hashHas.js new file mode 100644 index 00000000..281a5517 --- /dev/null +++ b/backend/node_modules/lodash/_hashHas.js @@ -0,0 +1,23 @@ +var nativeCreate = require('./_nativeCreate'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Checks if a hash value for `key` exists. + * + * @private + * @name has + * @memberOf Hash + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ +function hashHas(key) { + var data = this.__data__; + return nativeCreate ? (data[key] !== undefined) : hasOwnProperty.call(data, key); +} + +module.exports = hashHas; diff --git a/backend/node_modules/lodash/_hashSet.js b/backend/node_modules/lodash/_hashSet.js new file mode 100644 index 00000000..e1055283 --- /dev/null +++ b/backend/node_modules/lodash/_hashSet.js @@ -0,0 +1,23 @@ +var nativeCreate = require('./_nativeCreate'); + +/** Used to stand-in for `undefined` hash values. 
*/ +var HASH_UNDEFINED = '__lodash_hash_undefined__'; + +/** + * Sets the hash `key` to `value`. + * + * @private + * @name set + * @memberOf Hash + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the hash instance. + */ +function hashSet(key, value) { + var data = this.__data__; + this.size += this.has(key) ? 0 : 1; + data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; + return this; +} + +module.exports = hashSet; diff --git a/backend/node_modules/lodash/_initCloneArray.js b/backend/node_modules/lodash/_initCloneArray.js new file mode 100644 index 00000000..078c15af --- /dev/null +++ b/backend/node_modules/lodash/_initCloneArray.js @@ -0,0 +1,26 @@ +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Initializes an array clone. + * + * @private + * @param {Array} array The array to clone. + * @returns {Array} Returns the initialized clone. + */ +function initCloneArray(array) { + var length = array.length, + result = new array.constructor(length); + + // Add properties assigned by `RegExp#exec`. + if (length && typeof array[0] == 'string' && hasOwnProperty.call(array, 'index')) { + result.index = array.index; + result.input = array.input; + } + return result; +} + +module.exports = initCloneArray; diff --git a/backend/node_modules/lodash/_initCloneByTag.js b/backend/node_modules/lodash/_initCloneByTag.js new file mode 100644 index 00000000..f69a008c --- /dev/null +++ b/backend/node_modules/lodash/_initCloneByTag.js @@ -0,0 +1,77 @@ +var cloneArrayBuffer = require('./_cloneArrayBuffer'), + cloneDataView = require('./_cloneDataView'), + cloneRegExp = require('./_cloneRegExp'), + cloneSymbol = require('./_cloneSymbol'), + cloneTypedArray = require('./_cloneTypedArray'); + +/** `Object#toString` result references. */ +var boolTag = '[object Boolean]', + dateTag = '[object Date]', + mapTag = '[object Map]', + numberTag = '[object Number]', + regexpTag = '[object RegExp]', + setTag = '[object Set]', + stringTag = '[object String]', + symbolTag = '[object Symbol]'; + +var arrayBufferTag = '[object ArrayBuffer]', + dataViewTag = '[object DataView]', + float32Tag = '[object Float32Array]', + float64Tag = '[object Float64Array]', + int8Tag = '[object Int8Array]', + int16Tag = '[object Int16Array]', + int32Tag = '[object Int32Array]', + uint8Tag = '[object Uint8Array]', + uint8ClampedTag = '[object Uint8ClampedArray]', + uint16Tag = '[object Uint16Array]', + uint32Tag = '[object Uint32Array]'; + +/** + * Initializes an object clone based on its `toStringTag`. + * + * **Note:** This function only supports cloning values with tags of + * `Boolean`, `Date`, `Error`, `Map`, `Number`, `RegExp`, `Set`, or `String`. + * + * @private + * @param {Object} object The object to clone. + * @param {string} tag The `toStringTag` of the object to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the initialized clone. 
+ */ +function initCloneByTag(object, tag, isDeep) { + var Ctor = object.constructor; + switch (tag) { + case arrayBufferTag: + return cloneArrayBuffer(object); + + case boolTag: + case dateTag: + return new Ctor(+object); + + case dataViewTag: + return cloneDataView(object, isDeep); + + case float32Tag: case float64Tag: + case int8Tag: case int16Tag: case int32Tag: + case uint8Tag: case uint8ClampedTag: case uint16Tag: case uint32Tag: + return cloneTypedArray(object, isDeep); + + case mapTag: + return new Ctor; + + case numberTag: + case stringTag: + return new Ctor(object); + + case regexpTag: + return cloneRegExp(object); + + case setTag: + return new Ctor; + + case symbolTag: + return cloneSymbol(object); + } +} + +module.exports = initCloneByTag; diff --git a/backend/node_modules/lodash/_initCloneObject.js b/backend/node_modules/lodash/_initCloneObject.js new file mode 100644 index 00000000..5a13e64a --- /dev/null +++ b/backend/node_modules/lodash/_initCloneObject.js @@ -0,0 +1,18 @@ +var baseCreate = require('./_baseCreate'), + getPrototype = require('./_getPrototype'), + isPrototype = require('./_isPrototype'); + +/** + * Initializes an object clone. + * + * @private + * @param {Object} object The object to clone. + * @returns {Object} Returns the initialized clone. + */ +function initCloneObject(object) { + return (typeof object.constructor == 'function' && !isPrototype(object)) + ? baseCreate(getPrototype(object)) + : {}; +} + +module.exports = initCloneObject; diff --git a/backend/node_modules/lodash/_insertWrapDetails.js b/backend/node_modules/lodash/_insertWrapDetails.js new file mode 100644 index 00000000..e7908086 --- /dev/null +++ b/backend/node_modules/lodash/_insertWrapDetails.js @@ -0,0 +1,23 @@ +/** Used to match wrap detail comments. */ +var reWrapComment = /\{(?:\n\/\* \[wrapped with .+\] \*\/)?\n?/; + +/** + * Inserts wrapper `details` in a comment at the top of the `source` body. + * + * @private + * @param {string} source The source to modify. + * @returns {Array} details The details to insert. + * @returns {string} Returns the modified source. + */ +function insertWrapDetails(source, details) { + var length = details.length; + if (!length) { + return source; + } + var lastIndex = length - 1; + details[lastIndex] = (length > 1 ? '& ' : '') + details[lastIndex]; + details = details.join(length > 2 ? ', ' : ' '); + return source.replace(reWrapComment, '{\n/* [wrapped with ' + details + '] */\n'); +} + +module.exports = insertWrapDetails; diff --git a/backend/node_modules/lodash/_isFlattenable.js b/backend/node_modules/lodash/_isFlattenable.js new file mode 100644 index 00000000..4cc2c249 --- /dev/null +++ b/backend/node_modules/lodash/_isFlattenable.js @@ -0,0 +1,20 @@ +var Symbol = require('./_Symbol'), + isArguments = require('./isArguments'), + isArray = require('./isArray'); + +/** Built-in value references. */ +var spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined; + +/** + * Checks if `value` is a flattenable `arguments` object or array. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is flattenable, else `false`. 
+ */ +function isFlattenable(value) { + return isArray(value) || isArguments(value) || + !!(spreadableSymbol && value && value[spreadableSymbol]); +} + +module.exports = isFlattenable; diff --git a/backend/node_modules/lodash/_isIndex.js b/backend/node_modules/lodash/_isIndex.js new file mode 100644 index 00000000..061cd390 --- /dev/null +++ b/backend/node_modules/lodash/_isIndex.js @@ -0,0 +1,25 @@ +/** Used as references for various `Number` constants. */ +var MAX_SAFE_INTEGER = 9007199254740991; + +/** Used to detect unsigned integer values. */ +var reIsUint = /^(?:0|[1-9]\d*)$/; + +/** + * Checks if `value` is a valid array-like index. + * + * @private + * @param {*} value The value to check. + * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. + * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. + */ +function isIndex(value, length) { + var type = typeof value; + length = length == null ? MAX_SAFE_INTEGER : length; + + return !!length && + (type == 'number' || + (type != 'symbol' && reIsUint.test(value))) && + (value > -1 && value % 1 == 0 && value < length); +} + +module.exports = isIndex; diff --git a/backend/node_modules/lodash/_isIterateeCall.js b/backend/node_modules/lodash/_isIterateeCall.js new file mode 100644 index 00000000..a0bb5a9c --- /dev/null +++ b/backend/node_modules/lodash/_isIterateeCall.js @@ -0,0 +1,30 @@ +var eq = require('./eq'), + isArrayLike = require('./isArrayLike'), + isIndex = require('./_isIndex'), + isObject = require('./isObject'); + +/** + * Checks if the given arguments are from an iteratee call. + * + * @private + * @param {*} value The potential iteratee value argument. + * @param {*} index The potential iteratee index or key argument. + * @param {*} object The potential iteratee object argument. + * @returns {boolean} Returns `true` if the arguments are from an iteratee call, + * else `false`. + */ +function isIterateeCall(value, index, object) { + if (!isObject(object)) { + return false; + } + var type = typeof index; + if (type == 'number' + ? (isArrayLike(object) && isIndex(index, object.length)) + : (type == 'string' && index in object) + ) { + return eq(object[index], value); + } + return false; +} + +module.exports = isIterateeCall; diff --git a/backend/node_modules/lodash/_isKey.js b/backend/node_modules/lodash/_isKey.js new file mode 100644 index 00000000..ff08b068 --- /dev/null +++ b/backend/node_modules/lodash/_isKey.js @@ -0,0 +1,29 @@ +var isArray = require('./isArray'), + isSymbol = require('./isSymbol'); + +/** Used to match property names within property paths. */ +var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, + reIsPlainProp = /^\w*$/; + +/** + * Checks if `value` is a property name and not a property path. + * + * @private + * @param {*} value The value to check. + * @param {Object} [object] The object to query keys on. + * @returns {boolean} Returns `true` if `value` is a property name, else `false`. 
+ */ +function isKey(value, object) { + if (isArray(value)) { + return false; + } + var type = typeof value; + if (type == 'number' || type == 'symbol' || type == 'boolean' || + value == null || isSymbol(value)) { + return true; + } + return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || + (object != null && value in Object(object)); +} + +module.exports = isKey; diff --git a/backend/node_modules/lodash/_isKeyable.js b/backend/node_modules/lodash/_isKeyable.js new file mode 100644 index 00000000..39f1828d --- /dev/null +++ b/backend/node_modules/lodash/_isKeyable.js @@ -0,0 +1,15 @@ +/** + * Checks if `value` is suitable for use as unique object key. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is suitable, else `false`. + */ +function isKeyable(value) { + var type = typeof value; + return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') + ? (value !== '__proto__') + : (value === null); +} + +module.exports = isKeyable; diff --git a/backend/node_modules/lodash/_isLaziable.js b/backend/node_modules/lodash/_isLaziable.js new file mode 100644 index 00000000..a57c4f2d --- /dev/null +++ b/backend/node_modules/lodash/_isLaziable.js @@ -0,0 +1,28 @@ +var LazyWrapper = require('./_LazyWrapper'), + getData = require('./_getData'), + getFuncName = require('./_getFuncName'), + lodash = require('./wrapperLodash'); + +/** + * Checks if `func` has a lazy counterpart. + * + * @private + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` has a lazy counterpart, + * else `false`. + */ +function isLaziable(func) { + var funcName = getFuncName(func), + other = lodash[funcName]; + + if (typeof other != 'function' || !(funcName in LazyWrapper.prototype)) { + return false; + } + if (func === other) { + return true; + } + var data = getData(other); + return !!data && func === data[0]; +} + +module.exports = isLaziable; diff --git a/backend/node_modules/lodash/_isMaskable.js b/backend/node_modules/lodash/_isMaskable.js new file mode 100644 index 00000000..eb98d09f --- /dev/null +++ b/backend/node_modules/lodash/_isMaskable.js @@ -0,0 +1,14 @@ +var coreJsData = require('./_coreJsData'), + isFunction = require('./isFunction'), + stubFalse = require('./stubFalse'); + +/** + * Checks if `func` is capable of being masked. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `func` is maskable, else `false`. + */ +var isMaskable = coreJsData ? isFunction : stubFalse; + +module.exports = isMaskable; diff --git a/backend/node_modules/lodash/_isMasked.js b/backend/node_modules/lodash/_isMasked.js new file mode 100644 index 00000000..4b0f21ba --- /dev/null +++ b/backend/node_modules/lodash/_isMasked.js @@ -0,0 +1,20 @@ +var coreJsData = require('./_coreJsData'); + +/** Used to detect methods masquerading as native. */ +var maskSrcKey = (function() { + var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); + return uid ? ('Symbol(src)_1.' + uid) : ''; +}()); + +/** + * Checks if `func` has its source masked. + * + * @private + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` is masked, else `false`. 
+ */ +function isMasked(func) { + return !!maskSrcKey && (maskSrcKey in func); +} + +module.exports = isMasked; diff --git a/backend/node_modules/lodash/_isPrototype.js b/backend/node_modules/lodash/_isPrototype.js new file mode 100644 index 00000000..0f29498d --- /dev/null +++ b/backend/node_modules/lodash/_isPrototype.js @@ -0,0 +1,18 @@ +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** + * Checks if `value` is likely a prototype object. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. + */ +function isPrototype(value) { + var Ctor = value && value.constructor, + proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto; + + return value === proto; +} + +module.exports = isPrototype; diff --git a/backend/node_modules/lodash/_isStrictComparable.js b/backend/node_modules/lodash/_isStrictComparable.js new file mode 100644 index 00000000..b59f40b8 --- /dev/null +++ b/backend/node_modules/lodash/_isStrictComparable.js @@ -0,0 +1,15 @@ +var isObject = require('./isObject'); + +/** + * Checks if `value` is suitable for strict equality comparisons, i.e. `===`. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` if suitable for strict + * equality comparisons, else `false`. + */ +function isStrictComparable(value) { + return value === value && !isObject(value); +} + +module.exports = isStrictComparable; diff --git a/backend/node_modules/lodash/_iteratorToArray.js b/backend/node_modules/lodash/_iteratorToArray.js new file mode 100644 index 00000000..47685664 --- /dev/null +++ b/backend/node_modules/lodash/_iteratorToArray.js @@ -0,0 +1,18 @@ +/** + * Converts `iterator` to an array. + * + * @private + * @param {Object} iterator The iterator to convert. + * @returns {Array} Returns the converted array. + */ +function iteratorToArray(iterator) { + var data, + result = []; + + while (!(data = iterator.next()).done) { + result.push(data.value); + } + return result; +} + +module.exports = iteratorToArray; diff --git a/backend/node_modules/lodash/_lazyClone.js b/backend/node_modules/lodash/_lazyClone.js new file mode 100644 index 00000000..d8a51f87 --- /dev/null +++ b/backend/node_modules/lodash/_lazyClone.js @@ -0,0 +1,23 @@ +var LazyWrapper = require('./_LazyWrapper'), + copyArray = require('./_copyArray'); + +/** + * Creates a clone of the lazy wrapper object. + * + * @private + * @name clone + * @memberOf LazyWrapper + * @returns {Object} Returns the cloned `LazyWrapper` object. + */ +function lazyClone() { + var result = new LazyWrapper(this.__wrapped__); + result.__actions__ = copyArray(this.__actions__); + result.__dir__ = this.__dir__; + result.__filtered__ = this.__filtered__; + result.__iteratees__ = copyArray(this.__iteratees__); + result.__takeCount__ = this.__takeCount__; + result.__views__ = copyArray(this.__views__); + return result; +} + +module.exports = lazyClone; diff --git a/backend/node_modules/lodash/_lazyReverse.js b/backend/node_modules/lodash/_lazyReverse.js new file mode 100644 index 00000000..c5b52190 --- /dev/null +++ b/backend/node_modules/lodash/_lazyReverse.js @@ -0,0 +1,23 @@ +var LazyWrapper = require('./_LazyWrapper'); + +/** + * Reverses the direction of lazy iteration. + * + * @private + * @name reverse + * @memberOf LazyWrapper + * @returns {Object} Returns the new reversed `LazyWrapper` object. 
+ */ +function lazyReverse() { + if (this.__filtered__) { + var result = new LazyWrapper(this); + result.__dir__ = -1; + result.__filtered__ = true; + } else { + result = this.clone(); + result.__dir__ *= -1; + } + return result; +} + +module.exports = lazyReverse; diff --git a/backend/node_modules/lodash/_lazyValue.js b/backend/node_modules/lodash/_lazyValue.js new file mode 100644 index 00000000..371ca8d2 --- /dev/null +++ b/backend/node_modules/lodash/_lazyValue.js @@ -0,0 +1,69 @@ +var baseWrapperValue = require('./_baseWrapperValue'), + getView = require('./_getView'), + isArray = require('./isArray'); + +/** Used to indicate the type of lazy iteratees. */ +var LAZY_FILTER_FLAG = 1, + LAZY_MAP_FLAG = 2; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMin = Math.min; + +/** + * Extracts the unwrapped value from its lazy wrapper. + * + * @private + * @name value + * @memberOf LazyWrapper + * @returns {*} Returns the unwrapped value. + */ +function lazyValue() { + var array = this.__wrapped__.value(), + dir = this.__dir__, + isArr = isArray(array), + isRight = dir < 0, + arrLength = isArr ? array.length : 0, + view = getView(0, arrLength, this.__views__), + start = view.start, + end = view.end, + length = end - start, + index = isRight ? end : (start - 1), + iteratees = this.__iteratees__, + iterLength = iteratees.length, + resIndex = 0, + takeCount = nativeMin(length, this.__takeCount__); + + if (!isArr || (!isRight && arrLength == length && takeCount == length)) { + return baseWrapperValue(array, this.__actions__); + } + var result = []; + + outer: + while (length-- && resIndex < takeCount) { + index += dir; + + var iterIndex = -1, + value = array[index]; + + while (++iterIndex < iterLength) { + var data = iteratees[iterIndex], + iteratee = data.iteratee, + type = data.type, + computed = iteratee(value); + + if (type == LAZY_MAP_FLAG) { + value = computed; + } else if (!computed) { + if (type == LAZY_FILTER_FLAG) { + continue outer; + } else { + break outer; + } + } + } + result[resIndex++] = value; + } + return result; +} + +module.exports = lazyValue; diff --git a/backend/node_modules/lodash/_listCacheClear.js b/backend/node_modules/lodash/_listCacheClear.js new file mode 100644 index 00000000..acbe39a5 --- /dev/null +++ b/backend/node_modules/lodash/_listCacheClear.js @@ -0,0 +1,13 @@ +/** + * Removes all key-value entries from the list cache. + * + * @private + * @name clear + * @memberOf ListCache + */ +function listCacheClear() { + this.__data__ = []; + this.size = 0; +} + +module.exports = listCacheClear; diff --git a/backend/node_modules/lodash/_listCacheDelete.js b/backend/node_modules/lodash/_listCacheDelete.js new file mode 100644 index 00000000..b1384ade --- /dev/null +++ b/backend/node_modules/lodash/_listCacheDelete.js @@ -0,0 +1,35 @@ +var assocIndexOf = require('./_assocIndexOf'); + +/** Used for built-in method references. */ +var arrayProto = Array.prototype; + +/** Built-in value references. */ +var splice = arrayProto.splice; + +/** + * Removes `key` and its value from the list cache. + * + * @private + * @name delete + * @memberOf ListCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. 
+ */ +function listCacheDelete(key) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + return false; + } + var lastIndex = data.length - 1; + if (index == lastIndex) { + data.pop(); + } else { + splice.call(data, index, 1); + } + --this.size; + return true; +} + +module.exports = listCacheDelete; diff --git a/backend/node_modules/lodash/_listCacheGet.js b/backend/node_modules/lodash/_listCacheGet.js new file mode 100644 index 00000000..f8192fc3 --- /dev/null +++ b/backend/node_modules/lodash/_listCacheGet.js @@ -0,0 +1,19 @@ +var assocIndexOf = require('./_assocIndexOf'); + +/** + * Gets the list cache value for `key`. + * + * @private + * @name get + * @memberOf ListCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ +function listCacheGet(key) { + var data = this.__data__, + index = assocIndexOf(data, key); + + return index < 0 ? undefined : data[index][1]; +} + +module.exports = listCacheGet; diff --git a/backend/node_modules/lodash/_listCacheHas.js b/backend/node_modules/lodash/_listCacheHas.js new file mode 100644 index 00000000..2adf6714 --- /dev/null +++ b/backend/node_modules/lodash/_listCacheHas.js @@ -0,0 +1,16 @@ +var assocIndexOf = require('./_assocIndexOf'); + +/** + * Checks if a list cache value for `key` exists. + * + * @private + * @name has + * @memberOf ListCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ +function listCacheHas(key) { + return assocIndexOf(this.__data__, key) > -1; +} + +module.exports = listCacheHas; diff --git a/backend/node_modules/lodash/_listCacheSet.js b/backend/node_modules/lodash/_listCacheSet.js new file mode 100644 index 00000000..5855c95e --- /dev/null +++ b/backend/node_modules/lodash/_listCacheSet.js @@ -0,0 +1,26 @@ +var assocIndexOf = require('./_assocIndexOf'); + +/** + * Sets the list cache `key` to `value`. + * + * @private + * @name set + * @memberOf ListCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the list cache instance. + */ +function listCacheSet(key, value) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + ++this.size; + data.push([key, value]); + } else { + data[index][1] = value; + } + return this; +} + +module.exports = listCacheSet; diff --git a/backend/node_modules/lodash/_mapCacheClear.js b/backend/node_modules/lodash/_mapCacheClear.js new file mode 100644 index 00000000..bc9ca204 --- /dev/null +++ b/backend/node_modules/lodash/_mapCacheClear.js @@ -0,0 +1,21 @@ +var Hash = require('./_Hash'), + ListCache = require('./_ListCache'), + Map = require('./_Map'); + +/** + * Removes all key-value entries from the map. + * + * @private + * @name clear + * @memberOf MapCache + */ +function mapCacheClear() { + this.size = 0; + this.__data__ = { + 'hash': new Hash, + 'map': new (Map || ListCache), + 'string': new Hash + }; +} + +module.exports = mapCacheClear; diff --git a/backend/node_modules/lodash/_mapCacheDelete.js b/backend/node_modules/lodash/_mapCacheDelete.js new file mode 100644 index 00000000..946ca3c9 --- /dev/null +++ b/backend/node_modules/lodash/_mapCacheDelete.js @@ -0,0 +1,18 @@ +var getMapData = require('./_getMapData'); + +/** + * Removes `key` and its value from the map. + * + * @private + * @name delete + * @memberOf MapCache + * @param {string} key The key of the value to remove. 
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ +function mapCacheDelete(key) { + var result = getMapData(this, key)['delete'](key); + this.size -= result ? 1 : 0; + return result; +} + +module.exports = mapCacheDelete; diff --git a/backend/node_modules/lodash/_mapCacheGet.js b/backend/node_modules/lodash/_mapCacheGet.js new file mode 100644 index 00000000..f29f55cf --- /dev/null +++ b/backend/node_modules/lodash/_mapCacheGet.js @@ -0,0 +1,16 @@ +var getMapData = require('./_getMapData'); + +/** + * Gets the map value for `key`. + * + * @private + * @name get + * @memberOf MapCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ +function mapCacheGet(key) { + return getMapData(this, key).get(key); +} + +module.exports = mapCacheGet; diff --git a/backend/node_modules/lodash/_mapCacheHas.js b/backend/node_modules/lodash/_mapCacheHas.js new file mode 100644 index 00000000..a1214c02 --- /dev/null +++ b/backend/node_modules/lodash/_mapCacheHas.js @@ -0,0 +1,16 @@ +var getMapData = require('./_getMapData'); + +/** + * Checks if a map value for `key` exists. + * + * @private + * @name has + * @memberOf MapCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ +function mapCacheHas(key) { + return getMapData(this, key).has(key); +} + +module.exports = mapCacheHas; diff --git a/backend/node_modules/lodash/_mapCacheSet.js b/backend/node_modules/lodash/_mapCacheSet.js new file mode 100644 index 00000000..73468492 --- /dev/null +++ b/backend/node_modules/lodash/_mapCacheSet.js @@ -0,0 +1,22 @@ +var getMapData = require('./_getMapData'); + +/** + * Sets the map `key` to `value`. + * + * @private + * @name set + * @memberOf MapCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the map cache instance. + */ +function mapCacheSet(key, value) { + var data = getMapData(this, key), + size = data.size; + + data.set(key, value); + this.size += data.size == size ? 0 : 1; + return this; +} + +module.exports = mapCacheSet; diff --git a/backend/node_modules/lodash/_mapToArray.js b/backend/node_modules/lodash/_mapToArray.js new file mode 100644 index 00000000..fe3dd531 --- /dev/null +++ b/backend/node_modules/lodash/_mapToArray.js @@ -0,0 +1,18 @@ +/** + * Converts `map` to its key-value pairs. + * + * @private + * @param {Object} map The map to convert. + * @returns {Array} Returns the key-value pairs. + */ +function mapToArray(map) { + var index = -1, + result = Array(map.size); + + map.forEach(function(value, key) { + result[++index] = [key, value]; + }); + return result; +} + +module.exports = mapToArray; diff --git a/backend/node_modules/lodash/_matchesStrictComparable.js b/backend/node_modules/lodash/_matchesStrictComparable.js new file mode 100644 index 00000000..f608af9e --- /dev/null +++ b/backend/node_modules/lodash/_matchesStrictComparable.js @@ -0,0 +1,20 @@ +/** + * A specialized version of `matchesProperty` for source values suitable + * for strict equality comparisons, i.e. `===`. + * + * @private + * @param {string} key The key of the property to get. + * @param {*} srcValue The value to match. + * @returns {Function} Returns the new spec function. 
+ */ +function matchesStrictComparable(key, srcValue) { + return function(object) { + if (object == null) { + return false; + } + return object[key] === srcValue && + (srcValue !== undefined || (key in Object(object))); + }; +} + +module.exports = matchesStrictComparable; diff --git a/backend/node_modules/lodash/_memoizeCapped.js b/backend/node_modules/lodash/_memoizeCapped.js new file mode 100644 index 00000000..7f71c8fb --- /dev/null +++ b/backend/node_modules/lodash/_memoizeCapped.js @@ -0,0 +1,26 @@ +var memoize = require('./memoize'); + +/** Used as the maximum memoize cache size. */ +var MAX_MEMOIZE_SIZE = 500; + +/** + * A specialized version of `_.memoize` which clears the memoized function's + * cache when it exceeds `MAX_MEMOIZE_SIZE`. + * + * @private + * @param {Function} func The function to have its output memoized. + * @returns {Function} Returns the new memoized function. + */ +function memoizeCapped(func) { + var result = memoize(func, function(key) { + if (cache.size === MAX_MEMOIZE_SIZE) { + cache.clear(); + } + return key; + }); + + var cache = result.cache; + return result; +} + +module.exports = memoizeCapped; diff --git a/backend/node_modules/lodash/_mergeData.js b/backend/node_modules/lodash/_mergeData.js new file mode 100644 index 00000000..cb570f97 --- /dev/null +++ b/backend/node_modules/lodash/_mergeData.js @@ -0,0 +1,90 @@ +var composeArgs = require('./_composeArgs'), + composeArgsRight = require('./_composeArgsRight'), + replaceHolders = require('./_replaceHolders'); + +/** Used as the internal argument placeholder. */ +var PLACEHOLDER = '__lodash_placeholder__'; + +/** Used to compose bitmasks for function metadata. */ +var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_CURRY_BOUND_FLAG = 4, + WRAP_CURRY_FLAG = 8, + WRAP_ARY_FLAG = 128, + WRAP_REARG_FLAG = 256; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMin = Math.min; + +/** + * Merges the function metadata of `source` into `data`. + * + * Merging metadata reduces the number of wrappers used to invoke a function. + * This is possible because methods like `_.bind`, `_.curry`, and `_.partial` + * may be applied regardless of execution order. Methods like `_.ary` and + * `_.rearg` modify function arguments, making the order in which they are + * executed important, preventing the merging of metadata. However, we make + * an exception for a safe combined case where curried functions have `_.ary` + * and or `_.rearg` applied. + * + * @private + * @param {Array} data The destination metadata. + * @param {Array} source The source metadata. + * @returns {Array} Returns `data`. + */ +function mergeData(data, source) { + var bitmask = data[1], + srcBitmask = source[1], + newBitmask = bitmask | srcBitmask, + isCommon = newBitmask < (WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG | WRAP_ARY_FLAG); + + var isCombo = + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_CURRY_FLAG)) || + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_REARG_FLAG) && (data[7].length <= source[8])) || + ((srcBitmask == (WRAP_ARY_FLAG | WRAP_REARG_FLAG)) && (source[7].length <= source[8]) && (bitmask == WRAP_CURRY_FLAG)); + + // Exit early if metadata can't be merged. + if (!(isCommon || isCombo)) { + return data; + } + // Use source `thisArg` if available. + if (srcBitmask & WRAP_BIND_FLAG) { + data[2] = source[2]; + // Set when currying a bound function. + newBitmask |= bitmask & WRAP_BIND_FLAG ? 0 : WRAP_CURRY_BOUND_FLAG; + } + // Compose partial arguments. 
+ var value = source[3]; + if (value) { + var partials = data[3]; + data[3] = partials ? composeArgs(partials, value, source[4]) : value; + data[4] = partials ? replaceHolders(data[3], PLACEHOLDER) : source[4]; + } + // Compose partial right arguments. + value = source[5]; + if (value) { + partials = data[5]; + data[5] = partials ? composeArgsRight(partials, value, source[6]) : value; + data[6] = partials ? replaceHolders(data[5], PLACEHOLDER) : source[6]; + } + // Use source `argPos` if available. + value = source[7]; + if (value) { + data[7] = value; + } + // Use source `ary` if it's smaller. + if (srcBitmask & WRAP_ARY_FLAG) { + data[8] = data[8] == null ? source[8] : nativeMin(data[8], source[8]); + } + // Use source `arity` if one is not provided. + if (data[9] == null) { + data[9] = source[9]; + } + // Use source `func` and merge bitmasks. + data[0] = source[0]; + data[1] = newBitmask; + + return data; +} + +module.exports = mergeData; diff --git a/backend/node_modules/lodash/_metaMap.js b/backend/node_modules/lodash/_metaMap.js new file mode 100644 index 00000000..0157a0b0 --- /dev/null +++ b/backend/node_modules/lodash/_metaMap.js @@ -0,0 +1,6 @@ +var WeakMap = require('./_WeakMap'); + +/** Used to store function metadata. */ +var metaMap = WeakMap && new WeakMap; + +module.exports = metaMap; diff --git a/backend/node_modules/lodash/_nativeCreate.js b/backend/node_modules/lodash/_nativeCreate.js new file mode 100644 index 00000000..c7aede85 --- /dev/null +++ b/backend/node_modules/lodash/_nativeCreate.js @@ -0,0 +1,6 @@ +var getNative = require('./_getNative'); + +/* Built-in method references that are verified to be native. */ +var nativeCreate = getNative(Object, 'create'); + +module.exports = nativeCreate; diff --git a/backend/node_modules/lodash/_nativeKeys.js b/backend/node_modules/lodash/_nativeKeys.js new file mode 100644 index 00000000..479a104a --- /dev/null +++ b/backend/node_modules/lodash/_nativeKeys.js @@ -0,0 +1,6 @@ +var overArg = require('./_overArg'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeKeys = overArg(Object.keys, Object); + +module.exports = nativeKeys; diff --git a/backend/node_modules/lodash/_nativeKeysIn.js b/backend/node_modules/lodash/_nativeKeysIn.js new file mode 100644 index 00000000..00ee5059 --- /dev/null +++ b/backend/node_modules/lodash/_nativeKeysIn.js @@ -0,0 +1,20 @@ +/** + * This function is like + * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * except that it includes inherited enumerable properties. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ +function nativeKeysIn(object) { + var result = []; + if (object != null) { + for (var key in Object(object)) { + result.push(key); + } + } + return result; +} + +module.exports = nativeKeysIn; diff --git a/backend/node_modules/lodash/_nodeUtil.js b/backend/node_modules/lodash/_nodeUtil.js new file mode 100644 index 00000000..983d78f7 --- /dev/null +++ b/backend/node_modules/lodash/_nodeUtil.js @@ -0,0 +1,30 @@ +var freeGlobal = require('./_freeGlobal'); + +/** Detect free variable `exports`. */ +var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports; + +/** Detect free variable `module`. */ +var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module; + +/** Detect the popular CommonJS extension `module.exports`. 
*/ +var moduleExports = freeModule && freeModule.exports === freeExports; + +/** Detect free variable `process` from Node.js. */ +var freeProcess = moduleExports && freeGlobal.process; + +/** Used to access faster Node.js helpers. */ +var nodeUtil = (function() { + try { + // Use `util.types` for Node.js 10+. + var types = freeModule && freeModule.require && freeModule.require('util').types; + + if (types) { + return types; + } + + // Legacy `process.binding('util')` for Node.js < 10. + return freeProcess && freeProcess.binding && freeProcess.binding('util'); + } catch (e) {} +}()); + +module.exports = nodeUtil; diff --git a/backend/node_modules/lodash/_objectToString.js b/backend/node_modules/lodash/_objectToString.js new file mode 100644 index 00000000..c614ec09 --- /dev/null +++ b/backend/node_modules/lodash/_objectToString.js @@ -0,0 +1,22 @@ +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var nativeObjectToString = objectProto.toString; + +/** + * Converts `value` to a string using `Object.prototype.toString`. + * + * @private + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + */ +function objectToString(value) { + return nativeObjectToString.call(value); +} + +module.exports = objectToString; diff --git a/backend/node_modules/lodash/_overArg.js b/backend/node_modules/lodash/_overArg.js new file mode 100644 index 00000000..651c5c55 --- /dev/null +++ b/backend/node_modules/lodash/_overArg.js @@ -0,0 +1,15 @@ +/** + * Creates a unary function that invokes `func` with its argument transformed. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. + */ +function overArg(func, transform) { + return function(arg) { + return func(transform(arg)); + }; +} + +module.exports = overArg; diff --git a/backend/node_modules/lodash/_overRest.js b/backend/node_modules/lodash/_overRest.js new file mode 100644 index 00000000..c7cdef33 --- /dev/null +++ b/backend/node_modules/lodash/_overRest.js @@ -0,0 +1,36 @@ +var apply = require('./_apply'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max; + +/** + * A specialized version of `baseRest` which transforms the rest array. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @param {Function} transform The rest array transform. + * @returns {Function} Returns the new function. + */ +function overRest(func, start, transform) { + start = nativeMax(start === undefined ? 
(func.length - 1) : start, 0); + return function() { + var args = arguments, + index = -1, + length = nativeMax(args.length - start, 0), + array = Array(length); + + while (++index < length) { + array[index] = args[start + index]; + } + index = -1; + var otherArgs = Array(start + 1); + while (++index < start) { + otherArgs[index] = args[index]; + } + otherArgs[start] = transform(array); + return apply(func, this, otherArgs); + }; +} + +module.exports = overRest; diff --git a/backend/node_modules/lodash/_parent.js b/backend/node_modules/lodash/_parent.js new file mode 100644 index 00000000..f174328f --- /dev/null +++ b/backend/node_modules/lodash/_parent.js @@ -0,0 +1,16 @@ +var baseGet = require('./_baseGet'), + baseSlice = require('./_baseSlice'); + +/** + * Gets the parent value at `path` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} path The path to get the parent value of. + * @returns {*} Returns the parent value. + */ +function parent(object, path) { + return path.length < 2 ? object : baseGet(object, baseSlice(path, 0, -1)); +} + +module.exports = parent; diff --git a/backend/node_modules/lodash/_reEscape.js b/backend/node_modules/lodash/_reEscape.js new file mode 100644 index 00000000..7f47eda6 --- /dev/null +++ b/backend/node_modules/lodash/_reEscape.js @@ -0,0 +1,4 @@ +/** Used to match template delimiters. */ +var reEscape = /<%-([\s\S]+?)%>/g; + +module.exports = reEscape; diff --git a/backend/node_modules/lodash/_reEvaluate.js b/backend/node_modules/lodash/_reEvaluate.js new file mode 100644 index 00000000..6adfc312 --- /dev/null +++ b/backend/node_modules/lodash/_reEvaluate.js @@ -0,0 +1,4 @@ +/** Used to match template delimiters. */ +var reEvaluate = /<%([\s\S]+?)%>/g; + +module.exports = reEvaluate; diff --git a/backend/node_modules/lodash/_reInterpolate.js b/backend/node_modules/lodash/_reInterpolate.js new file mode 100644 index 00000000..d02ff0b2 --- /dev/null +++ b/backend/node_modules/lodash/_reInterpolate.js @@ -0,0 +1,4 @@ +/** Used to match template delimiters. */ +var reInterpolate = /<%=([\s\S]+?)%>/g; + +module.exports = reInterpolate; diff --git a/backend/node_modules/lodash/_realNames.js b/backend/node_modules/lodash/_realNames.js new file mode 100644 index 00000000..aa0d5292 --- /dev/null +++ b/backend/node_modules/lodash/_realNames.js @@ -0,0 +1,4 @@ +/** Used to lookup unminified function names. */ +var realNames = {}; + +module.exports = realNames; diff --git a/backend/node_modules/lodash/_reorder.js b/backend/node_modules/lodash/_reorder.js new file mode 100644 index 00000000..a3502b05 --- /dev/null +++ b/backend/node_modules/lodash/_reorder.js @@ -0,0 +1,29 @@ +var copyArray = require('./_copyArray'), + isIndex = require('./_isIndex'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMin = Math.min; + +/** + * Reorder `array` according to the specified indexes where the element at + * the first index is assigned as the first element, the element at + * the second index is assigned as the second element, and so on. + * + * @private + * @param {Array} array The array to reorder. + * @param {Array} indexes The arranged array indexes. + * @returns {Array} Returns `array`. + */ +function reorder(array, indexes) { + var arrLength = array.length, + length = nativeMin(indexes.length, arrLength), + oldArray = copyArray(array); + + while (length--) { + var index = indexes[length]; + array[length] = isIndex(index, arrLength) ? 
oldArray[index] : undefined; + } + return array; +} + +module.exports = reorder; diff --git a/backend/node_modules/lodash/_replaceHolders.js b/backend/node_modules/lodash/_replaceHolders.js new file mode 100644 index 00000000..74360ec4 --- /dev/null +++ b/backend/node_modules/lodash/_replaceHolders.js @@ -0,0 +1,29 @@ +/** Used as the internal argument placeholder. */ +var PLACEHOLDER = '__lodash_placeholder__'; + +/** + * Replaces all `placeholder` elements in `array` with an internal placeholder + * and returns an array of their indexes. + * + * @private + * @param {Array} array The array to modify. + * @param {*} placeholder The placeholder to replace. + * @returns {Array} Returns the new array of placeholder indexes. + */ +function replaceHolders(array, placeholder) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (value === placeholder || value === PLACEHOLDER) { + array[index] = PLACEHOLDER; + result[resIndex++] = index; + } + } + return result; +} + +module.exports = replaceHolders; diff --git a/backend/node_modules/lodash/_root.js b/backend/node_modules/lodash/_root.js new file mode 100644 index 00000000..d2852bed --- /dev/null +++ b/backend/node_modules/lodash/_root.js @@ -0,0 +1,9 @@ +var freeGlobal = require('./_freeGlobal'); + +/** Detect free variable `self`. */ +var freeSelf = typeof self == 'object' && self && self.Object === Object && self; + +/** Used as a reference to the global object. */ +var root = freeGlobal || freeSelf || Function('return this')(); + +module.exports = root; diff --git a/backend/node_modules/lodash/_safeGet.js b/backend/node_modules/lodash/_safeGet.js new file mode 100644 index 00000000..b070897d --- /dev/null +++ b/backend/node_modules/lodash/_safeGet.js @@ -0,0 +1,21 @@ +/** + * Gets the value at `key`, unless `key` is "__proto__" or "constructor". + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ +function safeGet(object, key) { + if (key === 'constructor' && typeof object[key] === 'function') { + return; + } + + if (key == '__proto__') { + return; + } + + return object[key]; +} + +module.exports = safeGet; diff --git a/backend/node_modules/lodash/_setCacheAdd.js b/backend/node_modules/lodash/_setCacheAdd.js new file mode 100644 index 00000000..1081a744 --- /dev/null +++ b/backend/node_modules/lodash/_setCacheAdd.js @@ -0,0 +1,19 @@ +/** Used to stand-in for `undefined` hash values. */ +var HASH_UNDEFINED = '__lodash_hash_undefined__'; + +/** + * Adds `value` to the array cache. + * + * @private + * @name add + * @memberOf SetCache + * @alias push + * @param {*} value The value to cache. + * @returns {Object} Returns the cache instance. + */ +function setCacheAdd(value) { + this.__data__.set(value, HASH_UNDEFINED); + return this; +} + +module.exports = setCacheAdd; diff --git a/backend/node_modules/lodash/_setCacheHas.js b/backend/node_modules/lodash/_setCacheHas.js new file mode 100644 index 00000000..9a492556 --- /dev/null +++ b/backend/node_modules/lodash/_setCacheHas.js @@ -0,0 +1,14 @@ +/** + * Checks if `value` is in the array cache. + * + * @private + * @name has + * @memberOf SetCache + * @param {*} value The value to search for. + * @returns {number} Returns `true` if `value` is found, else `false`. 
+ */ +function setCacheHas(value) { + return this.__data__.has(value); +} + +module.exports = setCacheHas; diff --git a/backend/node_modules/lodash/_setData.js b/backend/node_modules/lodash/_setData.js new file mode 100644 index 00000000..e5cf3eb9 --- /dev/null +++ b/backend/node_modules/lodash/_setData.js @@ -0,0 +1,20 @@ +var baseSetData = require('./_baseSetData'), + shortOut = require('./_shortOut'); + +/** + * Sets metadata for `func`. + * + * **Note:** If this function becomes hot, i.e. is invoked a lot in a short + * period of time, it will trip its breaker and transition to an identity + * function to avoid garbage collection pauses in V8. See + * [V8 issue 2070](https://bugs.chromium.org/p/v8/issues/detail?id=2070) + * for more details. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ +var setData = shortOut(baseSetData); + +module.exports = setData; diff --git a/backend/node_modules/lodash/_setToArray.js b/backend/node_modules/lodash/_setToArray.js new file mode 100644 index 00000000..b87f0741 --- /dev/null +++ b/backend/node_modules/lodash/_setToArray.js @@ -0,0 +1,18 @@ +/** + * Converts `set` to an array of its values. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the values. + */ +function setToArray(set) { + var index = -1, + result = Array(set.size); + + set.forEach(function(value) { + result[++index] = value; + }); + return result; +} + +module.exports = setToArray; diff --git a/backend/node_modules/lodash/_setToPairs.js b/backend/node_modules/lodash/_setToPairs.js new file mode 100644 index 00000000..36ad37a0 --- /dev/null +++ b/backend/node_modules/lodash/_setToPairs.js @@ -0,0 +1,18 @@ +/** + * Converts `set` to its value-value pairs. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the value-value pairs. + */ +function setToPairs(set) { + var index = -1, + result = Array(set.size); + + set.forEach(function(value) { + result[++index] = [value, value]; + }); + return result; +} + +module.exports = setToPairs; diff --git a/backend/node_modules/lodash/_setToString.js b/backend/node_modules/lodash/_setToString.js new file mode 100644 index 00000000..6ca84196 --- /dev/null +++ b/backend/node_modules/lodash/_setToString.js @@ -0,0 +1,14 @@ +var baseSetToString = require('./_baseSetToString'), + shortOut = require('./_shortOut'); + +/** + * Sets the `toString` method of `func` to return `string`. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. + */ +var setToString = shortOut(baseSetToString); + +module.exports = setToString; diff --git a/backend/node_modules/lodash/_setWrapToString.js b/backend/node_modules/lodash/_setWrapToString.js new file mode 100644 index 00000000..decdc449 --- /dev/null +++ b/backend/node_modules/lodash/_setWrapToString.js @@ -0,0 +1,21 @@ +var getWrapDetails = require('./_getWrapDetails'), + insertWrapDetails = require('./_insertWrapDetails'), + setToString = require('./_setToString'), + updateWrapDetails = require('./_updateWrapDetails'); + +/** + * Sets the `toString` method of `wrapper` to mimic the source of `reference` + * with wrapper details in a comment at the top of the source body. + * + * @private + * @param {Function} wrapper The function to modify. + * @param {Function} reference The reference function. 
+ * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @returns {Function} Returns `wrapper`. + */ +function setWrapToString(wrapper, reference, bitmask) { + var source = (reference + ''); + return setToString(wrapper, insertWrapDetails(source, updateWrapDetails(getWrapDetails(source), bitmask))); +} + +module.exports = setWrapToString; diff --git a/backend/node_modules/lodash/_shortOut.js b/backend/node_modules/lodash/_shortOut.js new file mode 100644 index 00000000..3300a079 --- /dev/null +++ b/backend/node_modules/lodash/_shortOut.js @@ -0,0 +1,37 @@ +/** Used to detect hot functions by number of calls within a span of milliseconds. */ +var HOT_COUNT = 800, + HOT_SPAN = 16; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeNow = Date.now; + +/** + * Creates a function that'll short out and invoke `identity` instead + * of `func` when it's called `HOT_COUNT` or more times in `HOT_SPAN` + * milliseconds. + * + * @private + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new shortable function. + */ +function shortOut(func) { + var count = 0, + lastCalled = 0; + + return function() { + var stamp = nativeNow(), + remaining = HOT_SPAN - (stamp - lastCalled); + + lastCalled = stamp; + if (remaining > 0) { + if (++count >= HOT_COUNT) { + return arguments[0]; + } + } else { + count = 0; + } + return func.apply(undefined, arguments); + }; +} + +module.exports = shortOut; diff --git a/backend/node_modules/lodash/_shuffleSelf.js b/backend/node_modules/lodash/_shuffleSelf.js new file mode 100644 index 00000000..8bcc4f5c --- /dev/null +++ b/backend/node_modules/lodash/_shuffleSelf.js @@ -0,0 +1,28 @@ +var baseRandom = require('./_baseRandom'); + +/** + * A specialized version of `_.shuffle` which mutates and sets the size of `array`. + * + * @private + * @param {Array} array The array to shuffle. + * @param {number} [size=array.length] The size of `array`. + * @returns {Array} Returns `array`. + */ +function shuffleSelf(array, size) { + var index = -1, + length = array.length, + lastIndex = length - 1; + + size = size === undefined ? length : size; + while (++index < size) { + var rand = baseRandom(index, lastIndex), + value = array[rand]; + + array[rand] = array[index]; + array[index] = value; + } + array.length = size; + return array; +} + +module.exports = shuffleSelf; diff --git a/backend/node_modules/lodash/_stackClear.js b/backend/node_modules/lodash/_stackClear.js new file mode 100644 index 00000000..ce8e5a92 --- /dev/null +++ b/backend/node_modules/lodash/_stackClear.js @@ -0,0 +1,15 @@ +var ListCache = require('./_ListCache'); + +/** + * Removes all key-value entries from the stack. + * + * @private + * @name clear + * @memberOf Stack + */ +function stackClear() { + this.__data__ = new ListCache; + this.size = 0; +} + +module.exports = stackClear; diff --git a/backend/node_modules/lodash/_stackDelete.js b/backend/node_modules/lodash/_stackDelete.js new file mode 100644 index 00000000..ff9887ab --- /dev/null +++ b/backend/node_modules/lodash/_stackDelete.js @@ -0,0 +1,18 @@ +/** + * Removes `key` and its value from the stack. + * + * @private + * @name delete + * @memberOf Stack + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. 
+ */ +function stackDelete(key) { + var data = this.__data__, + result = data['delete'](key); + + this.size = data.size; + return result; +} + +module.exports = stackDelete; diff --git a/backend/node_modules/lodash/_stackGet.js b/backend/node_modules/lodash/_stackGet.js new file mode 100644 index 00000000..1cdf0040 --- /dev/null +++ b/backend/node_modules/lodash/_stackGet.js @@ -0,0 +1,14 @@ +/** + * Gets the stack value for `key`. + * + * @private + * @name get + * @memberOf Stack + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ +function stackGet(key) { + return this.__data__.get(key); +} + +module.exports = stackGet; diff --git a/backend/node_modules/lodash/_stackHas.js b/backend/node_modules/lodash/_stackHas.js new file mode 100644 index 00000000..16a3ad11 --- /dev/null +++ b/backend/node_modules/lodash/_stackHas.js @@ -0,0 +1,14 @@ +/** + * Checks if a stack value for `key` exists. + * + * @private + * @name has + * @memberOf Stack + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ +function stackHas(key) { + return this.__data__.has(key); +} + +module.exports = stackHas; diff --git a/backend/node_modules/lodash/_stackSet.js b/backend/node_modules/lodash/_stackSet.js new file mode 100644 index 00000000..b790ac5f --- /dev/null +++ b/backend/node_modules/lodash/_stackSet.js @@ -0,0 +1,34 @@ +var ListCache = require('./_ListCache'), + Map = require('./_Map'), + MapCache = require('./_MapCache'); + +/** Used as the size to enable large array optimizations. */ +var LARGE_ARRAY_SIZE = 200; + +/** + * Sets the stack `key` to `value`. + * + * @private + * @name set + * @memberOf Stack + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the stack cache instance. + */ +function stackSet(key, value) { + var data = this.__data__; + if (data instanceof ListCache) { + var pairs = data.__data__; + if (!Map || (pairs.length < LARGE_ARRAY_SIZE - 1)) { + pairs.push([key, value]); + this.size = ++data.size; + return this; + } + data = this.__data__ = new MapCache(pairs); + } + data.set(key, value); + this.size = data.size; + return this; +} + +module.exports = stackSet; diff --git a/backend/node_modules/lodash/_strictIndexOf.js b/backend/node_modules/lodash/_strictIndexOf.js new file mode 100644 index 00000000..0486a495 --- /dev/null +++ b/backend/node_modules/lodash/_strictIndexOf.js @@ -0,0 +1,23 @@ +/** + * A specialized version of `_.indexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ +function strictIndexOf(array, value, fromIndex) { + var index = fromIndex - 1, + length = array.length; + + while (++index < length) { + if (array[index] === value) { + return index; + } + } + return -1; +} + +module.exports = strictIndexOf; diff --git a/backend/node_modules/lodash/_strictLastIndexOf.js b/backend/node_modules/lodash/_strictLastIndexOf.js new file mode 100644 index 00000000..d7310dcc --- /dev/null +++ b/backend/node_modules/lodash/_strictLastIndexOf.js @@ -0,0 +1,21 @@ +/** + * A specialized version of `_.lastIndexOf` which performs strict equality + * comparisons of values, i.e. `===`. 
+ * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ +function strictLastIndexOf(array, value, fromIndex) { + var index = fromIndex + 1; + while (index--) { + if (array[index] === value) { + return index; + } + } + return index; +} + +module.exports = strictLastIndexOf; diff --git a/backend/node_modules/lodash/_stringSize.js b/backend/node_modules/lodash/_stringSize.js new file mode 100644 index 00000000..17ef462a --- /dev/null +++ b/backend/node_modules/lodash/_stringSize.js @@ -0,0 +1,18 @@ +var asciiSize = require('./_asciiSize'), + hasUnicode = require('./_hasUnicode'), + unicodeSize = require('./_unicodeSize'); + +/** + * Gets the number of symbols in `string`. + * + * @private + * @param {string} string The string to inspect. + * @returns {number} Returns the string size. + */ +function stringSize(string) { + return hasUnicode(string) + ? unicodeSize(string) + : asciiSize(string); +} + +module.exports = stringSize; diff --git a/backend/node_modules/lodash/_stringToArray.js b/backend/node_modules/lodash/_stringToArray.js new file mode 100644 index 00000000..d161158c --- /dev/null +++ b/backend/node_modules/lodash/_stringToArray.js @@ -0,0 +1,18 @@ +var asciiToArray = require('./_asciiToArray'), + hasUnicode = require('./_hasUnicode'), + unicodeToArray = require('./_unicodeToArray'); + +/** + * Converts `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ +function stringToArray(string) { + return hasUnicode(string) + ? unicodeToArray(string) + : asciiToArray(string); +} + +module.exports = stringToArray; diff --git a/backend/node_modules/lodash/_stringToPath.js b/backend/node_modules/lodash/_stringToPath.js new file mode 100644 index 00000000..8f39f8a2 --- /dev/null +++ b/backend/node_modules/lodash/_stringToPath.js @@ -0,0 +1,27 @@ +var memoizeCapped = require('./_memoizeCapped'); + +/** Used to match property names within property paths. */ +var rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; + +/** Used to match backslashes in property paths. */ +var reEscapeChar = /\\(\\)?/g; + +/** + * Converts `string` to a property path array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the property path array. + */ +var stringToPath = memoizeCapped(function(string) { + var result = []; + if (string.charCodeAt(0) === 46 /* . */) { + result.push(''); + } + string.replace(rePropName, function(match, number, quote, subString) { + result.push(quote ? subString.replace(reEscapeChar, '$1') : (number || match)); + }); + return result; +}); + +module.exports = stringToPath; diff --git a/backend/node_modules/lodash/_toKey.js b/backend/node_modules/lodash/_toKey.js new file mode 100644 index 00000000..c6d645c4 --- /dev/null +++ b/backend/node_modules/lodash/_toKey.js @@ -0,0 +1,21 @@ +var isSymbol = require('./isSymbol'); + +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0; + +/** + * Converts `value` to a string key if it's not a string or symbol. + * + * @private + * @param {*} value The value to inspect. + * @returns {string|symbol} Returns the key. 
+ */ +function toKey(value) { + if (typeof value == 'string' || isSymbol(value)) { + return value; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; +} + +module.exports = toKey; diff --git a/backend/node_modules/lodash/_toSource.js b/backend/node_modules/lodash/_toSource.js new file mode 100644 index 00000000..a020b386 --- /dev/null +++ b/backend/node_modules/lodash/_toSource.js @@ -0,0 +1,26 @@ +/** Used for built-in method references. */ +var funcProto = Function.prototype; + +/** Used to resolve the decompiled source of functions. */ +var funcToString = funcProto.toString; + +/** + * Converts `func` to its source code. + * + * @private + * @param {Function} func The function to convert. + * @returns {string} Returns the source code. + */ +function toSource(func) { + if (func != null) { + try { + return funcToString.call(func); + } catch (e) {} + try { + return (func + ''); + } catch (e) {} + } + return ''; +} + +module.exports = toSource; diff --git a/backend/node_modules/lodash/_trimmedEndIndex.js b/backend/node_modules/lodash/_trimmedEndIndex.js new file mode 100644 index 00000000..139439ad --- /dev/null +++ b/backend/node_modules/lodash/_trimmedEndIndex.js @@ -0,0 +1,19 @@ +/** Used to match a single whitespace character. */ +var reWhitespace = /\s/; + +/** + * Used by `_.trim` and `_.trimEnd` to get the index of the last non-whitespace + * character of `string`. + * + * @private + * @param {string} string The string to inspect. + * @returns {number} Returns the index of the last non-whitespace character. + */ +function trimmedEndIndex(string) { + var index = string.length; + + while (index-- && reWhitespace.test(string.charAt(index))) {} + return index; +} + +module.exports = trimmedEndIndex; diff --git a/backend/node_modules/lodash/_unescapeHtmlChar.js b/backend/node_modules/lodash/_unescapeHtmlChar.js new file mode 100644 index 00000000..a71fecb3 --- /dev/null +++ b/backend/node_modules/lodash/_unescapeHtmlChar.js @@ -0,0 +1,21 @@ +var basePropertyOf = require('./_basePropertyOf'); + +/** Used to map HTML entities to characters. */ +var htmlUnescapes = { + '&amp;': '&', + '&lt;': '<', + '&gt;': '>', + '&quot;': '"', + '&#39;': "'" +}; + +/** + * Used by `_.unescape` to convert HTML entities to characters. + * + * @private + * @param {string} chr The matched character to unescape. + * @returns {string} Returns the unescaped character. + */ +var unescapeHtmlChar = basePropertyOf(htmlUnescapes); + +module.exports = unescapeHtmlChar; diff --git a/backend/node_modules/lodash/_unicodeSize.js b/backend/node_modules/lodash/_unicodeSize.js new file mode 100644 index 00000000..68137ec2 --- /dev/null +++ b/backend/node_modules/lodash/_unicodeSize.js @@ -0,0 +1,44 @@ +/** Used to compose unicode character classes. */ +var rsAstralRange = '\\ud800-\\udfff', + rsComboMarksRange = '\\u0300-\\u036f', + reComboHalfMarksRange = '\\ufe20-\\ufe2f', + rsComboSymbolsRange = '\\u20d0-\\u20ff', + rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange, + rsVarRange = '\\ufe0e\\ufe0f'; + +/** Used to compose unicode capture groups. */ +var rsAstral = '[' + rsAstralRange + ']', + rsCombo = '[' + rsComboRange + ']', + rsFitz = '\\ud83c[\\udffb-\\udfff]', + rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')', + rsNonAstral = '[^' + rsAstralRange + ']', + rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}', + rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]', + rsZWJ = '\\u200d'; + +/** Used to compose unicode regexes. 
*/ +var reOptMod = rsModifier + '?', + rsOptVar = '[' + rsVarRange + ']?', + rsOptJoin = '(?:' + rsZWJ + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*', + rsSeq = rsOptVar + reOptMod + rsOptJoin, + rsSymbol = '(?:' + [rsNonAstral + rsCombo + '?', rsCombo, rsRegional, rsSurrPair, rsAstral].join('|') + ')'; + +/** Used to match [string symbols](https://mathiasbynens.be/notes/javascript-unicode). */ +var reUnicode = RegExp(rsFitz + '(?=' + rsFitz + ')|' + rsSymbol + rsSeq, 'g'); + +/** + * Gets the size of a Unicode `string`. + * + * @private + * @param {string} string The string inspect. + * @returns {number} Returns the string size. + */ +function unicodeSize(string) { + var result = reUnicode.lastIndex = 0; + while (reUnicode.test(string)) { + ++result; + } + return result; +} + +module.exports = unicodeSize; diff --git a/backend/node_modules/lodash/_unicodeToArray.js b/backend/node_modules/lodash/_unicodeToArray.js new file mode 100644 index 00000000..2a725c06 --- /dev/null +++ b/backend/node_modules/lodash/_unicodeToArray.js @@ -0,0 +1,40 @@ +/** Used to compose unicode character classes. */ +var rsAstralRange = '\\ud800-\\udfff', + rsComboMarksRange = '\\u0300-\\u036f', + reComboHalfMarksRange = '\\ufe20-\\ufe2f', + rsComboSymbolsRange = '\\u20d0-\\u20ff', + rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange, + rsVarRange = '\\ufe0e\\ufe0f'; + +/** Used to compose unicode capture groups. */ +var rsAstral = '[' + rsAstralRange + ']', + rsCombo = '[' + rsComboRange + ']', + rsFitz = '\\ud83c[\\udffb-\\udfff]', + rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')', + rsNonAstral = '[^' + rsAstralRange + ']', + rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}', + rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]', + rsZWJ = '\\u200d'; + +/** Used to compose unicode regexes. */ +var reOptMod = rsModifier + '?', + rsOptVar = '[' + rsVarRange + ']?', + rsOptJoin = '(?:' + rsZWJ + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*', + rsSeq = rsOptVar + reOptMod + rsOptJoin, + rsSymbol = '(?:' + [rsNonAstral + rsCombo + '?', rsCombo, rsRegional, rsSurrPair, rsAstral].join('|') + ')'; + +/** Used to match [string symbols](https://mathiasbynens.be/notes/javascript-unicode). */ +var reUnicode = RegExp(rsFitz + '(?=' + rsFitz + ')|' + rsSymbol + rsSeq, 'g'); + +/** + * Converts a Unicode `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ +function unicodeToArray(string) { + return string.match(reUnicode) || []; +} + +module.exports = unicodeToArray; diff --git a/backend/node_modules/lodash/_unicodeWords.js b/backend/node_modules/lodash/_unicodeWords.js new file mode 100644 index 00000000..e72e6e0f --- /dev/null +++ b/backend/node_modules/lodash/_unicodeWords.js @@ -0,0 +1,69 @@ +/** Used to compose unicode character classes. 
*/ +var rsAstralRange = '\\ud800-\\udfff', + rsComboMarksRange = '\\u0300-\\u036f', + reComboHalfMarksRange = '\\ufe20-\\ufe2f', + rsComboSymbolsRange = '\\u20d0-\\u20ff', + rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange, + rsDingbatRange = '\\u2700-\\u27bf', + rsLowerRange = 'a-z\\xdf-\\xf6\\xf8-\\xff', + rsMathOpRange = '\\xac\\xb1\\xd7\\xf7', + rsNonCharRange = '\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf', + rsPunctuationRange = '\\u2000-\\u206f', + rsSpaceRange = ' \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000', + rsUpperRange = 'A-Z\\xc0-\\xd6\\xd8-\\xde', + rsVarRange = '\\ufe0e\\ufe0f', + rsBreakRange = rsMathOpRange + rsNonCharRange + rsPunctuationRange + rsSpaceRange; + +/** Used to compose unicode capture groups. */ +var rsApos = "['\u2019]", + rsBreak = '[' + rsBreakRange + ']', + rsCombo = '[' + rsComboRange + ']', + rsDigits = '\\d+', + rsDingbat = '[' + rsDingbatRange + ']', + rsLower = '[' + rsLowerRange + ']', + rsMisc = '[^' + rsAstralRange + rsBreakRange + rsDigits + rsDingbatRange + rsLowerRange + rsUpperRange + ']', + rsFitz = '\\ud83c[\\udffb-\\udfff]', + rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')', + rsNonAstral = '[^' + rsAstralRange + ']', + rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}', + rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]', + rsUpper = '[' + rsUpperRange + ']', + rsZWJ = '\\u200d'; + +/** Used to compose unicode regexes. */ +var rsMiscLower = '(?:' + rsLower + '|' + rsMisc + ')', + rsMiscUpper = '(?:' + rsUpper + '|' + rsMisc + ')', + rsOptContrLower = '(?:' + rsApos + '(?:d|ll|m|re|s|t|ve))?', + rsOptContrUpper = '(?:' + rsApos + '(?:D|LL|M|RE|S|T|VE))?', + reOptMod = rsModifier + '?', + rsOptVar = '[' + rsVarRange + ']?', + rsOptJoin = '(?:' + rsZWJ + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*', + rsOrdLower = '\\d*(?:1st|2nd|3rd|(?![123])\\dth)(?=\\b|[A-Z_])', + rsOrdUpper = '\\d*(?:1ST|2ND|3RD|(?![123])\\dTH)(?=\\b|[a-z_])', + rsSeq = rsOptVar + reOptMod + rsOptJoin, + rsEmoji = '(?:' + [rsDingbat, rsRegional, rsSurrPair].join('|') + ')' + rsSeq; + +/** Used to match complex or compound words. */ +var reUnicodeWord = RegExp([ + rsUpper + '?' + rsLower + '+' + rsOptContrLower + '(?=' + [rsBreak, rsUpper, '$'].join('|') + ')', + rsMiscUpper + '+' + rsOptContrUpper + '(?=' + [rsBreak, rsUpper + rsMiscLower, '$'].join('|') + ')', + rsUpper + '?' + rsMiscLower + '+' + rsOptContrLower, + rsUpper + '+' + rsOptContrUpper, + rsOrdUpper, + rsOrdLower, + rsDigits, + rsEmoji +].join('|'), 'g'); + +/** + * Splits a Unicode `string` into an array of its words. + * + * @private + * @param {string} The string to inspect. + * @returns {Array} Returns the words of `string`. + */ +function unicodeWords(string) { + return string.match(reUnicodeWord) || []; +} + +module.exports = unicodeWords; diff --git a/backend/node_modules/lodash/_updateWrapDetails.js b/backend/node_modules/lodash/_updateWrapDetails.js new file mode 100644 index 00000000..8759fbdf --- /dev/null +++ b/backend/node_modules/lodash/_updateWrapDetails.js @@ -0,0 +1,46 @@ +var arrayEach = require('./_arrayEach'), + arrayIncludes = require('./_arrayIncludes'); + +/** Used to compose bitmasks for function metadata. 
*/ +var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_CURRY_FLAG = 8, + WRAP_CURRY_RIGHT_FLAG = 16, + WRAP_PARTIAL_FLAG = 32, + WRAP_PARTIAL_RIGHT_FLAG = 64, + WRAP_ARY_FLAG = 128, + WRAP_REARG_FLAG = 256, + WRAP_FLIP_FLAG = 512; + +/** Used to associate wrap methods with their bit flags. */ +var wrapFlags = [ + ['ary', WRAP_ARY_FLAG], + ['bind', WRAP_BIND_FLAG], + ['bindKey', WRAP_BIND_KEY_FLAG], + ['curry', WRAP_CURRY_FLAG], + ['curryRight', WRAP_CURRY_RIGHT_FLAG], + ['flip', WRAP_FLIP_FLAG], + ['partial', WRAP_PARTIAL_FLAG], + ['partialRight', WRAP_PARTIAL_RIGHT_FLAG], + ['rearg', WRAP_REARG_FLAG] +]; + +/** + * Updates wrapper `details` based on `bitmask` flags. + * + * @private + * @returns {Array} details The details to modify. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @returns {Array} Returns `details`. + */ +function updateWrapDetails(details, bitmask) { + arrayEach(wrapFlags, function(pair) { + var value = '_.' + pair[0]; + if ((bitmask & pair[1]) && !arrayIncludes(details, value)) { + details.push(value); + } + }); + return details.sort(); +} + +module.exports = updateWrapDetails; diff --git a/backend/node_modules/lodash/_wrapperClone.js b/backend/node_modules/lodash/_wrapperClone.js new file mode 100644 index 00000000..7bb58a2e --- /dev/null +++ b/backend/node_modules/lodash/_wrapperClone.js @@ -0,0 +1,23 @@ +var LazyWrapper = require('./_LazyWrapper'), + LodashWrapper = require('./_LodashWrapper'), + copyArray = require('./_copyArray'); + +/** + * Creates a clone of `wrapper`. + * + * @private + * @param {Object} wrapper The wrapper to clone. + * @returns {Object} Returns the cloned wrapper. + */ +function wrapperClone(wrapper) { + if (wrapper instanceof LazyWrapper) { + return wrapper.clone(); + } + var result = new LodashWrapper(wrapper.__wrapped__, wrapper.__chain__); + result.__actions__ = copyArray(wrapper.__actions__); + result.__index__ = wrapper.__index__; + result.__values__ = wrapper.__values__; + return result; +} + +module.exports = wrapperClone; diff --git a/backend/node_modules/lodash/add.js b/backend/node_modules/lodash/add.js new file mode 100644 index 00000000..f0695156 --- /dev/null +++ b/backend/node_modules/lodash/add.js @@ -0,0 +1,22 @@ +var createMathOperation = require('./_createMathOperation'); + +/** + * Adds two numbers. + * + * @static + * @memberOf _ + * @since 3.4.0 + * @category Math + * @param {number} augend The first number in an addition. + * @param {number} addend The second number in an addition. + * @returns {number} Returns the total. + * @example + * + * _.add(6, 4); + * // => 10 + */ +var add = createMathOperation(function(augend, addend) { + return augend + addend; +}, 0); + +module.exports = add; diff --git a/backend/node_modules/lodash/after.js b/backend/node_modules/lodash/after.js new file mode 100644 index 00000000..3900c979 --- /dev/null +++ b/backend/node_modules/lodash/after.js @@ -0,0 +1,42 @@ +var toInteger = require('./toInteger'); + +/** Error message constants. */ +var FUNC_ERROR_TEXT = 'Expected a function'; + +/** + * The opposite of `_.before`; this method creates a function that invokes + * `func` once it's called `n` or more times. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {number} n The number of calls before `func` is invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. 
+ * @example + * + * var saves = ['profile', 'settings']; + * + * var done = _.after(saves.length, function() { + * console.log('done saving!'); + * }); + * + * _.forEach(saves, function(type) { + * asyncSave({ 'type': type, 'complete': done }); + * }); + * // => Logs 'done saving!' after the two async saves have completed. + */ +function after(n, func) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n < 1) { + return func.apply(this, arguments); + } + }; +} + +module.exports = after; diff --git a/backend/node_modules/lodash/array.js b/backend/node_modules/lodash/array.js new file mode 100644 index 00000000..af688d3e --- /dev/null +++ b/backend/node_modules/lodash/array.js @@ -0,0 +1,67 @@ +module.exports = { + 'chunk': require('./chunk'), + 'compact': require('./compact'), + 'concat': require('./concat'), + 'difference': require('./difference'), + 'differenceBy': require('./differenceBy'), + 'differenceWith': require('./differenceWith'), + 'drop': require('./drop'), + 'dropRight': require('./dropRight'), + 'dropRightWhile': require('./dropRightWhile'), + 'dropWhile': require('./dropWhile'), + 'fill': require('./fill'), + 'findIndex': require('./findIndex'), + 'findLastIndex': require('./findLastIndex'), + 'first': require('./first'), + 'flatten': require('./flatten'), + 'flattenDeep': require('./flattenDeep'), + 'flattenDepth': require('./flattenDepth'), + 'fromPairs': require('./fromPairs'), + 'head': require('./head'), + 'indexOf': require('./indexOf'), + 'initial': require('./initial'), + 'intersection': require('./intersection'), + 'intersectionBy': require('./intersectionBy'), + 'intersectionWith': require('./intersectionWith'), + 'join': require('./join'), + 'last': require('./last'), + 'lastIndexOf': require('./lastIndexOf'), + 'nth': require('./nth'), + 'pull': require('./pull'), + 'pullAll': require('./pullAll'), + 'pullAllBy': require('./pullAllBy'), + 'pullAllWith': require('./pullAllWith'), + 'pullAt': require('./pullAt'), + 'remove': require('./remove'), + 'reverse': require('./reverse'), + 'slice': require('./slice'), + 'sortedIndex': require('./sortedIndex'), + 'sortedIndexBy': require('./sortedIndexBy'), + 'sortedIndexOf': require('./sortedIndexOf'), + 'sortedLastIndex': require('./sortedLastIndex'), + 'sortedLastIndexBy': require('./sortedLastIndexBy'), + 'sortedLastIndexOf': require('./sortedLastIndexOf'), + 'sortedUniq': require('./sortedUniq'), + 'sortedUniqBy': require('./sortedUniqBy'), + 'tail': require('./tail'), + 'take': require('./take'), + 'takeRight': require('./takeRight'), + 'takeRightWhile': require('./takeRightWhile'), + 'takeWhile': require('./takeWhile'), + 'union': require('./union'), + 'unionBy': require('./unionBy'), + 'unionWith': require('./unionWith'), + 'uniq': require('./uniq'), + 'uniqBy': require('./uniqBy'), + 'uniqWith': require('./uniqWith'), + 'unzip': require('./unzip'), + 'unzipWith': require('./unzipWith'), + 'without': require('./without'), + 'xor': require('./xor'), + 'xorBy': require('./xorBy'), + 'xorWith': require('./xorWith'), + 'zip': require('./zip'), + 'zipObject': require('./zipObject'), + 'zipObjectDeep': require('./zipObjectDeep'), + 'zipWith': require('./zipWith') +}; diff --git a/backend/node_modules/lodash/ary.js b/backend/node_modules/lodash/ary.js new file mode 100644 index 00000000..70c87d09 --- /dev/null +++ b/backend/node_modules/lodash/ary.js @@ -0,0 +1,29 @@ +var createWrap = require('./_createWrap'); + +/** Used to compose bitmasks 
for function metadata. */ +var WRAP_ARY_FLAG = 128; + +/** + * Creates a function that invokes `func`, with up to `n` arguments, + * ignoring any additional arguments. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to cap arguments for. + * @param {number} [n=func.length] The arity cap. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new capped function. + * @example + * + * _.map(['6', '8', '10'], _.ary(parseInt, 1)); + * // => [6, 8, 10] + */ +function ary(func, n, guard) { + n = guard ? undefined : n; + n = (func && n == null) ? func.length : n; + return createWrap(func, WRAP_ARY_FLAG, undefined, undefined, undefined, undefined, n); +} + +module.exports = ary; diff --git a/backend/node_modules/lodash/assign.js b/backend/node_modules/lodash/assign.js new file mode 100644 index 00000000..909db26a --- /dev/null +++ b/backend/node_modules/lodash/assign.js @@ -0,0 +1,58 @@ +var assignValue = require('./_assignValue'), + copyObject = require('./_copyObject'), + createAssigner = require('./_createAssigner'), + isArrayLike = require('./isArrayLike'), + isPrototype = require('./_isPrototype'), + keys = require('./keys'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Assigns own enumerable string keyed properties of source objects to the + * destination object. Source objects are applied from left to right. + * Subsequent sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object` and is loosely based on + * [`Object.assign`](https://mdn.io/Object/assign). + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assignIn + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assign({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'c': 3 } + */ +var assign = createAssigner(function(object, source) { + if (isPrototype(source) || isArrayLike(source)) { + copyObject(source, keys(source), object); + return; + } + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + assignValue(object, key, source[key]); + } + } +}); + +module.exports = assign; diff --git a/backend/node_modules/lodash/assignIn.js b/backend/node_modules/lodash/assignIn.js new file mode 100644 index 00000000..e663473a --- /dev/null +++ b/backend/node_modules/lodash/assignIn.js @@ -0,0 +1,40 @@ +var copyObject = require('./_copyObject'), + createAssigner = require('./_createAssigner'), + keysIn = require('./keysIn'); + +/** + * This method is like `_.assign` except that it iterates over own and + * inherited source properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extend + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. 
+ * @see _.assign + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assignIn({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'b': 2, 'c': 3, 'd': 4 } + */ +var assignIn = createAssigner(function(object, source) { + copyObject(source, keysIn(source), object); +}); + +module.exports = assignIn; diff --git a/backend/node_modules/lodash/assignInWith.js b/backend/node_modules/lodash/assignInWith.js new file mode 100644 index 00000000..68fcc0b0 --- /dev/null +++ b/backend/node_modules/lodash/assignInWith.js @@ -0,0 +1,38 @@ +var copyObject = require('./_copyObject'), + createAssigner = require('./_createAssigner'), + keysIn = require('./keysIn'); + +/** + * This method is like `_.assignIn` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extendWith + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignInWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ +var assignInWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keysIn(source), object, customizer); +}); + +module.exports = assignInWith; diff --git a/backend/node_modules/lodash/assignWith.js b/backend/node_modules/lodash/assignWith.js new file mode 100644 index 00000000..7dc6c761 --- /dev/null +++ b/backend/node_modules/lodash/assignWith.js @@ -0,0 +1,37 @@ +var copyObject = require('./_copyObject'), + createAssigner = require('./_createAssigner'), + keys = require('./keys'); + +/** + * This method is like `_.assign` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignInWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? 
srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ +var assignWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keys(source), object, customizer); +}); + +module.exports = assignWith; diff --git a/backend/node_modules/lodash/at.js b/backend/node_modules/lodash/at.js new file mode 100644 index 00000000..781ee9e5 --- /dev/null +++ b/backend/node_modules/lodash/at.js @@ -0,0 +1,23 @@ +var baseAt = require('./_baseAt'), + flatRest = require('./_flatRest'); + +/** + * Creates an array of values corresponding to `paths` of `object`. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Array} Returns the picked values. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }, 4] }; + * + * _.at(object, ['a[0].b.c', 'a[1]']); + * // => [3, 4] + */ +var at = flatRest(baseAt); + +module.exports = at; diff --git a/backend/node_modules/lodash/attempt.js b/backend/node_modules/lodash/attempt.js new file mode 100644 index 00000000..624d0152 --- /dev/null +++ b/backend/node_modules/lodash/attempt.js @@ -0,0 +1,35 @@ +var apply = require('./_apply'), + baseRest = require('./_baseRest'), + isError = require('./isError'); + +/** + * Attempts to invoke `func`, returning either the result or the caught error + * object. Any additional arguments are provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Util + * @param {Function} func The function to attempt. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {*} Returns the `func` result or error object. + * @example + * + * // Avoid throwing errors for invalid selectors. + * var elements = _.attempt(function(selector) { + * return document.querySelectorAll(selector); + * }, '>_>'); + * + * if (_.isError(elements)) { + * elements = []; + * } + */ +var attempt = baseRest(function(func, args) { + try { + return apply(func, undefined, args); + } catch (e) { + return isError(e) ? e : new Error(e); + } +}); + +module.exports = attempt; diff --git a/backend/node_modules/lodash/before.js b/backend/node_modules/lodash/before.js new file mode 100644 index 00000000..a3e0a16c --- /dev/null +++ b/backend/node_modules/lodash/before.js @@ -0,0 +1,40 @@ +var toInteger = require('./toInteger'); + +/** Error message constants. */ +var FUNC_ERROR_TEXT = 'Expected a function'; + +/** + * Creates a function that invokes `func`, with the `this` binding and arguments + * of the created function, while it's called less than `n` times. Subsequent + * calls to the created function return the result of the last `func` invocation. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {number} n The number of calls at which `func` is no longer invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * jQuery(element).on('click', _.before(5, addContactToList)); + * // => Allows adding up to 4 contacts to the list. 
+ */ +function before(n, func) { + var result; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n > 0) { + result = func.apply(this, arguments); + } + if (n <= 1) { + func = undefined; + } + return result; + }; +} + +module.exports = before; diff --git a/backend/node_modules/lodash/bind.js b/backend/node_modules/lodash/bind.js new file mode 100644 index 00000000..b1076e93 --- /dev/null +++ b/backend/node_modules/lodash/bind.js @@ -0,0 +1,57 @@ +var baseRest = require('./_baseRest'), + createWrap = require('./_createWrap'), + getHolder = require('./_getHolder'), + replaceHolders = require('./_replaceHolders'); + +/** Used to compose bitmasks for function metadata. */ +var WRAP_BIND_FLAG = 1, + WRAP_PARTIAL_FLAG = 32; + +/** + * Creates a function that invokes `func` with the `this` binding of `thisArg` + * and `partials` prepended to the arguments it receives. + * + * The `_.bind.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for partially applied arguments. + * + * **Note:** Unlike native `Function#bind`, this method doesn't set the "length" + * property of bound functions. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to bind. + * @param {*} thisArg The `this` binding of `func`. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * function greet(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * + * var object = { 'user': 'fred' }; + * + * var bound = _.bind(greet, object, 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * // Bound with placeholders. + * var bound = _.bind(greet, object, _, '!'); + * bound('hi'); + * // => 'hi fred!' + */ +var bind = baseRest(function(func, thisArg, partials) { + var bitmask = WRAP_BIND_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bind)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(func, bitmask, thisArg, partials, holders); +}); + +// Assign default placeholders. +bind.placeholder = {}; + +module.exports = bind; diff --git a/backend/node_modules/lodash/bindAll.js b/backend/node_modules/lodash/bindAll.js new file mode 100644 index 00000000..a35706de --- /dev/null +++ b/backend/node_modules/lodash/bindAll.js @@ -0,0 +1,41 @@ +var arrayEach = require('./_arrayEach'), + baseAssignValue = require('./_baseAssignValue'), + bind = require('./bind'), + flatRest = require('./_flatRest'), + toKey = require('./_toKey'); + +/** + * Binds methods of an object to the object itself, overwriting the existing + * method. + * + * **Note:** This method doesn't set the "length" property of bound functions. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Util + * @param {Object} object The object to bind and assign the bound methods to. + * @param {...(string|string[])} methodNames The object method names to bind. + * @returns {Object} Returns `object`. + * @example + * + * var view = { + * 'label': 'docs', + * 'click': function() { + * console.log('clicked ' + this.label); + * } + * }; + * + * _.bindAll(view, ['click']); + * jQuery(element).on('click', view.click); + * // => Logs 'clicked docs' when clicked. 
+ */ +var bindAll = flatRest(function(object, methodNames) { + arrayEach(methodNames, function(key) { + key = toKey(key); + baseAssignValue(object, key, bind(object[key], object)); + }); + return object; +}); + +module.exports = bindAll; diff --git a/backend/node_modules/lodash/bindKey.js b/backend/node_modules/lodash/bindKey.js new file mode 100644 index 00000000..f7fd64cd --- /dev/null +++ b/backend/node_modules/lodash/bindKey.js @@ -0,0 +1,68 @@ +var baseRest = require('./_baseRest'), + createWrap = require('./_createWrap'), + getHolder = require('./_getHolder'), + replaceHolders = require('./_replaceHolders'); + +/** Used to compose bitmasks for function metadata. */ +var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_PARTIAL_FLAG = 32; + +/** + * Creates a function that invokes the method at `object[key]` with `partials` + * prepended to the arguments it receives. + * + * This method differs from `_.bind` by allowing bound functions to reference + * methods that may be redefined or don't yet exist. See + * [Peter Michaux's article](http://peter.michaux.ca/articles/lazy-function-definition-pattern) + * for more details. + * + * The `_.bindKey.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Function + * @param {Object} object The object to invoke the method on. + * @param {string} key The key of the method. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * var object = { + * 'user': 'fred', + * 'greet': function(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * }; + * + * var bound = _.bindKey(object, 'greet', 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * object.greet = function(greeting, punctuation) { + * return greeting + 'ya ' + this.user + punctuation; + * }; + * + * bound('!'); + * // => 'hiya fred!' + * + * // Bound with placeholders. + * var bound = _.bindKey(object, 'greet', _, '!'); + * bound('hi'); + * // => 'hiya fred!' + */ +var bindKey = baseRest(function(object, key, partials) { + var bitmask = WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bindKey)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(key, bitmask, object, partials, holders); +}); + +// Assign default placeholders. +bindKey.placeholder = {}; + +module.exports = bindKey; diff --git a/backend/node_modules/lodash/camelCase.js b/backend/node_modules/lodash/camelCase.js new file mode 100644 index 00000000..d7390def --- /dev/null +++ b/backend/node_modules/lodash/camelCase.js @@ -0,0 +1,29 @@ +var capitalize = require('./capitalize'), + createCompounder = require('./_createCompounder'); + +/** + * Converts `string` to [camel case](https://en.wikipedia.org/wiki/CamelCase). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the camel cased string. + * @example + * + * _.camelCase('Foo Bar'); + * // => 'fooBar' + * + * _.camelCase('--foo-bar--'); + * // => 'fooBar' + * + * _.camelCase('__FOO_BAR__'); + * // => 'fooBar' + */ +var camelCase = createCompounder(function(result, word, index) { + word = word.toLowerCase(); + return result + (index ? 
capitalize(word) : word); +}); + +module.exports = camelCase; diff --git a/backend/node_modules/lodash/capitalize.js b/backend/node_modules/lodash/capitalize.js new file mode 100644 index 00000000..3e1600e7 --- /dev/null +++ b/backend/node_modules/lodash/capitalize.js @@ -0,0 +1,23 @@ +var toString = require('./toString'), + upperFirst = require('./upperFirst'); + +/** + * Converts the first character of `string` to upper case and the remaining + * to lower case. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to capitalize. + * @returns {string} Returns the capitalized string. + * @example + * + * _.capitalize('FRED'); + * // => 'Fred' + */ +function capitalize(string) { + return upperFirst(toString(string).toLowerCase()); +} + +module.exports = capitalize; diff --git a/backend/node_modules/lodash/castArray.js b/backend/node_modules/lodash/castArray.js new file mode 100644 index 00000000..e470bdb9 --- /dev/null +++ b/backend/node_modules/lodash/castArray.js @@ -0,0 +1,44 @@ +var isArray = require('./isArray'); + +/** + * Casts `value` as an array if it's not one. + * + * @static + * @memberOf _ + * @since 4.4.0 + * @category Lang + * @param {*} value The value to inspect. + * @returns {Array} Returns the cast array. + * @example + * + * _.castArray(1); + * // => [1] + * + * _.castArray({ 'a': 1 }); + * // => [{ 'a': 1 }] + * + * _.castArray('abc'); + * // => ['abc'] + * + * _.castArray(null); + * // => [null] + * + * _.castArray(undefined); + * // => [undefined] + * + * _.castArray(); + * // => [] + * + * var array = [1, 2, 3]; + * console.log(_.castArray(array) === array); + * // => true + */ +function castArray() { + if (!arguments.length) { + return []; + } + var value = arguments[0]; + return isArray(value) ? value : [value]; +} + +module.exports = castArray; diff --git a/backend/node_modules/lodash/ceil.js b/backend/node_modules/lodash/ceil.js new file mode 100644 index 00000000..56c8722c --- /dev/null +++ b/backend/node_modules/lodash/ceil.js @@ -0,0 +1,26 @@ +var createRound = require('./_createRound'); + +/** + * Computes `number` rounded up to `precision`. + * + * @static + * @memberOf _ + * @since 3.10.0 + * @category Math + * @param {number} number The number to round up. + * @param {number} [precision=0] The precision to round up to. + * @returns {number} Returns the rounded up number. + * @example + * + * _.ceil(4.006); + * // => 5 + * + * _.ceil(6.004, 2); + * // => 6.01 + * + * _.ceil(6040, -2); + * // => 6100 + */ +var ceil = createRound('ceil'); + +module.exports = ceil; diff --git a/backend/node_modules/lodash/chain.js b/backend/node_modules/lodash/chain.js new file mode 100644 index 00000000..f6cd6475 --- /dev/null +++ b/backend/node_modules/lodash/chain.js @@ -0,0 +1,38 @@ +var lodash = require('./wrapperLodash'); + +/** + * Creates a `lodash` wrapper instance that wraps `value` with explicit method + * chain sequences enabled. The result of such sequences must be unwrapped + * with `_#value`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Seq + * @param {*} value The value to wrap. + * @returns {Object} Returns the new `lodash` wrapper instance. 
+ * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'pebbles', 'age': 1 } + * ]; + * + * var youngest = _ + * .chain(users) + * .sortBy('age') + * .map(function(o) { + * return o.user + ' is ' + o.age; + * }) + * .head() + * .value(); + * // => 'pebbles is 1' + */ +function chain(value) { + var result = lodash(value); + result.__chain__ = true; + return result; +} + +module.exports = chain; diff --git a/backend/node_modules/lodash/chunk.js b/backend/node_modules/lodash/chunk.js new file mode 100644 index 00000000..5b562fef --- /dev/null +++ b/backend/node_modules/lodash/chunk.js @@ -0,0 +1,50 @@ +var baseSlice = require('./_baseSlice'), + isIterateeCall = require('./_isIterateeCall'), + toInteger = require('./toInteger'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeCeil = Math.ceil, + nativeMax = Math.max; + +/** + * Creates an array of elements split into groups the length of `size`. + * If `array` can't be split evenly, the final chunk will be the remaining + * elements. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to process. + * @param {number} [size=1] The length of each chunk + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the new array of chunks. + * @example + * + * _.chunk(['a', 'b', 'c', 'd'], 2); + * // => [['a', 'b'], ['c', 'd']] + * + * _.chunk(['a', 'b', 'c', 'd'], 3); + * // => [['a', 'b', 'c'], ['d']] + */ +function chunk(array, size, guard) { + if ((guard ? isIterateeCall(array, size, guard) : size === undefined)) { + size = 1; + } else { + size = nativeMax(toInteger(size), 0); + } + var length = array == null ? 0 : array.length; + if (!length || size < 1) { + return []; + } + var index = 0, + resIndex = 0, + result = Array(nativeCeil(length / size)); + + while (index < length) { + result[resIndex++] = baseSlice(array, index, (index += size)); + } + return result; +} + +module.exports = chunk; diff --git a/backend/node_modules/lodash/clamp.js b/backend/node_modules/lodash/clamp.js new file mode 100644 index 00000000..91a72c97 --- /dev/null +++ b/backend/node_modules/lodash/clamp.js @@ -0,0 +1,39 @@ +var baseClamp = require('./_baseClamp'), + toNumber = require('./toNumber'); + +/** + * Clamps `number` within the inclusive `lower` and `upper` bounds. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Number + * @param {number} number The number to clamp. + * @param {number} [lower] The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the clamped number. + * @example + * + * _.clamp(-10, -5, 5); + * // => -5 + * + * _.clamp(10, -5, 5); + * // => 5 + */ +function clamp(number, lower, upper) { + if (upper === undefined) { + upper = lower; + lower = undefined; + } + if (upper !== undefined) { + upper = toNumber(upper); + upper = upper === upper ? upper : 0; + } + if (lower !== undefined) { + lower = toNumber(lower); + lower = lower === lower ? lower : 0; + } + return baseClamp(toNumber(number), lower, upper); +} + +module.exports = clamp; diff --git a/backend/node_modules/lodash/clone.js b/backend/node_modules/lodash/clone.js new file mode 100644 index 00000000..dd439d63 --- /dev/null +++ b/backend/node_modules/lodash/clone.js @@ -0,0 +1,36 @@ +var baseClone = require('./_baseClone'); + +/** Used to compose bitmasks for cloning. 
*/ +var CLONE_SYMBOLS_FLAG = 4; + +/** + * Creates a shallow clone of `value`. + * + * **Note:** This method is loosely based on the + * [structured clone algorithm](https://mdn.io/Structured_clone_algorithm) + * and supports cloning arrays, array buffers, booleans, date objects, maps, + * numbers, `Object` objects, regexes, sets, strings, symbols, and typed + * arrays. The own enumerable properties of `arguments` objects are cloned + * as plain objects. An empty object is returned for uncloneable values such + * as error objects, functions, DOM nodes, and WeakMaps. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to clone. + * @returns {*} Returns the cloned value. + * @see _.cloneDeep + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var shallow = _.clone(objects); + * console.log(shallow[0] === objects[0]); + * // => true + */ +function clone(value) { + return baseClone(value, CLONE_SYMBOLS_FLAG); +} + +module.exports = clone; diff --git a/backend/node_modules/lodash/cloneDeep.js b/backend/node_modules/lodash/cloneDeep.js new file mode 100644 index 00000000..4425fbe8 --- /dev/null +++ b/backend/node_modules/lodash/cloneDeep.js @@ -0,0 +1,29 @@ +var baseClone = require('./_baseClone'); + +/** Used to compose bitmasks for cloning. */ +var CLONE_DEEP_FLAG = 1, + CLONE_SYMBOLS_FLAG = 4; + +/** + * This method is like `_.clone` except that it recursively clones `value`. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Lang + * @param {*} value The value to recursively clone. + * @returns {*} Returns the deep cloned value. + * @see _.clone + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var deep = _.cloneDeep(objects); + * console.log(deep[0] === objects[0]); + * // => false + */ +function cloneDeep(value) { + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG); +} + +module.exports = cloneDeep; diff --git a/backend/node_modules/lodash/cloneDeepWith.js b/backend/node_modules/lodash/cloneDeepWith.js new file mode 100644 index 00000000..fd9c6c05 --- /dev/null +++ b/backend/node_modules/lodash/cloneDeepWith.js @@ -0,0 +1,40 @@ +var baseClone = require('./_baseClone'); + +/** Used to compose bitmasks for cloning. */ +var CLONE_DEEP_FLAG = 1, + CLONE_SYMBOLS_FLAG = 4; + +/** + * This method is like `_.cloneWith` except that it recursively clones `value`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to recursively clone. + * @param {Function} [customizer] The function to customize cloning. + * @returns {*} Returns the deep cloned value. + * @see _.cloneWith + * @example + * + * function customizer(value) { + * if (_.isElement(value)) { + * return value.cloneNode(true); + * } + * } + * + * var el = _.cloneDeepWith(document.body, customizer); + * + * console.log(el === document.body); + * // => false + * console.log(el.nodeName); + * // => 'BODY' + * console.log(el.childNodes.length); + * // => 20 + */ +function cloneDeepWith(value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG, customizer); +} + +module.exports = cloneDeepWith; diff --git a/backend/node_modules/lodash/cloneWith.js b/backend/node_modules/lodash/cloneWith.js new file mode 100644 index 00000000..d2f4e756 --- /dev/null +++ b/backend/node_modules/lodash/cloneWith.js @@ -0,0 +1,42 @@ +var baseClone = require('./_baseClone'); + +/** Used to compose bitmasks for cloning. 
*/ +var CLONE_SYMBOLS_FLAG = 4; + +/** + * This method is like `_.clone` except that it accepts `customizer` which + * is invoked to produce the cloned value. If `customizer` returns `undefined`, + * cloning is handled by the method instead. The `customizer` is invoked with + * up to four arguments; (value [, index|key, object, stack]). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to clone. + * @param {Function} [customizer] The function to customize cloning. + * @returns {*} Returns the cloned value. + * @see _.cloneDeepWith + * @example + * + * function customizer(value) { + * if (_.isElement(value)) { + * return value.cloneNode(false); + * } + * } + * + * var el = _.cloneWith(document.body, customizer); + * + * console.log(el === document.body); + * // => false + * console.log(el.nodeName); + * // => 'BODY' + * console.log(el.childNodes.length); + * // => 0 + */ +function cloneWith(value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseClone(value, CLONE_SYMBOLS_FLAG, customizer); +} + +module.exports = cloneWith; diff --git a/backend/node_modules/lodash/collection.js b/backend/node_modules/lodash/collection.js new file mode 100644 index 00000000..77fe837f --- /dev/null +++ b/backend/node_modules/lodash/collection.js @@ -0,0 +1,30 @@ +module.exports = { + 'countBy': require('./countBy'), + 'each': require('./each'), + 'eachRight': require('./eachRight'), + 'every': require('./every'), + 'filter': require('./filter'), + 'find': require('./find'), + 'findLast': require('./findLast'), + 'flatMap': require('./flatMap'), + 'flatMapDeep': require('./flatMapDeep'), + 'flatMapDepth': require('./flatMapDepth'), + 'forEach': require('./forEach'), + 'forEachRight': require('./forEachRight'), + 'groupBy': require('./groupBy'), + 'includes': require('./includes'), + 'invokeMap': require('./invokeMap'), + 'keyBy': require('./keyBy'), + 'map': require('./map'), + 'orderBy': require('./orderBy'), + 'partition': require('./partition'), + 'reduce': require('./reduce'), + 'reduceRight': require('./reduceRight'), + 'reject': require('./reject'), + 'sample': require('./sample'), + 'sampleSize': require('./sampleSize'), + 'shuffle': require('./shuffle'), + 'size': require('./size'), + 'some': require('./some'), + 'sortBy': require('./sortBy') +}; diff --git a/backend/node_modules/lodash/commit.js b/backend/node_modules/lodash/commit.js new file mode 100644 index 00000000..fe4db717 --- /dev/null +++ b/backend/node_modules/lodash/commit.js @@ -0,0 +1,33 @@ +var LodashWrapper = require('./_LodashWrapper'); + +/** + * Executes the chain sequence and returns the wrapped result. + * + * @name commit + * @memberOf _ + * @since 3.2.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var array = [1, 2]; + * var wrapped = _(array).push(3); + * + * console.log(array); + * // => [1, 2] + * + * wrapped = wrapped.commit(); + * console.log(array); + * // => [1, 2, 3] + * + * wrapped.last(); + * // => 3 + * + * console.log(array); + * // => [1, 2, 3] + */ +function wrapperCommit() { + return new LodashWrapper(this.value(), this.__chain__); +} + +module.exports = wrapperCommit; diff --git a/backend/node_modules/lodash/compact.js b/backend/node_modules/lodash/compact.js new file mode 100644 index 00000000..031fab4e --- /dev/null +++ b/backend/node_modules/lodash/compact.js @@ -0,0 +1,31 @@ +/** + * Creates an array with all falsey values removed. 
The values `false`, `null`, + * `0`, `""`, `undefined`, and `NaN` are falsey. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to compact. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.compact([0, 1, false, 2, '', 3]); + * // => [1, 2, 3] + */ +function compact(array) { + var index = -1, + length = array == null ? 0 : array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (value) { + result[resIndex++] = value; + } + } + return result; +} + +module.exports = compact; diff --git a/backend/node_modules/lodash/concat.js b/backend/node_modules/lodash/concat.js new file mode 100644 index 00000000..1da48a4f --- /dev/null +++ b/backend/node_modules/lodash/concat.js @@ -0,0 +1,43 @@ +var arrayPush = require('./_arrayPush'), + baseFlatten = require('./_baseFlatten'), + copyArray = require('./_copyArray'), + isArray = require('./isArray'); + +/** + * Creates a new array concatenating `array` with any additional arrays + * and/or values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to concatenate. + * @param {...*} [values] The values to concatenate. + * @returns {Array} Returns the new concatenated array. + * @example + * + * var array = [1]; + * var other = _.concat(array, 2, [3], [[4]]); + * + * console.log(other); + * // => [1, 2, 3, [4]] + * + * console.log(array); + * // => [1] + */ +function concat() { + var length = arguments.length; + if (!length) { + return []; + } + var args = Array(length - 1), + array = arguments[0], + index = length; + + while (index--) { + args[index - 1] = arguments[index]; + } + return arrayPush(isArray(array) ? copyArray(array) : [array], baseFlatten(args, 1)); +} + +module.exports = concat; diff --git a/backend/node_modules/lodash/cond.js b/backend/node_modules/lodash/cond.js new file mode 100644 index 00000000..64555986 --- /dev/null +++ b/backend/node_modules/lodash/cond.js @@ -0,0 +1,60 @@ +var apply = require('./_apply'), + arrayMap = require('./_arrayMap'), + baseIteratee = require('./_baseIteratee'), + baseRest = require('./_baseRest'); + +/** Error message constants. */ +var FUNC_ERROR_TEXT = 'Expected a function'; + +/** + * Creates a function that iterates over `pairs` and invokes the corresponding + * function of the first predicate to return truthy. The predicate-function + * pairs are invoked with the `this` binding and arguments of the created + * function. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Util + * @param {Array} pairs The predicate-function pairs. + * @returns {Function} Returns the new composite function. + * @example + * + * var func = _.cond([ + * [_.matches({ 'a': 1 }), _.constant('matches A')], + * [_.conforms({ 'b': _.isNumber }), _.constant('matches B')], + * [_.stubTrue, _.constant('no match')] + * ]); + * + * func({ 'a': 1, 'b': 2 }); + * // => 'matches A' + * + * func({ 'a': 0, 'b': 1 }); + * // => 'matches B' + * + * func({ 'a': '1', 'b': '2' }); + * // => 'no match' + */ +function cond(pairs) { + var length = pairs == null ? 0 : pairs.length, + toIteratee = baseIteratee; + + pairs = !length ? 
[] : arrayMap(pairs, function(pair) { + if (typeof pair[1] != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return [toIteratee(pair[0]), pair[1]]; + }); + + return baseRest(function(args) { + var index = -1; + while (++index < length) { + var pair = pairs[index]; + if (apply(pair[0], this, args)) { + return apply(pair[1], this, args); + } + } + }); +} + +module.exports = cond; diff --git a/backend/node_modules/lodash/conforms.js b/backend/node_modules/lodash/conforms.js new file mode 100644 index 00000000..5501a949 --- /dev/null +++ b/backend/node_modules/lodash/conforms.js @@ -0,0 +1,35 @@ +var baseClone = require('./_baseClone'), + baseConforms = require('./_baseConforms'); + +/** Used to compose bitmasks for cloning. */ +var CLONE_DEEP_FLAG = 1; + +/** + * Creates a function that invokes the predicate properties of `source` with + * the corresponding property values of a given object, returning `true` if + * all predicates return truthy, else `false`. + * + * **Note:** The created function is equivalent to `_.conformsTo` with + * `source` partially applied. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Util + * @param {Object} source The object of property predicates to conform to. + * @returns {Function} Returns the new spec function. + * @example + * + * var objects = [ + * { 'a': 2, 'b': 1 }, + * { 'a': 1, 'b': 2 } + * ]; + * + * _.filter(objects, _.conforms({ 'b': function(n) { return n > 1; } })); + * // => [{ 'a': 1, 'b': 2 }] + */ +function conforms(source) { + return baseConforms(baseClone(source, CLONE_DEEP_FLAG)); +} + +module.exports = conforms; diff --git a/backend/node_modules/lodash/conformsTo.js b/backend/node_modules/lodash/conformsTo.js new file mode 100644 index 00000000..b8a93ebf --- /dev/null +++ b/backend/node_modules/lodash/conformsTo.js @@ -0,0 +1,32 @@ +var baseConformsTo = require('./_baseConformsTo'), + keys = require('./keys'); + +/** + * Checks if `object` conforms to `source` by invoking the predicate + * properties of `source` with the corresponding property values of `object`. + * + * **Note:** This method is equivalent to `_.conforms` when `source` is + * partially applied. + * + * @static + * @memberOf _ + * @since 4.14.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property predicates to conform to. + * @returns {boolean} Returns `true` if `object` conforms, else `false`. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * + * _.conformsTo(object, { 'b': function(n) { return n > 1; } }); + * // => true + * + * _.conformsTo(object, { 'b': function(n) { return n > 2; } }); + * // => false + */ +function conformsTo(object, source) { + return source == null || baseConformsTo(object, source, keys(source)); +} + +module.exports = conformsTo; diff --git a/backend/node_modules/lodash/constant.js b/backend/node_modules/lodash/constant.js new file mode 100644 index 00000000..655ece3f --- /dev/null +++ b/backend/node_modules/lodash/constant.js @@ -0,0 +1,26 @@ +/** + * Creates a function that returns `value`. + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Util + * @param {*} value The value to return from the new function. + * @returns {Function} Returns the new constant function. 
+ * @example
+ *
+ * var objects = _.times(2, _.constant({ 'a': 1 }));
+ *
+ * console.log(objects);
+ * // => [{ 'a': 1 }, { 'a': 1 }]
+ *
+ * console.log(objects[0] === objects[1]);
+ * // => true
+ */
+function constant(value) {
+  return function() {
+    return value;
+  };
+}
+
+module.exports = constant;
diff --git a/backend/node_modules/lodash/core.js b/backend/node_modules/lodash/core.js
new file mode 100644
index 00000000..caf078f6
--- /dev/null
+++ b/backend/node_modules/lodash/core.js
@@ -0,0 +1,3877 @@
+/**
+ * @license
+ * Lodash (Custom Build) <https://lodash.com/>
+ * Build: `lodash core -o ./dist/lodash.core.js`
+ * Copyright OpenJS Foundation and other contributors <https://openjsf.org/>
+ * Released under MIT license <https://lodash.com/license>
+ * Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
+ * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+ */
+;(function() {
+
+  /** Used as a safe reference for `undefined` in pre-ES5 environments. */
+  var undefined;
+
+  /** Used as the semantic version number. */
+  var VERSION = '4.17.23';
+
+  /** Error message constants. */
+  var FUNC_ERROR_TEXT = 'Expected a function';
+
+  /** Used to compose bitmasks for value comparisons. */
+  var COMPARE_PARTIAL_FLAG = 1,
+      COMPARE_UNORDERED_FLAG = 2;
+
+  /** Used to compose bitmasks for function metadata. */
+  var WRAP_BIND_FLAG = 1,
+      WRAP_PARTIAL_FLAG = 32;
+
+  /** Used as references for various `Number` constants. */
+  var INFINITY = 1 / 0,
+      MAX_SAFE_INTEGER = 9007199254740991;
+
+  /** `Object#toString` result references. */
+  var argsTag = '[object Arguments]',
+      arrayTag = '[object Array]',
+      asyncTag = '[object AsyncFunction]',
+      boolTag = '[object Boolean]',
+      dateTag = '[object Date]',
+      errorTag = '[object Error]',
+      funcTag = '[object Function]',
+      genTag = '[object GeneratorFunction]',
+      numberTag = '[object Number]',
+      objectTag = '[object Object]',
+      proxyTag = '[object Proxy]',
+      regexpTag = '[object RegExp]',
+      stringTag = '[object String]';
+
+  /** Used to match HTML entities and HTML characters. */
+  var reUnescapedHtml = /[&<>"']/g,
+      reHasUnescapedHtml = RegExp(reUnescapedHtml.source);
+
+  /** Used to detect unsigned integer values. */
+  var reIsUint = /^(?:0|[1-9]\d*)$/;
+
+  /** Used to map characters to HTML entities. */
+  var htmlEscapes = {
+    '&': '&amp;',
+    '<': '&lt;',
+    '>': '&gt;',
+    '"': '&quot;',
+    "'": '&#39;'
+  };
+
+  /** Detect free variable `global` from Node.js. */
+  var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
+
+  /** Detect free variable `self`. */
+  var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
+
+  /** Used as a reference to the global object. */
+  var root = freeGlobal || freeSelf || Function('return this')();
+
+  /** Detect free variable `exports`. */
+  var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports;
+
+  /** Detect free variable `module`. */
+  var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module;
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Appends the elements of `values` to `array`.
+   *
+   * @private
+   * @param {Array} array The array to modify.
+   * @param {Array} values The values to append.
+   * @returns {Array} Returns `array`.
+   */
+  function arrayPush(array, values) {
+    array.push.apply(array, values);
+    return array;
+  }
+
+  /**
+   * The base implementation of `_.findIndex` and `_.findLastIndex` without
+   * support for iteratee shorthands.
+   *
+   * @private
+   * @param {Array} array The array to inspect.
+ * @param {Function} predicate The function invoked per iteration. + * @param {number} fromIndex The index to search from. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseFindIndex(array, predicate, fromIndex, fromRight) { + var length = array.length, + index = fromIndex + (fromRight ? 1 : -1); + + while ((fromRight ? index-- : ++index < length)) { + if (predicate(array[index], index, array)) { + return index; + } + } + return -1; + } + + /** + * The base implementation of `_.property` without support for deep paths. + * + * @private + * @param {string} key The key of the property to get. + * @returns {Function} Returns the new accessor function. + */ + function baseProperty(key) { + return function(object) { + return object == null ? undefined : object[key]; + }; + } + + /** + * The base implementation of `_.propertyOf` without support for deep paths. + * + * @private + * @param {Object} object The object to query. + * @returns {Function} Returns the new accessor function. + */ + function basePropertyOf(object) { + return function(key) { + return object == null ? undefined : object[key]; + }; + } + + /** + * The base implementation of `_.reduce` and `_.reduceRight`, without support + * for iteratee shorthands, which iterates over `collection` using `eachFunc`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} accumulator The initial value. + * @param {boolean} initAccum Specify using the first or last element of + * `collection` as the initial value. + * @param {Function} eachFunc The function to iterate over `collection`. + * @returns {*} Returns the accumulated value. + */ + function baseReduce(collection, iteratee, accumulator, initAccum, eachFunc) { + eachFunc(collection, function(value, index, collection) { + accumulator = initAccum + ? (initAccum = false, value) + : iteratee(accumulator, value, index, collection); + }); + return accumulator; + } + + /** + * The base implementation of `_.values` and `_.valuesIn` which creates an + * array of `object` property values corresponding to the property names + * of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the array of property values. + */ + function baseValues(object, props) { + return baseMap(props, function(key) { + return object[key]; + }); + } + + /** + * Used by `_.escape` to convert characters to HTML entities. + * + * @private + * @param {string} chr The matched character to escape. + * @returns {string} Returns the escaped character. + */ + var escapeHtmlChar = basePropertyOf(htmlEscapes); + + /** + * Creates a unary function that invokes `func` with its argument transformed. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. + */ + function overArg(func, transform) { + return function(arg) { + return func(transform(arg)); + }; + } + + /*--------------------------------------------------------------------------*/ + + /** Used for built-in method references. */ + var arrayProto = Array.prototype, + objectProto = Object.prototype; + + /** Used to check objects for own properties. 
*/ + var hasOwnProperty = objectProto.hasOwnProperty; + + /** Used to generate unique IDs. */ + var idCounter = 0; + + /** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ + var nativeObjectToString = objectProto.toString; + + /** Used to restore the original `_` reference in `_.noConflict`. */ + var oldDash = root._; + + /** Built-in value references. */ + var objectCreate = Object.create, + propertyIsEnumerable = objectProto.propertyIsEnumerable; + + /* Built-in method references for those with the same name as other `lodash` methods. */ + var nativeIsFinite = root.isFinite, + nativeKeys = overArg(Object.keys, Object), + nativeMax = Math.max; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a `lodash` object which wraps `value` to enable implicit method + * chain sequences. Methods that operate on and return arrays, collections, + * and functions can be chained together. Methods that retrieve a single value + * or may return a primitive value will automatically end the chain sequence + * and return the unwrapped value. Otherwise, the value must be unwrapped + * with `_#value`. + * + * Explicit chain sequences, which must be unwrapped with `_#value`, may be + * enabled using `_.chain`. + * + * The execution of chained methods is lazy, that is, it's deferred until + * `_#value` is implicitly or explicitly called. + * + * Lazy evaluation allows several methods to support shortcut fusion. + * Shortcut fusion is an optimization to merge iteratee calls; this avoids + * the creation of intermediate arrays and can greatly reduce the number of + * iteratee executions. Sections of a chain sequence qualify for shortcut + * fusion if the section is applied to an array and iteratees accept only + * one argument. The heuristic for whether a section qualifies for shortcut + * fusion is subject to change. + * + * Chaining is supported in custom builds as long as the `_#value` method is + * directly or indirectly included in the build. + * + * In addition to lodash methods, wrappers have `Array` and `String` methods. 
+ * + * The wrapper `Array` methods are: + * `concat`, `join`, `pop`, `push`, `shift`, `sort`, `splice`, and `unshift` + * + * The wrapper `String` methods are: + * `replace` and `split` + * + * The wrapper methods that support shortcut fusion are: + * `at`, `compact`, `drop`, `dropRight`, `dropWhile`, `filter`, `find`, + * `findLast`, `head`, `initial`, `last`, `map`, `reject`, `reverse`, `slice`, + * `tail`, `take`, `takeRight`, `takeRightWhile`, `takeWhile`, and `toArray` + * + * The chainable wrapper methods are: + * `after`, `ary`, `assign`, `assignIn`, `assignInWith`, `assignWith`, `at`, + * `before`, `bind`, `bindAll`, `bindKey`, `castArray`, `chain`, `chunk`, + * `commit`, `compact`, `concat`, `conforms`, `constant`, `countBy`, `create`, + * `curry`, `debounce`, `defaults`, `defaultsDeep`, `defer`, `delay`, + * `difference`, `differenceBy`, `differenceWith`, `drop`, `dropRight`, + * `dropRightWhile`, `dropWhile`, `extend`, `extendWith`, `fill`, `filter`, + * `flatMap`, `flatMapDeep`, `flatMapDepth`, `flatten`, `flattenDeep`, + * `flattenDepth`, `flip`, `flow`, `flowRight`, `fromPairs`, `functions`, + * `functionsIn`, `groupBy`, `initial`, `intersection`, `intersectionBy`, + * `intersectionWith`, `invert`, `invertBy`, `invokeMap`, `iteratee`, `keyBy`, + * `keys`, `keysIn`, `map`, `mapKeys`, `mapValues`, `matches`, `matchesProperty`, + * `memoize`, `merge`, `mergeWith`, `method`, `methodOf`, `mixin`, `negate`, + * `nthArg`, `omit`, `omitBy`, `once`, `orderBy`, `over`, `overArgs`, + * `overEvery`, `overSome`, `partial`, `partialRight`, `partition`, `pick`, + * `pickBy`, `plant`, `property`, `propertyOf`, `pull`, `pullAll`, `pullAllBy`, + * `pullAllWith`, `pullAt`, `push`, `range`, `rangeRight`, `rearg`, `reject`, + * `remove`, `rest`, `reverse`, `sampleSize`, `set`, `setWith`, `shuffle`, + * `slice`, `sort`, `sortBy`, `splice`, `spread`, `tail`, `take`, `takeRight`, + * `takeRightWhile`, `takeWhile`, `tap`, `throttle`, `thru`, `toArray`, + * `toPairs`, `toPairsIn`, `toPath`, `toPlainObject`, `transform`, `unary`, + * `union`, `unionBy`, `unionWith`, `uniq`, `uniqBy`, `uniqWith`, `unset`, + * `unshift`, `unzip`, `unzipWith`, `update`, `updateWith`, `values`, + * `valuesIn`, `without`, `wrap`, `xor`, `xorBy`, `xorWith`, `zip`, + * `zipObject`, `zipObjectDeep`, and `zipWith` + * + * The wrapper methods that are **not** chainable by default are: + * `add`, `attempt`, `camelCase`, `capitalize`, `ceil`, `clamp`, `clone`, + * `cloneDeep`, `cloneDeepWith`, `cloneWith`, `conformsTo`, `deburr`, + * `defaultTo`, `divide`, `each`, `eachRight`, `endsWith`, `eq`, `escape`, + * `escapeRegExp`, `every`, `find`, `findIndex`, `findKey`, `findLast`, + * `findLastIndex`, `findLastKey`, `first`, `floor`, `forEach`, `forEachRight`, + * `forIn`, `forInRight`, `forOwn`, `forOwnRight`, `get`, `gt`, `gte`, `has`, + * `hasIn`, `head`, `identity`, `includes`, `indexOf`, `inRange`, `invoke`, + * `isArguments`, `isArray`, `isArrayBuffer`, `isArrayLike`, `isArrayLikeObject`, + * `isBoolean`, `isBuffer`, `isDate`, `isElement`, `isEmpty`, `isEqual`, + * `isEqualWith`, `isError`, `isFinite`, `isFunction`, `isInteger`, `isLength`, + * `isMap`, `isMatch`, `isMatchWith`, `isNaN`, `isNative`, `isNil`, `isNull`, + * `isNumber`, `isObject`, `isObjectLike`, `isPlainObject`, `isRegExp`, + * `isSafeInteger`, `isSet`, `isString`, `isUndefined`, `isTypedArray`, + * `isWeakMap`, `isWeakSet`, `join`, `kebabCase`, `last`, `lastIndexOf`, + * `lowerCase`, `lowerFirst`, `lt`, `lte`, `max`, `maxBy`, `mean`, `meanBy`, + * `min`, 
`minBy`, `multiply`, `noConflict`, `noop`, `now`, `nth`, `pad`, + * `padEnd`, `padStart`, `parseInt`, `pop`, `random`, `reduce`, `reduceRight`, + * `repeat`, `result`, `round`, `runInContext`, `sample`, `shift`, `size`, + * `snakeCase`, `some`, `sortedIndex`, `sortedIndexBy`, `sortedLastIndex`, + * `sortedLastIndexBy`, `startCase`, `startsWith`, `stubArray`, `stubFalse`, + * `stubObject`, `stubString`, `stubTrue`, `subtract`, `sum`, `sumBy`, + * `template`, `times`, `toFinite`, `toInteger`, `toJSON`, `toLength`, + * `toLower`, `toNumber`, `toSafeInteger`, `toString`, `toUpper`, `trim`, + * `trimEnd`, `trimStart`, `truncate`, `unescape`, `uniqueId`, `upperCase`, + * `upperFirst`, `value`, and `words` + * + * @name _ + * @constructor + * @category Seq + * @param {*} value The value to wrap in a `lodash` instance. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * function square(n) { + * return n * n; + * } + * + * var wrapped = _([1, 2, 3]); + * + * // Returns an unwrapped value. + * wrapped.reduce(_.add); + * // => 6 + * + * // Returns a wrapped value. + * var squares = wrapped.map(square); + * + * _.isArray(squares); + * // => false + * + * _.isArray(squares.value()); + * // => true + */ + function lodash(value) { + return value instanceof LodashWrapper + ? value + : new LodashWrapper(value); + } + + /** + * The base implementation of `_.create` without support for assigning + * properties to the created object. + * + * @private + * @param {Object} proto The object to inherit from. + * @returns {Object} Returns the new object. + */ + var baseCreate = (function() { + function object() {} + return function(proto) { + if (!isObject(proto)) { + return {}; + } + if (objectCreate) { + return objectCreate(proto); + } + object.prototype = proto; + var result = new object; + object.prototype = undefined; + return result; + }; + }()); + + /** + * The base constructor for creating `lodash` wrapper objects. + * + * @private + * @param {*} value The value to wrap. + * @param {boolean} [chainAll] Enable explicit method chain sequences. + */ + function LodashWrapper(value, chainAll) { + this.__wrapped__ = value; + this.__actions__ = []; + this.__chain__ = !!chainAll; + } + + LodashWrapper.prototype = baseCreate(lodash.prototype); + LodashWrapper.prototype.constructor = LodashWrapper; + + /*------------------------------------------------------------------------*/ + + /** + * Assigns `value` to `key` of `object` if the existing value is not equivalent + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function assignValue(object, key, value) { + var objValue = object[key]; + if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) || + (value === undefined && !(key in object))) { + baseAssignValue(object, key, value); + } + } + + /** + * The base implementation of `assignValue` and `assignMergeValue` without + * value checks. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function baseAssignValue(object, key, value) { + object[key] = value; + } + + /** + * The base implementation of `_.delay` and `_.defer` which accepts `args` + * to provide to `func`. 
+ * + * @private + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @param {Array} args The arguments to provide to `func`. + * @returns {number|Object} Returns the timer id or timeout object. + */ + function baseDelay(func, wait, args) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return setTimeout(function() { func.apply(undefined, args); }, wait); + } + + /** + * The base implementation of `_.forEach` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + */ + var baseEach = createBaseEach(baseForOwn); + + /** + * The base implementation of `_.every` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false` + */ + function baseEvery(collection, predicate) { + var result = true; + baseEach(collection, function(value, index, collection) { + result = !!predicate(value, index, collection); + return result; + }); + return result; + } + + /** + * The base implementation of methods like `_.max` and `_.min` which accepts a + * `comparator` to determine the extremum value. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The iteratee invoked per iteration. + * @param {Function} comparator The comparator used to compare values. + * @returns {*} Returns the extremum value. + */ + function baseExtremum(array, iteratee, comparator) { + var index = -1, + length = array.length; + + while (++index < length) { + var value = array[index], + current = iteratee(value); + + if (current != null && (computed === undefined + ? (current === current && !false) + : comparator(current, computed) + )) { + var computed = current, + result = value; + } + } + return result; + } + + /** + * The base implementation of `_.filter` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + */ + function baseFilter(collection, predicate) { + var result = []; + baseEach(collection, function(value, index, collection) { + if (predicate(value, index, collection)) { + result.push(value); + } + }); + return result; + } + + /** + * The base implementation of `_.flatten` with support for restricting flattening. + * + * @private + * @param {Array} array The array to flatten. + * @param {number} depth The maximum recursion depth. + * @param {boolean} [predicate=isFlattenable] The function invoked per iteration. + * @param {boolean} [isStrict] Restrict to values that pass `predicate` checks. + * @param {Array} [result=[]] The initial result value. + * @returns {Array} Returns the new flattened array. + */ + function baseFlatten(array, depth, predicate, isStrict, result) { + var index = -1, + length = array.length; + + predicate || (predicate = isFlattenable); + result || (result = []); + + while (++index < length) { + var value = array[index]; + if (depth > 0 && predicate(value)) { + if (depth > 1) { + // Recursively flatten arrays (susceptible to call stack limits). 
+ baseFlatten(value, depth - 1, predicate, isStrict, result); + } else { + arrayPush(result, value); + } + } else if (!isStrict) { + result[result.length] = value; + } + } + return result; + } + + /** + * The base implementation of `baseForOwn` which iterates over `object` + * properties returned by `keysFunc` and invokes `iteratee` for each property. + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {Function} keysFunc The function to get the keys of `object`. + * @returns {Object} Returns `object`. + */ + var baseFor = createBaseFor(); + + /** + * The base implementation of `_.forOwn` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Object} Returns `object`. + */ + function baseForOwn(object, iteratee) { + return object && baseFor(object, iteratee, keys); + } + + /** + * The base implementation of `_.functions` which creates an array of + * `object` function property names filtered from `props`. + * + * @private + * @param {Object} object The object to inspect. + * @param {Array} props The property names to filter. + * @returns {Array} Returns the function names. + */ + function baseFunctions(object, props) { + return baseFilter(props, function(key) { + return isFunction(object[key]); + }); + } + + /** + * The base implementation of `getTag` without fallbacks for buggy environments. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ + function baseGetTag(value) { + return objectToString(value); + } + + /** + * The base implementation of `_.gt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. + */ + function baseGt(value, other) { + return value > other; + } + + /** + * The base implementation of `_.isArguments`. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + */ + var baseIsArguments = noop; + + /** + * The base implementation of `_.isDate` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. + */ + function baseIsDate(value) { + return isObjectLike(value) && baseGetTag(value) == dateTag; + } + + /** + * The base implementation of `_.isEqual` which supports partial comparisons + * and tracks traversed objects. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @param {boolean} bitmask The bitmask flags. + * 1 - Unordered comparison + * 2 - Partial comparison + * @param {Function} [customizer] The function to customize comparisons. + * @param {Object} [stack] Tracks traversed `value` and `other` objects. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. 
+ */ + function baseIsEqual(value, other, bitmask, customizer, stack) { + if (value === other) { + return true; + } + if (value == null || other == null || (!isObjectLike(value) && !isObjectLike(other))) { + return value !== value && other !== other; + } + return baseIsEqualDeep(value, other, bitmask, customizer, baseIsEqual, stack); + } + + /** + * A specialized version of `baseIsEqual` for arrays and objects which performs + * deep comparisons and tracks traversed objects enabling objects with circular + * references to be compared. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} [stack] Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function baseIsEqualDeep(object, other, bitmask, customizer, equalFunc, stack) { + var objIsArr = isArray(object), + othIsArr = isArray(other), + objTag = objIsArr ? arrayTag : baseGetTag(object), + othTag = othIsArr ? arrayTag : baseGetTag(other); + + objTag = objTag == argsTag ? objectTag : objTag; + othTag = othTag == argsTag ? objectTag : othTag; + + var objIsObj = objTag == objectTag, + othIsObj = othTag == objectTag, + isSameTag = objTag == othTag; + + stack || (stack = []); + var objStack = find(stack, function(entry) { + return entry[0] == object; + }); + var othStack = find(stack, function(entry) { + return entry[0] == other; + }); + if (objStack && othStack) { + return objStack[1] == other; + } + stack.push([object, other]); + stack.push([other, object]); + if (isSameTag && !objIsObj) { + var result = (objIsArr) + ? equalArrays(object, other, bitmask, customizer, equalFunc, stack) + : equalByTag(object, other, objTag, bitmask, customizer, equalFunc, stack); + stack.pop(); + return result; + } + if (!(bitmask & COMPARE_PARTIAL_FLAG)) { + var objIsWrapped = objIsObj && hasOwnProperty.call(object, '__wrapped__'), + othIsWrapped = othIsObj && hasOwnProperty.call(other, '__wrapped__'); + + if (objIsWrapped || othIsWrapped) { + var objUnwrapped = objIsWrapped ? object.value() : object, + othUnwrapped = othIsWrapped ? other.value() : other; + + var result = equalFunc(objUnwrapped, othUnwrapped, bitmask, customizer, stack); + stack.pop(); + return result; + } + } + if (!isSameTag) { + return false; + } + var result = equalObjects(object, other, bitmask, customizer, equalFunc, stack); + stack.pop(); + return result; + } + + /** + * The base implementation of `_.isRegExp` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. + */ + function baseIsRegExp(value) { + return isObjectLike(value) && baseGetTag(value) == regexpTag; + } + + /** + * The base implementation of `_.iteratee`. + * + * @private + * @param {*} [value=_.identity] The value to convert to an iteratee. + * @returns {Function} Returns the iteratee. + */ + function baseIteratee(func) { + if (typeof func == 'function') { + return func; + } + if (func == null) { + return identity; + } + return (typeof func == 'object' ? baseMatches : baseProperty)(func); + } + + /** + * The base implementation of `_.lt` which doesn't coerce arguments. 
+ * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. + */ + function baseLt(value, other) { + return value < other; + } + + /** + * The base implementation of `_.map` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ + function baseMap(collection, iteratee) { + var index = -1, + result = isArrayLike(collection) ? Array(collection.length) : []; + + baseEach(collection, function(value, key, collection) { + result[++index] = iteratee(value, key, collection); + }); + return result; + } + + /** + * The base implementation of `_.matches` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property values to match. + * @returns {Function} Returns the new spec function. + */ + function baseMatches(source) { + var props = nativeKeys(source); + return function(object) { + var length = props.length; + if (object == null) { + return !length; + } + object = Object(object); + while (length--) { + var key = props[length]; + if (!(key in object && + baseIsEqual(source[key], object[key], COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG) + )) { + return false; + } + } + return true; + }; + } + + /** + * The base implementation of `_.pick` without support for individual + * property identifiers. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @returns {Object} Returns the new object. + */ + function basePick(object, props) { + object = Object(object); + return reduce(props, function(result, key) { + if (key in object) { + result[key] = object[key]; + } + return result; + }, {}); + } + + /** + * The base implementation of `_.rest` which doesn't validate or coerce arguments. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + */ + function baseRest(func, start) { + return setToString(overRest(func, start, identity), func + ''); + } + + /** + * The base implementation of `_.slice` without an iteratee call guard. + * + * @private + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function baseSlice(array, start, end) { + var index = -1, + length = array.length; + + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = end > length ? length : end; + if (end < 0) { + end += length; + } + length = start > end ? 0 : ((end - start) >>> 0); + start >>>= 0; + + var result = Array(length); + while (++index < length) { + result[index] = array[index + start]; + } + return result; + } + + /** + * Copies the values of `source` to `array`. + * + * @private + * @param {Array} source The array to copy values from. + * @param {Array} [array=[]] The array to copy values to. + * @returns {Array} Returns `array`. + */ + function copyArray(source) { + return baseSlice(source, 0, source.length); + } + + /** + * The base implementation of `_.some` without support for iteratee shorthands. 
+ * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + */ + function baseSome(collection, predicate) { + var result; + + baseEach(collection, function(value, index, collection) { + result = predicate(value, index, collection); + return !result; + }); + return !!result; + } + + /** + * The base implementation of `wrapperValue` which returns the result of + * performing a sequence of actions on the unwrapped `value`, where each + * successive action is supplied the return value of the previous. + * + * @private + * @param {*} value The unwrapped value. + * @param {Array} actions Actions to perform to resolve the unwrapped value. + * @returns {*} Returns the resolved value. + */ + function baseWrapperValue(value, actions) { + var result = value; + return reduce(actions, function(result, action) { + return action.func.apply(action.thisArg, arrayPush([result], action.args)); + }, result); + } + + /** + * Compares values to sort them in ascending order. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {number} Returns the sort order indicator for `value`. + */ + function compareAscending(value, other) { + if (value !== other) { + var valIsDefined = value !== undefined, + valIsNull = value === null, + valIsReflexive = value === value, + valIsSymbol = false; + + var othIsDefined = other !== undefined, + othIsNull = other === null, + othIsReflexive = other === other, + othIsSymbol = false; + + if ((!othIsNull && !othIsSymbol && !valIsSymbol && value > other) || + (valIsSymbol && othIsDefined && othIsReflexive && !othIsNull && !othIsSymbol) || + (valIsNull && othIsDefined && othIsReflexive) || + (!valIsDefined && othIsReflexive) || + !valIsReflexive) { + return 1; + } + if ((!valIsNull && !valIsSymbol && !othIsSymbol && value < other) || + (othIsSymbol && valIsDefined && valIsReflexive && !valIsNull && !valIsSymbol) || + (othIsNull && valIsDefined && valIsReflexive) || + (!othIsDefined && valIsReflexive) || + !othIsReflexive) { + return -1; + } + } + return 0; + } + + /** + * Copies properties of `source` to `object`. + * + * @private + * @param {Object} source The object to copy properties from. + * @param {Array} props The property identifiers to copy. + * @param {Object} [object={}] The object to copy properties to. + * @param {Function} [customizer] The function to customize copied values. + * @returns {Object} Returns `object`. + */ + function copyObject(source, props, object, customizer) { + var isNew = !object; + object || (object = {}); + + var index = -1, + length = props.length; + + while (++index < length) { + var key = props[index]; + + var newValue = customizer + ? customizer(object[key], source[key], key, object, source) + : undefined; + + if (newValue === undefined) { + newValue = source[key]; + } + if (isNew) { + baseAssignValue(object, key, newValue); + } else { + assignValue(object, key, newValue); + } + } + return object; + } + + /** + * Creates a function like `_.assign`. + * + * @private + * @param {Function} assigner The function to assign values. + * @returns {Function} Returns the new assigner function. + */ + function createAssigner(assigner) { + return baseRest(function(object, sources) { + var index = -1, + length = sources.length, + customizer = length > 1 ? 
sources[length - 1] : undefined; + + customizer = (assigner.length > 3 && typeof customizer == 'function') + ? (length--, customizer) + : undefined; + + object = Object(object); + while (++index < length) { + var source = sources[index]; + if (source) { + assigner(object, source, index, customizer); + } + } + return object; + }); + } + + /** + * Creates a `baseEach` or `baseEachRight` function. + * + * @private + * @param {Function} eachFunc The function to iterate over a collection. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ + function createBaseEach(eachFunc, fromRight) { + return function(collection, iteratee) { + if (collection == null) { + return collection; + } + if (!isArrayLike(collection)) { + return eachFunc(collection, iteratee); + } + var length = collection.length, + index = fromRight ? length : -1, + iterable = Object(collection); + + while ((fromRight ? index-- : ++index < length)) { + if (iteratee(iterable[index], index, iterable) === false) { + break; + } + } + return collection; + }; + } + + /** + * Creates a base function for methods like `_.forIn` and `_.forOwn`. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ + function createBaseFor(fromRight) { + return function(object, iteratee, keysFunc) { + var index = -1, + iterable = Object(object), + props = keysFunc(object), + length = props.length; + + while (length--) { + var key = props[fromRight ? length : ++index]; + if (iteratee(iterable[key], key, iterable) === false) { + break; + } + } + return object; + }; + } + + /** + * Creates a function that produces an instance of `Ctor` regardless of + * whether it was invoked as part of a `new` expression or by `call` or `apply`. + * + * @private + * @param {Function} Ctor The constructor to wrap. + * @returns {Function} Returns the new wrapped function. + */ + function createCtor(Ctor) { + return function() { + // Use a `switch` statement to work with class constructors. See + // http://ecma-international.org/ecma-262/7.0/#sec-ecmascript-function-objects-call-thisargument-argumentslist + // for more details. + var args = arguments; + var thisBinding = baseCreate(Ctor.prototype), + result = Ctor.apply(thisBinding, args); + + // Mimic the constructor's `return` behavior. + // See https://es5.github.io/#x13.2.2 for more details. + return isObject(result) ? result : thisBinding; + }; + } + + /** + * Creates a `_.find` or `_.findLast` function. + * + * @private + * @param {Function} findIndexFunc The function to find the collection index. + * @returns {Function} Returns the new find function. + */ + function createFind(findIndexFunc) { + return function(collection, predicate, fromIndex) { + var iterable = Object(collection); + if (!isArrayLike(collection)) { + var iteratee = baseIteratee(predicate, 3); + collection = keys(collection); + predicate = function(key) { return iteratee(iterable[key], key, iterable); }; + } + var index = findIndexFunc(collection, predicate, fromIndex); + return index > -1 ? iterable[iteratee ? collection[index] : index] : undefined; + }; + } + + /** + * Creates a function that wraps `func` to invoke it with the `this` binding + * of `thisArg` and `partials` prepended to the arguments it receives. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. 
+ * @param {*} thisArg The `this` binding of `func`. + * @param {Array} partials The arguments to prepend to those provided to + * the new function. + * @returns {Function} Returns the new wrapped function. + */ + function createPartial(func, bitmask, thisArg, partials) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); + + function wrapper() { + var argsIndex = -1, + argsLength = arguments.length, + leftIndex = -1, + leftLength = partials.length, + args = Array(leftLength + argsLength), + fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + + while (++leftIndex < leftLength) { + args[leftIndex] = partials[leftIndex]; + } + while (argsLength--) { + args[leftIndex++] = arguments[++argsIndex]; + } + return fn.apply(isBind ? thisArg : this, args); + } + return wrapper; + } + + /** + * A specialized version of `baseIsEqualDeep` for arrays with support for + * partial deep comparisons. + * + * @private + * @param {Array} array The array to compare. + * @param {Array} other The other array to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `array` and `other` objects. + * @returns {boolean} Returns `true` if the arrays are equivalent, else `false`. + */ + function equalArrays(array, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + arrLength = array.length, + othLength = other.length; + + if (arrLength != othLength && !(isPartial && othLength > arrLength)) { + return false; + } + // Check that cyclic values are equal. + var arrStacked = stack.get(array); + var othStacked = stack.get(other); + if (arrStacked && othStacked) { + return arrStacked == other && othStacked == array; + } + var index = -1, + result = true, + seen = (bitmask & COMPARE_UNORDERED_FLAG) ? [] : undefined; + + // Ignore non-index properties. + while (++index < arrLength) { + var arrValue = array[index], + othValue = other[index]; + + var compared; + if (compared !== undefined) { + if (compared) { + continue; + } + result = false; + break; + } + // Recursively compare arrays (susceptible to call stack limits). + if (seen) { + if (!baseSome(other, function(othValue, othIndex) { + if (!indexOf(seen, othIndex) && + (arrValue === othValue || equalFunc(arrValue, othValue, bitmask, customizer, stack))) { + return seen.push(othIndex); + } + })) { + result = false; + break; + } + } else if (!( + arrValue === othValue || + equalFunc(arrValue, othValue, bitmask, customizer, stack) + )) { + result = false; + break; + } + } + return result; + } + + /** + * A specialized version of `baseIsEqualDeep` for comparing objects of + * the same `toStringTag`. + * + * **Note:** This function only supports comparing values with tags of + * `Boolean`, `Date`, `Error`, `Number`, `RegExp`, or `String`. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {string} tag The `toStringTag` of the objects to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. 
+ * @param {Object} stack Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalByTag(object, other, tag, bitmask, customizer, equalFunc, stack) { + switch (tag) { + + case boolTag: + case dateTag: + case numberTag: + // Coerce booleans to `1` or `0` and dates to milliseconds. + // Invalid dates are coerced to `NaN`. + return eq(+object, +other); + + case errorTag: + return object.name == other.name && object.message == other.message; + + case regexpTag: + case stringTag: + // Coerce regexes to strings and treat strings, primitives and objects, + // as equal. See http://www.ecma-international.org/ecma-262/7.0/#sec-regexp.prototype.tostring + // for more details. + return object == (other + ''); + + } + return false; + } + + /** + * A specialized version of `baseIsEqualDeep` for objects with support for + * partial deep comparisons. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalObjects(object, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + objProps = keys(object), + objLength = objProps.length, + othProps = keys(other), + othLength = othProps.length; + + if (objLength != othLength && !isPartial) { + return false; + } + var index = objLength; + while (index--) { + var key = objProps[index]; + if (!(isPartial ? key in other : hasOwnProperty.call(other, key))) { + return false; + } + } + // Check that cyclic values are equal. + var objStacked = stack.get(object); + var othStacked = stack.get(other); + if (objStacked && othStacked) { + return objStacked == other && othStacked == object; + } + var result = true; + + var skipCtor = isPartial; + while (++index < objLength) { + key = objProps[index]; + var objValue = object[key], + othValue = other[key]; + + var compared; + // Recursively compare objects (susceptible to call stack limits). + if (!(compared === undefined + ? (objValue === othValue || equalFunc(objValue, othValue, bitmask, customizer, stack)) + : compared + )) { + result = false; + break; + } + skipCtor || (skipCtor = key == 'constructor'); + } + if (result && !skipCtor) { + var objCtor = object.constructor, + othCtor = other.constructor; + + // Non `Object` object instances with different constructors are not equal. + if (objCtor != othCtor && + ('constructor' in object && 'constructor' in other) && + !(typeof objCtor == 'function' && objCtor instanceof objCtor && + typeof othCtor == 'function' && othCtor instanceof othCtor)) { + result = false; + } + } + return result; + } + + /** + * A specialized version of `baseRest` which flattens the rest array. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @returns {Function} Returns the new function. + */ + function flatRest(func) { + return setToString(overRest(func, undefined, flatten), func + ''); + } + + /** + * Checks if `value` is a flattenable `arguments` object or array. + * + * @private + * @param {*} value The value to check. 
+ * @returns {boolean} Returns `true` if `value` is flattenable, else `false`. + */ + function isFlattenable(value) { + return isArray(value) || isArguments(value); + } + + /** + * Checks if `value` is a valid array-like index. + * + * @private + * @param {*} value The value to check. + * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. + * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. + */ + function isIndex(value, length) { + var type = typeof value; + length = length == null ? MAX_SAFE_INTEGER : length; + + return !!length && + (type == 'number' || + (type != 'symbol' && reIsUint.test(value))) && + (value > -1 && value % 1 == 0 && value < length); + } + + /** + * Checks if the given arguments are from an iteratee call. + * + * @private + * @param {*} value The potential iteratee value argument. + * @param {*} index The potential iteratee index or key argument. + * @param {*} object The potential iteratee object argument. + * @returns {boolean} Returns `true` if the arguments are from an iteratee call, + * else `false`. + */ + function isIterateeCall(value, index, object) { + if (!isObject(object)) { + return false; + } + var type = typeof index; + if (type == 'number' + ? (isArrayLike(object) && isIndex(index, object.length)) + : (type == 'string' && index in object) + ) { + return eq(object[index], value); + } + return false; + } + + /** + * This function is like + * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * except that it includes inherited enumerable properties. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function nativeKeysIn(object) { + var result = []; + if (object != null) { + for (var key in Object(object)) { + result.push(key); + } + } + return result; + } + + /** + * Converts `value` to a string using `Object.prototype.toString`. + * + * @private + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + */ + function objectToString(value) { + return nativeObjectToString.call(value); + } + + /** + * A specialized version of `baseRest` which transforms the rest array. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @param {Function} transform The rest array transform. + * @returns {Function} Returns the new function. + */ + function overRest(func, start, transform) { + start = nativeMax(start === undefined ? (func.length - 1) : start, 0); + return function() { + var args = arguments, + index = -1, + length = nativeMax(args.length - start, 0), + array = Array(length); + + while (++index < length) { + array[index] = args[start + index]; + } + index = -1; + var otherArgs = Array(start + 1); + while (++index < start) { + otherArgs[index] = args[index]; + } + otherArgs[start] = transform(array); + return func.apply(this, otherArgs); + }; + } + + /** + * Sets the `toString` method of `func` to return `string`. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. + */ + var setToString = identity; + + /*------------------------------------------------------------------------*/ + + /** + * Creates an array with all falsey values removed. The values `false`, `null`, + * `0`, `""`, `undefined`, and `NaN` are falsey. 
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to compact. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.compact([0, 1, false, 2, '', 3]); + * // => [1, 2, 3] + */ + function compact(array) { + return baseFilter(array, Boolean); + } + + /** + * Creates a new array concatenating `array` with any additional arrays + * and/or values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to concatenate. + * @param {...*} [values] The values to concatenate. + * @returns {Array} Returns the new concatenated array. + * @example + * + * var array = [1]; + * var other = _.concat(array, 2, [3], [[4]]); + * + * console.log(other); + * // => [1, 2, 3, [4]] + * + * console.log(array); + * // => [1] + */ + function concat() { + var length = arguments.length; + if (!length) { + return []; + } + var args = Array(length - 1), + array = arguments[0], + index = length; + + while (index--) { + args[index - 1] = arguments[index]; + } + return arrayPush(isArray(array) ? copyArray(array) : [array], baseFlatten(args, 1)); + } + + /** + * This method is like `_.find` except that it returns the index of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.findIndex(users, function(o) { return o.user == 'barney'; }); + * // => 0 + * + * // The `_.matches` iteratee shorthand. + * _.findIndex(users, { 'user': 'fred', 'active': false }); + * // => 1 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findIndex(users, ['active', false]); + * // => 0 + * + * // The `_.property` iteratee shorthand. + * _.findIndex(users, 'active'); + * // => 2 + */ + function findIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseFindIndex(array, baseIteratee(predicate, 3), index); + } + + /** + * Flattens `array` a single level deep. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flatten([1, [2, [3, [4]], 5]]); + * // => [1, 2, [3, [4]], 5] + */ + function flatten(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, 1) : []; + } + + /** + * Recursively flattens `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flattenDeep([1, [2, [3, [4]], 5]]); + * // => [1, 2, 3, 4, 5] + */ + function flattenDeep(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, INFINITY) : []; + } + + /** + * Gets the first element of `array`. 
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias first + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the first element of `array`. + * @example + * + * _.head([1, 2, 3]); + * // => 1 + * + * _.head([]); + * // => undefined + */ + function head(array) { + return (array && array.length) ? array[0] : undefined; + } + + /** + * Gets the index at which the first occurrence of `value` is found in `array` + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. If `fromIndex` is negative, it's used as the + * offset from the end of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.indexOf([1, 2, 1, 2], 2); + * // => 1 + * + * // Search from the `fromIndex`. + * _.indexOf([1, 2, 1, 2], 2, 2); + * // => 3 + */ + function indexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (typeof fromIndex == 'number') { + fromIndex = fromIndex < 0 ? nativeMax(length + fromIndex, 0) : fromIndex; + } else { + fromIndex = 0; + } + var index = (fromIndex || 0) - 1, + isReflexive = value === value; + + while (++index < length) { + var other = array[index]; + if ((isReflexive ? other === value : other !== other)) { + return index; + } + } + return -1; + } + + /** + * Gets the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the last element of `array`. + * @example + * + * _.last([1, 2, 3]); + * // => 3 + */ + function last(array) { + var length = array == null ? 0 : array.length; + return length ? array[length - 1] : undefined; + } + + /** + * Creates a slice of `array` from `start` up to, but not including, `end`. + * + * **Note:** This method is used instead of + * [`Array#slice`](https://mdn.io/Array/slice) to ensure dense arrays are + * returned. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function slice(array, start, end) { + var length = array == null ? 0 : array.length; + start = start == null ? 0 : +start; + end = end === undefined ? length : +end; + return length ? baseSlice(array, start, end) : []; + } + + /*------------------------------------------------------------------------*/ + + /** + * Creates a `lodash` wrapper instance that wraps `value` with explicit method + * chain sequences enabled. The result of such sequences must be unwrapped + * with `_#value`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Seq + * @param {*} value The value to wrap. + * @returns {Object} Returns the new `lodash` wrapper instance. 
+ * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'pebbles', 'age': 1 } + * ]; + * + * var youngest = _ + * .chain(users) + * .sortBy('age') + * .map(function(o) { + * return o.user + ' is ' + o.age; + * }) + * .head() + * .value(); + * // => 'pebbles is 1' + */ + function chain(value) { + var result = lodash(value); + result.__chain__ = true; + return result; + } + + /** + * This method invokes `interceptor` and returns `value`. The interceptor + * is invoked with one argument; (value). The purpose of this method is to + * "tap into" a method chain sequence in order to modify intermediate results. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns `value`. + * @example + * + * _([1, 2, 3]) + * .tap(function(array) { + * // Mutate input array. + * array.pop(); + * }) + * .reverse() + * .value(); + * // => [2, 1] + */ + function tap(value, interceptor) { + interceptor(value); + return value; + } + + /** + * This method is like `_.tap` except that it returns the result of `interceptor`. + * The purpose of this method is to "pass thru" values replacing intermediate + * results in a method chain sequence. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns the result of `interceptor`. + * @example + * + * _(' abc ') + * .chain() + * .trim() + * .thru(function(value) { + * return [value]; + * }) + * .value(); + * // => ['abc'] + */ + function thru(value, interceptor) { + return interceptor(value); + } + + /** + * Creates a `lodash` wrapper instance with explicit method chain sequences enabled. + * + * @name chain + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 } + * ]; + * + * // A sequence without explicit chaining. + * _(users).head(); + * // => { 'user': 'barney', 'age': 36 } + * + * // A sequence with explicit chaining. + * _(users) + * .chain() + * .head() + * .pick('user') + * .value(); + * // => { 'user': 'barney' } + */ + function wrapperChain() { + return chain(this); + } + + /** + * Executes the chain sequence to resolve the unwrapped value. + * + * @name value + * @memberOf _ + * @since 0.1.0 + * @alias toJSON, valueOf + * @category Seq + * @returns {*} Returns the resolved unwrapped value. + * @example + * + * _([1, 2, 3]).value(); + * // => [1, 2, 3] + */ + function wrapperValue() { + return baseWrapperValue(this.__wrapped__, this.__actions__); + } + + /*------------------------------------------------------------------------*/ + + /** + * Checks if `predicate` returns truthy for **all** elements of `collection`. + * Iteration is stopped once `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * **Note:** This method returns `true` for + * [empty collections](https://en.wikipedia.org/wiki/Empty_set) because + * [everything is true](https://en.wikipedia.org/wiki/Vacuous_truth) of + * elements of empty collections. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. + * @example + * + * _.every([true, 1, null, 'yes'], Boolean); + * // => false + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.every(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.every(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.every(users, 'active'); + * // => false + */ + function every(collection, predicate, guard) { + predicate = guard ? undefined : predicate; + return baseEvery(collection, baseIteratee(predicate)); + } + + /** + * Iterates over elements of `collection`, returning an array of all elements + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * **Note:** Unlike `_.remove`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.reject + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * _.filter(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.filter(users, { 'age': 36, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.filter(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. + * _.filter(users, 'active'); + * // => objects for ['barney'] + * + * // Combining several predicates using `_.overEvery` or `_.overSome`. + * _.filter(users, _.overSome([{ 'age': 36 }, ['age', 40]])); + * // => objects for ['fred', 'barney'] + */ + function filter(collection, predicate) { + return baseFilter(collection, baseIteratee(predicate)); + } + + /** + * Iterates over elements of `collection`, returning the first element + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false }, + * { 'user': 'pebbles', 'age': 1, 'active': true } + * ]; + * + * _.find(users, function(o) { return o.age < 40; }); + * // => object for 'barney' + * + * // The `_.matches` iteratee shorthand. + * _.find(users, { 'age': 1, 'active': true }); + * // => object for 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.find(users, ['active', false]); + * // => object for 'fred' + * + * // The `_.property` iteratee shorthand. 
+ * _.find(users, 'active'); + * // => object for 'barney' + */ + var find = createFind(findIndex); + + /** + * Iterates over elements of `collection` and invokes `iteratee` for each element. + * The iteratee is invoked with three arguments: (value, index|key, collection). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * **Note:** As with other "Collections" methods, objects with a "length" + * property are iterated like arrays. To avoid this behavior use `_.forIn` + * or `_.forOwn` for object iteration. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias each + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEachRight + * @example + * + * _.forEach([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `1` then `2`. + * + * _.forEach({ 'a': 1, 'b': 2 }, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ + function forEach(collection, iteratee) { + return baseEach(collection, baseIteratee(iteratee)); + } + + /** + * Creates an array of values by running each element in `collection` thru + * `iteratee`. The iteratee is invoked with three arguments: + * (value, index|key, collection). + * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.every`, `_.filter`, `_.map`, `_.mapValues`, `_.reject`, and `_.some`. + * + * The guarded methods are: + * `ary`, `chunk`, `curry`, `curryRight`, `drop`, `dropRight`, `every`, + * `fill`, `invert`, `parseInt`, `random`, `range`, `rangeRight`, `repeat`, + * `sampleSize`, `slice`, `some`, `sortBy`, `split`, `take`, `takeRight`, + * `template`, `trim`, `trimEnd`, `trimStart`, and `words` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + * @example + * + * function square(n) { + * return n * n; + * } + * + * _.map([4, 8], square); + * // => [16, 64] + * + * _.map({ 'a': 4, 'b': 8 }, square); + * // => [16, 64] (iteration order is not guaranteed) + * + * var users = [ + * { 'user': 'barney' }, + * { 'user': 'fred' } + * ]; + * + * // The `_.property` iteratee shorthand. + * _.map(users, 'user'); + * // => ['barney', 'fred'] + */ + function map(collection, iteratee) { + return baseMap(collection, baseIteratee(iteratee)); + } + + /** + * Reduces `collection` to a value which is the accumulated result of running + * each element in `collection` thru `iteratee`, where each successive + * invocation is supplied the return value of the previous. If `accumulator` + * is not given, the first element of `collection` is used as the initial + * value. The iteratee is invoked with four arguments: + * (accumulator, value, index|key, collection). + * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.reduce`, `_.reduceRight`, and `_.transform`. + * + * The guarded methods are: + * `assign`, `defaults`, `defaultsDeep`, `includes`, `merge`, `orderBy`, + * and `sortBy` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @returns {*} Returns the accumulated value. + * @see _.reduceRight + * @example + * + * _.reduce([1, 2], function(sum, n) { + * return sum + n; + * }, 0); + * // => 3 + * + * _.reduce({ 'a': 1, 'b': 2, 'c': 1 }, function(result, value, key) { + * (result[value] || (result[value] = [])).push(key); + * return result; + * }, {}); + * // => { '1': ['a', 'c'], '2': ['b'] } (iteration order is not guaranteed) + */ + function reduce(collection, iteratee, accumulator) { + return baseReduce(collection, baseIteratee(iteratee), accumulator, arguments.length < 3, baseEach); + } + + /** + * Gets the size of `collection` by returning its length for array-like + * values or the number of own enumerable string keyed properties for objects. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @returns {number} Returns the collection size. + * @example + * + * _.size([1, 2, 3]); + * // => 3 + * + * _.size({ 'a': 1, 'b': 2 }); + * // => 2 + * + * _.size('pebbles'); + * // => 7 + */ + function size(collection) { + if (collection == null) { + return 0; + } + collection = isArrayLike(collection) ? collection : nativeKeys(collection); + return collection.length; + } + + /** + * Checks if `predicate` returns truthy for **any** element of `collection`. + * Iteration is stopped once `predicate` returns truthy. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + * @example + * + * _.some([null, 0, 'yes', false], Boolean); + * // => true + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.some(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.some(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.some(users, 'active'); + * // => true + */ + function some(collection, predicate, guard) { + predicate = guard ? undefined : predicate; + return baseSome(collection, baseIteratee(predicate)); + } + + /** + * Creates an array of elements, sorted in ascending order by the results of + * running each element in a collection thru each iteratee. This method + * performs a stable sort, that is, it preserves the original sort order of + * equal elements. The iteratees are invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {...(Function|Function[])} [iteratees=[_.identity]] + * The iteratees to sort by. + * @returns {Array} Returns the new sorted array. 
+ * @example + * + * var users = [ + * { 'user': 'fred', 'age': 48 }, + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 30 }, + * { 'user': 'barney', 'age': 34 } + * ]; + * + * _.sortBy(users, [function(o) { return o.user; }]); + * // => objects for [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 30]] + * + * _.sortBy(users, ['user', 'age']); + * // => objects for [['barney', 34], ['barney', 36], ['fred', 30], ['fred', 48]] + */ + function sortBy(collection, iteratee) { + var index = 0; + iteratee = baseIteratee(iteratee); + + return baseMap(baseMap(collection, function(value, key, collection) { + return { 'value': value, 'index': index++, 'criteria': iteratee(value, key, collection) }; + }).sort(function(object, other) { + return compareAscending(object.criteria, other.criteria) || (object.index - other.index); + }), baseProperty('value')); + } + + /*------------------------------------------------------------------------*/ + + /** + * Creates a function that invokes `func`, with the `this` binding and arguments + * of the created function, while it's called less than `n` times. Subsequent + * calls to the created function return the result of the last `func` invocation. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {number} n The number of calls at which `func` is no longer invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * jQuery(element).on('click', _.before(5, addContactToList)); + * // => Allows adding up to 4 contacts to the list. + */ + function before(n, func) { + var result; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n > 0) { + result = func.apply(this, arguments); + } + if (n <= 1) { + func = undefined; + } + return result; + }; + } + + /** + * Creates a function that invokes `func` with the `this` binding of `thisArg` + * and `partials` prepended to the arguments it receives. + * + * The `_.bind.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for partially applied arguments. + * + * **Note:** Unlike native `Function#bind`, this method doesn't set the "length" + * property of bound functions. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to bind. + * @param {*} thisArg The `this` binding of `func`. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * function greet(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * + * var object = { 'user': 'fred' }; + * + * var bound = _.bind(greet, object, 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * // Bound with placeholders. + * var bound = _.bind(greet, object, _, '!'); + * bound('hi'); + * // => 'hi fred!' + */ + var bind = baseRest(function(func, thisArg, partials) { + return createPartial(func, WRAP_BIND_FLAG | WRAP_PARTIAL_FLAG, thisArg, partials); + }); + + /** + * Defers invoking the `func` until the current call stack has cleared. Any + * additional arguments are provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to defer. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. 
+ * @example + * + * _.defer(function(text) { + * console.log(text); + * }, 'deferred'); + * // => Logs 'deferred' after one millisecond. + */ + var defer = baseRest(function(func, args) { + return baseDelay(func, 1, args); + }); + + /** + * Invokes `func` after `wait` milliseconds. Any additional arguments are + * provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. + * @example + * + * _.delay(function(text) { + * console.log(text); + * }, 1000, 'later'); + * // => Logs 'later' after one second. + */ + var delay = baseRest(function(func, wait, args) { + return baseDelay(func, toNumber(wait) || 0, args); + }); + + /** + * Creates a function that negates the result of the predicate `func`. The + * `func` predicate is invoked with the `this` binding and arguments of the + * created function. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} predicate The predicate to negate. + * @returns {Function} Returns the new negated function. + * @example + * + * function isEven(n) { + * return n % 2 == 0; + * } + * + * _.filter([1, 2, 3, 4, 5, 6], _.negate(isEven)); + * // => [1, 3, 5] + */ + function negate(predicate) { + if (typeof predicate != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return function() { + var args = arguments; + return !predicate.apply(this, args); + }; + } + + /** + * Creates a function that is restricted to invoking `func` once. Repeat calls + * to the function return the value of the first invocation. The `func` is + * invoked with the `this` binding and arguments of the created function. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var initialize = _.once(createApplication); + * initialize(); + * initialize(); + * // => `createApplication` is invoked once + */ + function once(func) { + return before(2, func); + } + + /*------------------------------------------------------------------------*/ + + /** + * Creates a shallow clone of `value`. + * + * **Note:** This method is loosely based on the + * [structured clone algorithm](https://mdn.io/Structured_clone_algorithm) + * and supports cloning arrays, array buffers, booleans, date objects, maps, + * numbers, `Object` objects, regexes, sets, strings, symbols, and typed + * arrays. The own enumerable properties of `arguments` objects are cloned + * as plain objects. An empty object is returned for uncloneable values such + * as error objects, functions, DOM nodes, and WeakMaps. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to clone. + * @returns {*} Returns the cloned value. + * @see _.cloneDeep + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var shallow = _.clone(objects); + * console.log(shallow[0] === objects[0]); + * // => true + */ + function clone(value) { + if (!isObject(value)) { + return value; + } + return isArray(value) ? 
copyArray(value) : copyObject(value, nativeKeys(value)); + } + + /** + * Performs a + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * comparison between two values to determine if they are equivalent. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.eq(object, object); + * // => true + * + * _.eq(object, other); + * // => false + * + * _.eq('a', 'a'); + * // => true + * + * _.eq('a', Object('a')); + * // => false + * + * _.eq(NaN, NaN); + * // => true + */ + function eq(value, other) { + return value === other || (value !== value && other !== other); + } + + /** + * Checks if `value` is likely an `arguments` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. + * @example + * + * _.isArguments(function() { return arguments; }()); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ + var isArguments = baseIsArguments(function() { return arguments; }()) ? baseIsArguments : function(value) { + return isObjectLike(value) && hasOwnProperty.call(value, 'callee') && + !propertyIsEnumerable.call(value, 'callee'); + }; + + /** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ + var isArray = Array.isArray; + + /** + * Checks if `value` is array-like. A value is considered array-like if it's + * not a function and has a `value.length` that's an integer greater than or + * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is array-like, else `false`. + * @example + * + * _.isArrayLike([1, 2, 3]); + * // => true + * + * _.isArrayLike(document.body.children); + * // => true + * + * _.isArrayLike('abc'); + * // => true + * + * _.isArrayLike(_.noop); + * // => false + */ + function isArrayLike(value) { + return value != null && isLength(value.length) && !isFunction(value); + } + + /** + * Checks if `value` is classified as a boolean primitive or object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a boolean, else `false`. + * @example + * + * _.isBoolean(false); + * // => true + * + * _.isBoolean(null); + * // => false + */ + function isBoolean(value) { + return value === true || value === false || + (isObjectLike(value) && baseGetTag(value) == boolTag); + } + + /** + * Checks if `value` is classified as a `Date` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. 
+ * @example + * + * _.isDate(new Date); + * // => true + * + * _.isDate('Mon April 23 2012'); + * // => false + */ + var isDate = baseIsDate; + + /** + * Checks if `value` is an empty object, collection, map, or set. + * + * Objects are considered empty if they have no own enumerable string keyed + * properties. + * + * Array-like values such as `arguments` objects, arrays, buffers, strings, or + * jQuery-like collections are considered empty if they have a `length` of `0`. + * Similarly, maps and sets are considered empty if they have a `size` of `0`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is empty, else `false`. + * @example + * + * _.isEmpty(null); + * // => true + * + * _.isEmpty(true); + * // => true + * + * _.isEmpty(1); + * // => true + * + * _.isEmpty([1, 2, 3]); + * // => false + * + * _.isEmpty({ 'a': 1 }); + * // => false + */ + function isEmpty(value) { + if (isArrayLike(value) && + (isArray(value) || isString(value) || + isFunction(value.splice) || isArguments(value))) { + return !value.length; + } + return !nativeKeys(value).length; + } + + /** + * Performs a deep comparison between two values to determine if they are + * equivalent. + * + * **Note:** This method supports comparing arrays, array buffers, booleans, + * date objects, error objects, maps, numbers, `Object` objects, regexes, + * sets, strings, symbols, and typed arrays. `Object` objects are compared + * by their own, not inherited, enumerable properties. Functions and DOM + * nodes are compared by strict equality, i.e. `===`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.isEqual(object, other); + * // => true + * + * object === other; + * // => false + */ + function isEqual(value, other) { + return baseIsEqual(value, other); + } + + /** + * Checks if `value` is a finite primitive number. + * + * **Note:** This method is based on + * [`Number.isFinite`](https://mdn.io/Number/isFinite). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a finite number, else `false`. + * @example + * + * _.isFinite(3); + * // => true + * + * _.isFinite(Number.MIN_VALUE); + * // => true + * + * _.isFinite(Infinity); + * // => false + * + * _.isFinite('3'); + * // => false + */ + function isFinite(value) { + return typeof value == 'number' && nativeIsFinite(value); + } + + /** + * Checks if `value` is classified as a `Function` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a function, else `false`. + * @example + * + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false + */ + function isFunction(value) { + if (!isObject(value)) { + return false; + } + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 9 which returns 'object' for typed arrays and other constructors. + var tag = baseGetTag(value); + return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; + } + + /** + * Checks if `value` is a valid array-like length. 
+ * + * **Note:** This method is loosely based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. + * @example + * + * _.isLength(3); + * // => true + * + * _.isLength(Number.MIN_VALUE); + * // => false + * + * _.isLength(Infinity); + * // => false + * + * _.isLength('3'); + * // => false + */ + function isLength(value) { + return typeof value == 'number' && + value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; + } + + /** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ + function isObject(value) { + var type = typeof value; + return value != null && (type == 'object' || type == 'function'); + } + + /** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ + function isObjectLike(value) { + return value != null && typeof value == 'object'; + } + + /** + * Checks if `value` is `NaN`. + * + * **Note:** This method is based on + * [`Number.isNaN`](https://mdn.io/Number/isNaN) and is not the same as + * global [`isNaN`](https://mdn.io/isNaN) which returns `true` for + * `undefined` and other non-number values. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + * @example + * + * _.isNaN(NaN); + * // => true + * + * _.isNaN(new Number(NaN)); + * // => true + * + * isNaN(undefined); + * // => true + * + * _.isNaN(undefined); + * // => false + */ + function isNaN(value) { + // An `NaN` primitive is the only value that is not equal to itself. + // Perform the `toStringTag` check first to avoid errors with some + // ActiveX objects in IE. + return isNumber(value) && value != +value; + } + + /** + * Checks if `value` is `null`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `null`, else `false`. + * @example + * + * _.isNull(null); + * // => true + * + * _.isNull(void 0); + * // => false + */ + function isNull(value) { + return value === null; + } + + /** + * Checks if `value` is classified as a `Number` primitive or object. + * + * **Note:** To exclude `Infinity`, `-Infinity`, and `NaN`, which are + * classified as numbers, use the `_.isFinite` method. 
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a number, else `false`. + * @example + * + * _.isNumber(3); + * // => true + * + * _.isNumber(Number.MIN_VALUE); + * // => true + * + * _.isNumber(Infinity); + * // => true + * + * _.isNumber('3'); + * // => false + */ + function isNumber(value) { + return typeof value == 'number' || + (isObjectLike(value) && baseGetTag(value) == numberTag); + } + + /** + * Checks if `value` is classified as a `RegExp` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. + * @example + * + * _.isRegExp(/abc/); + * // => true + * + * _.isRegExp('/abc/'); + * // => false + */ + var isRegExp = baseIsRegExp; + + /** + * Checks if `value` is classified as a `String` primitive or object. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a string, else `false`. + * @example + * + * _.isString('abc'); + * // => true + * + * _.isString(1); + * // => false + */ + function isString(value) { + return typeof value == 'string' || + (!isArray(value) && isObjectLike(value) && baseGetTag(value) == stringTag); + } + + /** + * Checks if `value` is `undefined`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `undefined`, else `false`. + * @example + * + * _.isUndefined(void 0); + * // => true + * + * _.isUndefined(null); + * // => false + */ + function isUndefined(value) { + return value === undefined; + } + + /** + * Converts `value` to an array. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to convert. + * @returns {Array} Returns the converted array. + * @example + * + * _.toArray({ 'a': 1, 'b': 2 }); + * // => [1, 2] + * + * _.toArray('abc'); + * // => ['a', 'b', 'c'] + * + * _.toArray(1); + * // => [] + * + * _.toArray(null); + * // => [] + */ + function toArray(value) { + if (!isArrayLike(value)) { + return values(value); + } + return value.length ? copyArray(value) : []; + } + + /** + * Converts `value` to an integer. + * + * **Note:** This method is loosely based on + * [`ToInteger`](http://www.ecma-international.org/ecma-262/7.0/#sec-tointeger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toInteger(3.2); + * // => 3 + * + * _.toInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toInteger(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toInteger('3.2'); + * // => 3 + */ + var toInteger = Number; + + /** + * Converts `value` to a number. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to process. + * @returns {number} Returns the number. + * @example + * + * _.toNumber(3.2); + * // => 3.2 + * + * _.toNumber(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toNumber(Infinity); + * // => Infinity + * + * _.toNumber('3.2'); + * // => 3.2 + */ + var toNumber = Number; + + /** + * Converts `value` to a string. An empty string is returned for `null` + * and `undefined` values. The sign of `-0` is preserved. 
+ * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.toString(null); + * // => '' + * + * _.toString(-0); + * // => '-0' + * + * _.toString([1, 2, 3]); + * // => '1,2,3' + */ + function toString(value) { + if (typeof value == 'string') { + return value; + } + return value == null ? '' : (value + ''); + } + + /*------------------------------------------------------------------------*/ + + /** + * Assigns own enumerable string keyed properties of source objects to the + * destination object. Source objects are applied from left to right. + * Subsequent sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object` and is loosely based on + * [`Object.assign`](https://mdn.io/Object/assign). + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assignIn + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assign({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'c': 3 } + */ + var assign = createAssigner(function(object, source) { + copyObject(source, nativeKeys(source), object); + }); + + /** + * This method is like `_.assign` except that it iterates over own and + * inherited source properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extend + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assign + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assignIn({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'b': 2, 'c': 3, 'd': 4 } + */ + var assignIn = createAssigner(function(object, source) { + copyObject(source, nativeKeysIn(source), object); + }); + + /** + * Creates an object that inherits from the `prototype` object. If a + * `properties` object is given, its own enumerable string keyed properties + * are assigned to the created object. + * + * @static + * @memberOf _ + * @since 2.3.0 + * @category Object + * @param {Object} prototype The object to inherit from. + * @param {Object} [properties] The properties to assign to the object. + * @returns {Object} Returns the new object. + * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * function Circle() { + * Shape.call(this); + * } + * + * Circle.prototype = _.create(Shape.prototype, { + * 'constructor': Circle + * }); + * + * var circle = new Circle; + * circle instanceof Circle; + * // => true + * + * circle instanceof Shape; + * // => true + */ + function create(prototype, properties) { + var result = baseCreate(prototype); + return properties == null ? result : assign(result, properties); + } + + /** + * Assigns own and inherited enumerable string keyed properties of source + * objects to the destination object for all destination properties that + * resolve to `undefined`. Source objects are applied from left to right. + * Once a property is set, additional values of the same property are ignored. 
+ * + * **Note:** This method mutates `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaultsDeep + * @example + * + * _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var defaults = baseRest(function(object, sources) { + object = Object(object); + + var index = -1; + var length = sources.length; + var guard = length > 2 ? sources[2] : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + length = 1; + } + + while (++index < length) { + var source = sources[index]; + var props = keysIn(source); + var propsIndex = -1; + var propsLength = props.length; + + while (++propsIndex < propsLength) { + var key = props[propsIndex]; + var value = object[key]; + + if (value === undefined || + (eq(value, objectProto[key]) && !hasOwnProperty.call(object, key))) { + object[key] = source[key]; + } + } + } + + return object; + }); + + /** + * Checks if `path` is a direct property of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + * @example + * + * var object = { 'a': { 'b': 2 } }; + * var other = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.has(object, 'a'); + * // => true + * + * _.has(object, 'a.b'); + * // => true + * + * _.has(object, ['a', 'b']); + * // => true + * + * _.has(other, 'a'); + * // => false + */ + function has(object, path) { + return object != null && hasOwnProperty.call(object, path); + } + + /** + * Creates an array of the own enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. See the + * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * for more details. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keys(new Foo); + * // => ['a', 'b'] (iteration order is not guaranteed) + * + * _.keys('hi'); + * // => ['0', '1'] + */ + var keys = nativeKeys; + + /** + * Creates an array of the own and inherited enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keysIn(new Foo); + * // => ['a', 'b', 'c'] (iteration order is not guaranteed) + */ + var keysIn = nativeKeysIn; + + /** + * Creates an object composed of the picked `object` properties. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The source object. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.pick(object, ['a', 'c']); + * // => { 'a': 1, 'c': 3 } + */ + var pick = flatRest(function(object, paths) { + return object == null ? 
{} : basePick(object, paths); + }); + + /** + * This method is like `_.get` except that if the resolved value is a + * function it's invoked with the `this` binding of its parent object and + * its result is returned. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to resolve. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c1': 3, 'c2': _.constant(4) } }] }; + * + * _.result(object, 'a[0].b.c1'); + * // => 3 + * + * _.result(object, 'a[0].b.c2'); + * // => 4 + * + * _.result(object, 'a[0].b.c3', 'default'); + * // => 'default' + * + * _.result(object, 'a[0].b.c3', _.constant('default')); + * // => 'default' + */ + function result(object, path, defaultValue) { + var value = object == null ? undefined : object[path]; + if (value === undefined) { + value = defaultValue; + } + return isFunction(value) ? value.call(object) : value; + } + + /** + * Creates an array of the own enumerable string keyed property values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.values(new Foo); + * // => [1, 2] (iteration order is not guaranteed) + * + * _.values('hi'); + * // => ['h', 'i'] + */ + function values(object) { + return object == null ? [] : baseValues(object, keys(object)); + } + + /*------------------------------------------------------------------------*/ + + /** + * Converts the characters "&", "<", ">", '"', and "'" in `string` to their + * corresponding HTML entities. + * + * **Note:** No other characters are escaped. To escape additional + * characters use a third-party library like [_he_](https://mths.be/he). + * + * Though the ">" character is escaped for symmetry, characters like + * ">" and "/" don't need escaping in HTML and have no special meaning + * unless they're part of a tag or unquoted attribute value. See + * [Mathias Bynens's article](https://mathiasbynens.be/notes/ambiguous-ampersands) + * (under "semi-related fun fact") for more details. + * + * When working with HTML you should always + * [quote attribute values](http://wonko.com/post/html-escaping) to reduce + * XSS vectors. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category String + * @param {string} [string=''] The string to escape. + * @returns {string} Returns the escaped string. + * @example + * + * _.escape('fred, barney, & pebbles'); + * // => 'fred, barney, & pebbles' + */ + function escape(string) { + string = toString(string); + return (string && reHasUnescapedHtml.test(string)) + ? string.replace(reUnescapedHtml, escapeHtmlChar) + : string; + } + + /*------------------------------------------------------------------------*/ + + /** + * This method returns the first argument it receives. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Util + * @param {*} value Any value. + * @returns {*} Returns `value`. 
+ * @example + * + * var object = { 'a': 1 }; + * + * console.log(_.identity(object) === object); + * // => true + */ + function identity(value) { + return value; + } + + /** + * Creates a function that invokes `func` with the arguments of the created + * function. If `func` is a property name, the created function returns the + * property value for a given element. If `func` is an array or object, the + * created function returns `true` for elements that contain the equivalent + * source properties, otherwise it returns `false`. + * + * @static + * @since 4.0.0 + * @memberOf _ + * @category Util + * @param {*} [func=_.identity] The value to convert to a callback. + * @returns {Function} Returns the callback. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.filter(users, _.iteratee({ 'user': 'barney', 'active': true })); + * // => [{ 'user': 'barney', 'age': 36, 'active': true }] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.filter(users, _.iteratee(['user', 'fred'])); + * // => [{ 'user': 'fred', 'age': 40 }] + * + * // The `_.property` iteratee shorthand. + * _.map(users, _.iteratee('user')); + * // => ['barney', 'fred'] + * + * // Create custom iteratee shorthands. + * _.iteratee = _.wrap(_.iteratee, function(iteratee, func) { + * return !_.isRegExp(func) ? iteratee(func) : function(string) { + * return func.test(string); + * }; + * }); + * + * _.filter(['abc', 'def'], /ef/); + * // => ['def'] + */ + var iteratee = baseIteratee; + + /** + * Creates a function that performs a partial deep comparison between a given + * object and `source`, returning `true` if the given object has equivalent + * property values, else `false`. + * + * **Note:** The created function is equivalent to `_.isMatch` with `source` + * partially applied. + * + * Partial comparisons will match empty array and empty object `source` + * values against any array or object value, respectively. See `_.isEqual` + * for a list of supported value comparisons. + * + * **Note:** Multiple values can be checked by combining several matchers + * using `_.overSome` + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Util + * @param {Object} source The object of property values to match. + * @returns {Function} Returns the new spec function. + * @example + * + * var objects = [ + * { 'a': 1, 'b': 2, 'c': 3 }, + * { 'a': 4, 'b': 5, 'c': 6 } + * ]; + * + * _.filter(objects, _.matches({ 'a': 4, 'c': 6 })); + * // => [{ 'a': 4, 'b': 5, 'c': 6 }] + * + * // Checking for several possible values + * _.filter(objects, _.overSome([_.matches({ 'a': 1 }), _.matches({ 'a': 4 })])); + * // => [{ 'a': 1, 'b': 2, 'c': 3 }, { 'a': 4, 'b': 5, 'c': 6 }] + */ + function matches(source) { + return baseMatches(assign({}, source)); + } + + /** + * Adds all own enumerable string keyed function properties of a source + * object to the destination object. If `object` is a function, then methods + * are added to its prototype as well. + * + * **Note:** Use `_.runInContext` to create a pristine `lodash` function to + * avoid conflicts caused by modifying the original. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Util + * @param {Function|Object} [object=lodash] The destination object. + * @param {Object} source The object of functions to add. + * @param {Object} [options={}] The options object. 
+ * @param {boolean} [options.chain=true] Specify whether mixins are chainable. + * @returns {Function|Object} Returns `object`. + * @example + * + * function vowels(string) { + * return _.filter(string, function(v) { + * return /[aeiou]/i.test(v); + * }); + * } + * + * _.mixin({ 'vowels': vowels }); + * _.vowels('fred'); + * // => ['e'] + * + * _('fred').vowels().value(); + * // => ['e'] + * + * _.mixin({ 'vowels': vowels }, { 'chain': false }); + * _('fred').vowels(); + * // => ['e'] + */ + function mixin(object, source, options) { + var props = keys(source), + methodNames = baseFunctions(source, props); + + if (options == null && + !(isObject(source) && (methodNames.length || !props.length))) { + options = source; + source = object; + object = this; + methodNames = baseFunctions(source, keys(source)); + } + var chain = !(isObject(options) && 'chain' in options) || !!options.chain, + isFunc = isFunction(object); + + baseEach(methodNames, function(methodName) { + var func = source[methodName]; + object[methodName] = func; + if (isFunc) { + object.prototype[methodName] = function() { + var chainAll = this.__chain__; + if (chain || chainAll) { + var result = object(this.__wrapped__), + actions = result.__actions__ = copyArray(this.__actions__); + + actions.push({ 'func': func, 'args': arguments, 'thisArg': object }); + result.__chain__ = chainAll; + return result; + } + return func.apply(object, arrayPush([this.value()], arguments)); + }; + } + }); + + return object; + } + + /** + * Reverts the `_` variable to its previous value and returns a reference to + * the `lodash` function. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Util + * @returns {Function} Returns the `lodash` function. + * @example + * + * var lodash = _.noConflict(); + */ + function noConflict() { + if (root._ === this) { + root._ = oldDash; + } + return this; + } + + /** + * This method returns `undefined`. + * + * @static + * @memberOf _ + * @since 2.3.0 + * @category Util + * @example + * + * _.times(2, _.noop); + * // => [undefined, undefined] + */ + function noop() { + // No operation performed. + } + + /** + * Generates a unique ID. If `prefix` is given, the ID is appended to it. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Util + * @param {string} [prefix=''] The value to prefix the ID with. + * @returns {string} Returns the unique ID. + * @example + * + * _.uniqueId('contact_'); + * // => 'contact_104' + * + * _.uniqueId(); + * // => '105' + */ + function uniqueId(prefix) { + var id = ++idCounter; + return toString(prefix) + id; + } + + /*------------------------------------------------------------------------*/ + + /** + * Computes the maximum value of `array`. If `array` is empty or falsey, + * `undefined` is returned. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Math + * @param {Array} array The array to iterate over. + * @returns {*} Returns the maximum value. + * @example + * + * _.max([4, 2, 8, 6]); + * // => 8 + * + * _.max([]); + * // => undefined + */ + function max(array) { + return (array && array.length) + ? baseExtremum(array, identity, baseGt) + : undefined; + } + + /** + * Computes the minimum value of `array`. If `array` is empty or falsey, + * `undefined` is returned. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Math + * @param {Array} array The array to iterate over. + * @returns {*} Returns the minimum value. 
+ * @example + * + * _.min([4, 2, 8, 6]); + * // => 2 + * + * _.min([]); + * // => undefined + */ + function min(array) { + return (array && array.length) + ? baseExtremum(array, identity, baseLt) + : undefined; + } + + /*------------------------------------------------------------------------*/ + + // Add methods that return wrapped values in chain sequences. + lodash.assignIn = assignIn; + lodash.before = before; + lodash.bind = bind; + lodash.chain = chain; + lodash.compact = compact; + lodash.concat = concat; + lodash.create = create; + lodash.defaults = defaults; + lodash.defer = defer; + lodash.delay = delay; + lodash.filter = filter; + lodash.flatten = flatten; + lodash.flattenDeep = flattenDeep; + lodash.iteratee = iteratee; + lodash.keys = keys; + lodash.map = map; + lodash.matches = matches; + lodash.mixin = mixin; + lodash.negate = negate; + lodash.once = once; + lodash.pick = pick; + lodash.slice = slice; + lodash.sortBy = sortBy; + lodash.tap = tap; + lodash.thru = thru; + lodash.toArray = toArray; + lodash.values = values; + + // Add aliases. + lodash.extend = assignIn; + + // Add methods to `lodash.prototype`. + mixin(lodash, lodash); + + /*------------------------------------------------------------------------*/ + + // Add methods that return unwrapped values in chain sequences. + lodash.clone = clone; + lodash.escape = escape; + lodash.every = every; + lodash.find = find; + lodash.forEach = forEach; + lodash.has = has; + lodash.head = head; + lodash.identity = identity; + lodash.indexOf = indexOf; + lodash.isArguments = isArguments; + lodash.isArray = isArray; + lodash.isBoolean = isBoolean; + lodash.isDate = isDate; + lodash.isEmpty = isEmpty; + lodash.isEqual = isEqual; + lodash.isFinite = isFinite; + lodash.isFunction = isFunction; + lodash.isNaN = isNaN; + lodash.isNull = isNull; + lodash.isNumber = isNumber; + lodash.isObject = isObject; + lodash.isRegExp = isRegExp; + lodash.isString = isString; + lodash.isUndefined = isUndefined; + lodash.last = last; + lodash.max = max; + lodash.min = min; + lodash.noConflict = noConflict; + lodash.noop = noop; + lodash.reduce = reduce; + lodash.result = result; + lodash.size = size; + lodash.some = some; + lodash.uniqueId = uniqueId; + + // Add aliases. + lodash.each = forEach; + lodash.first = head; + + mixin(lodash, (function() { + var source = {}; + baseForOwn(lodash, function(func, methodName) { + if (!hasOwnProperty.call(lodash.prototype, methodName)) { + source[methodName] = func; + } + }); + return source; + }()), { 'chain': false }); + + /*------------------------------------------------------------------------*/ + + /** + * The semantic version number. + * + * @static + * @memberOf _ + * @type {string} + */ + lodash.VERSION = VERSION; + + // Add `Array` methods to `lodash.prototype`. + baseEach(['pop', 'join', 'replace', 'reverse', 'split', 'push', 'shift', 'sort', 'splice', 'unshift'], function(methodName) { + var func = (/^(?:replace|split)$/.test(methodName) ? String.prototype : arrayProto)[methodName], + chainName = /^(?:push|sort|unshift)$/.test(methodName) ? 'tap' : 'thru', + retUnwrapped = /^(?:pop|join|replace|shift)$/.test(methodName); + + lodash.prototype[methodName] = function() { + var args = arguments; + if (retUnwrapped && !this.__chain__) { + var value = this.value(); + return func.apply(isArray(value) ? value : [], args); + } + return this[chainName](function(value) { + return func.apply(isArray(value) ? value : [], args); + }); + }; + }); + + // Add chain sequence methods to the `lodash` wrapper. 
+ lodash.prototype.toJSON = lodash.prototype.valueOf = lodash.prototype.value = wrapperValue; + + /*--------------------------------------------------------------------------*/ + + // Some AMD build optimizers, like r.js, check for condition patterns like: + if (typeof define == 'function' && typeof define.amd == 'object' && define.amd) { + // Expose Lodash on the global object to prevent errors when Lodash is + // loaded by a script tag in the presence of an AMD loader. + // See http://requirejs.org/docs/errors.html#mismatch for more details. + // Use `_.noConflict` to remove Lodash from the global object. + root._ = lodash; + + // Define as an anonymous module so, through path mapping, it can be + // referenced as the "underscore" module. + define(function() { + return lodash; + }); + } + // Check for `exports` after `define` in case a build optimizer adds it. + else if (freeModule) { + // Export for Node.js. + (freeModule.exports = lodash)._ = lodash; + // Export for CommonJS support. + freeExports._ = lodash; + } + else { + // Export to the global object. + root._ = lodash; + } +}.call(this)); diff --git a/backend/node_modules/lodash/core.min.js b/backend/node_modules/lodash/core.min.js new file mode 100644 index 00000000..a9920884 --- /dev/null +++ b/backend/node_modules/lodash/core.min.js @@ -0,0 +1,29 @@ +/** + * @license + * Lodash (Custom Build) lodash.com/license | Underscore.js 1.8.3 underscorejs.org/LICENSE + * Build: `lodash core -o ./dist/lodash.core.js` + */ +;(function(){function n(n){return H(n)&&pn.call(n,"callee")&&!yn.call(n,"callee")}function t(n,t){return n.push.apply(n,t),n}function r(n){return function(t){return null==t?Z:t[n]}}function e(n,t,r,e,u){return u(n,function(n,u,o){r=e?(e=false,n):t(r,n,u,o)}),r}function u(n,t){return j(t,function(t){return n[t]})}function o(n){return n instanceof i?n:new i(n)}function i(n,t){this.__wrapped__=n,this.__actions__=[],this.__chain__=!!t}function c(n,t,r){if(typeof n!="function")throw new TypeError("Expected a function"); +return setTimeout(function(){n.apply(Z,r)},t)}function f(n,t){var r=true;return mn(n,function(n,e,u){return r=!!t(n,e,u)}),r}function a(n,t,r){for(var e=-1,u=n.length;++et}function b(n,t,r,e,u){return n===t||(null==n||null==t||!H(n)&&!H(t)?n!==n&&t!==t:y(n,t,r,e,b,u))}function y(n,t,r,e,u,o){var i=Nn(n),c=Nn(t),f=i?"[object Array]":hn.call(n),a=c?"[object Array]":hn.call(t),f="[object Arguments]"==f?"[object Object]":f,a="[object Arguments]"==a?"[object Object]":a,l="[object Object]"==f,c="[object Object]"==a,a=f==a;o||(o=[]);var p=An(o,function(t){return t[0]==n}),s=An(o,function(n){ +return n[0]==t});if(p&&s)return p[1]==t;if(o.push([n,t]),o.push([t,n]),a&&!l){if(i)r=T(n,t,r,e,u,o);else n:{switch(f){case"[object Boolean]":case"[object Date]":case"[object Number]":r=J(+n,+t);break n;case"[object Error]":r=n.name==t.name&&n.message==t.message;break n;case"[object RegExp]":case"[object String]":r=n==t+"";break n}r=false}return o.pop(),r}return 1&r||(i=l&&pn.call(n,"__wrapped__"),f=c&&pn.call(t,"__wrapped__"),!i&&!f)?!!a&&(r=B(n,t,r,e,u,o),o.pop(),r):(i=i?n.value():n,f=f?t.value():t, +r=u(i,f,r,e,o),o.pop(),r)}function g(n){return typeof n=="function"?n:null==n?X:(typeof n=="object"?d:r)(n)}function _(n,t){return nt&&(t=-t>u?0:u+t),r=r>u?u:r,0>r&&(r+=u),u=t>r?0:r-t>>>0,t>>>=0,r=Array(u);++ei))return false;var c=o.get(n),f=o.get(t);if(c&&f)return c==t&&f==n;for(var c=-1,f=true,a=2&r?[]:Z;++cr?jn(e+r,0):r:0,r=(r||0)-1;for(var u=t===t;++rarguments.length,mn); +}function G(n,t){var r;if(typeof t!="function")throw 
new TypeError("Expected a function");return n=Fn(n),function(){return 0<--n&&(r=t.apply(this,arguments)),1>=n&&(t=Z),r}}function J(n,t){return n===t||n!==n&&t!==t}function M(n){var t;return(t=null!=n)&&(t=n.length,t=typeof t=="number"&&-1=t),t&&!U(n)}function U(n){return!!V(n)&&(n=hn.call(n),"[object Function]"==n||"[object GeneratorFunction]"==n||"[object AsyncFunction]"==n||"[object Proxy]"==n)}function V(n){var t=typeof n; +return null!=n&&("object"==t||"function"==t)}function H(n){return null!=n&&typeof n=="object"}function K(n){return typeof n=="number"||H(n)&&"[object Number]"==hn.call(n)}function L(n){return typeof n=="string"||!Nn(n)&&H(n)&&"[object String]"==hn.call(n)}function Q(n){return typeof n=="string"?n:null==n?"":n+""}function W(n){return null==n?[]:u(n,Dn(n))}function X(n){return n}function Y(n,r,e){var u=Dn(r),o=h(r,u);null!=e||V(r)&&(o.length||!u.length)||(e=r,r=n,n=this,o=h(r,Dn(r)));var i=!(V(e)&&"chain"in e&&!e.chain),c=U(n); +return mn(o,function(e){var u=r[e];n[e]=u,c&&(n.prototype[e]=function(){var r=this.__chain__;if(i||r){var e=n(this.__wrapped__);return(e.__actions__=A(this.__actions__)).push({func:u,args:arguments,thisArg:n}),e.__chain__=r,e}return u.apply(n,t([this.value()],arguments))})}),n}var Z,nn=1/0,tn=/[&<>"']/g,rn=RegExp(tn.source),en=/^(?:0|[1-9]\d*)$/,un=typeof self=="object"&&self&&self.Object===Object&&self,on=typeof global=="object"&&global&&global.Object===Object&&global||un||Function("return this")(),cn=(un=typeof exports=="object"&&exports&&!exports.nodeType&&exports)&&typeof module=="object"&&module&&!module.nodeType&&module,fn=function(n){ +return function(t){return null==n?Z:n[t]}}({"&":"&","<":"<",">":">",'"':""","'":"'"}),an=Array.prototype,ln=Object.prototype,pn=ln.hasOwnProperty,sn=0,hn=ln.toString,vn=on._,bn=Object.create,yn=ln.propertyIsEnumerable,gn=on.isFinite,_n=function(n,t){return function(r){return n(t(r))}}(Object.keys,Object),jn=Math.max,dn=function(){function n(){}return function(t){return V(t)?bn?bn(t):(n.prototype=t,t=new n,n.prototype=Z,t):{}}}();i.prototype=dn(o.prototype),i.prototype.constructor=i; +var mn=function(n,t){return function(r,e){if(null==r)return r;if(!M(r))return n(r,e);for(var u=r.length,o=t?u:-1,i=Object(r);(t?o--:++or&&(r=jn(e+r,0));n:{for(t=g(t),e=n.length,r+=-1;++re||o&&c&&a||!u&&a||!i){r=1;break n}if(!o&&r { '4': 1, '6': 2 } + * + * // The `_.property` iteratee shorthand. + * _.countBy(['one', 'two', 'three'], 'length'); + * // => { '3': 2, '5': 1 } + */ +var countBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + ++result[key]; + } else { + baseAssignValue(result, key, 1); + } +}); + +module.exports = countBy; diff --git a/backend/node_modules/lodash/create.js b/backend/node_modules/lodash/create.js new file mode 100644 index 00000000..919edb85 --- /dev/null +++ b/backend/node_modules/lodash/create.js @@ -0,0 +1,43 @@ +var baseAssign = require('./_baseAssign'), + baseCreate = require('./_baseCreate'); + +/** + * Creates an object that inherits from the `prototype` object. If a + * `properties` object is given, its own enumerable string keyed properties + * are assigned to the created object. + * + * @static + * @memberOf _ + * @since 2.3.0 + * @category Object + * @param {Object} prototype The object to inherit from. + * @param {Object} [properties] The properties to assign to the object. + * @returns {Object} Returns the new object. 
+ * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * function Circle() { + * Shape.call(this); + * } + * + * Circle.prototype = _.create(Shape.prototype, { + * 'constructor': Circle + * }); + * + * var circle = new Circle; + * circle instanceof Circle; + * // => true + * + * circle instanceof Shape; + * // => true + */ +function create(prototype, properties) { + var result = baseCreate(prototype); + return properties == null ? result : baseAssign(result, properties); +} + +module.exports = create; diff --git a/backend/node_modules/lodash/curry.js b/backend/node_modules/lodash/curry.js new file mode 100644 index 00000000..918db1a4 --- /dev/null +++ b/backend/node_modules/lodash/curry.js @@ -0,0 +1,57 @@ +var createWrap = require('./_createWrap'); + +/** Used to compose bitmasks for function metadata. */ +var WRAP_CURRY_FLAG = 8; + +/** + * Creates a function that accepts arguments of `func` and either invokes + * `func` returning its result, if at least `arity` number of arguments have + * been provided, or returns a function that accepts the remaining `func` + * arguments, and so on. The arity of `func` may be specified if `func.length` + * is not sufficient. + * + * The `_.curry.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. + * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curry(abc); + * + * curried(1)(2)(3); + * // => [1, 2, 3] + * + * curried(1, 2)(3); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(1)(_, 3)(2); + * // => [1, 2, 3] + */ +function curry(func, arity, guard) { + arity = guard ? undefined : arity; + var result = createWrap(func, WRAP_CURRY_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curry.placeholder; + return result; +} + +// Assign default placeholders. +curry.placeholder = {}; + +module.exports = curry; diff --git a/backend/node_modules/lodash/curryRight.js b/backend/node_modules/lodash/curryRight.js new file mode 100644 index 00000000..c85b6f33 --- /dev/null +++ b/backend/node_modules/lodash/curryRight.js @@ -0,0 +1,54 @@ +var createWrap = require('./_createWrap'); + +/** Used to compose bitmasks for function metadata. */ +var WRAP_CURRY_RIGHT_FLAG = 16; + +/** + * This method is like `_.curry` except that arguments are applied to `func` + * in the manner of `_.partialRight` instead of `_.partial`. + * + * The `_.curryRight.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. 
+ * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curryRight(abc); + * + * curried(3)(2)(1); + * // => [1, 2, 3] + * + * curried(2, 3)(1); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(3)(1, _)(2); + * // => [1, 2, 3] + */ +function curryRight(func, arity, guard) { + arity = guard ? undefined : arity; + var result = createWrap(func, WRAP_CURRY_RIGHT_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curryRight.placeholder; + return result; +} + +// Assign default placeholders. +curryRight.placeholder = {}; + +module.exports = curryRight; diff --git a/backend/node_modules/lodash/date.js b/backend/node_modules/lodash/date.js new file mode 100644 index 00000000..cbf5b410 --- /dev/null +++ b/backend/node_modules/lodash/date.js @@ -0,0 +1,3 @@ +module.exports = { + 'now': require('./now') +}; diff --git a/backend/node_modules/lodash/debounce.js b/backend/node_modules/lodash/debounce.js new file mode 100644 index 00000000..8f751d53 --- /dev/null +++ b/backend/node_modules/lodash/debounce.js @@ -0,0 +1,191 @@ +var isObject = require('./isObject'), + now = require('./now'), + toNumber = require('./toNumber'); + +/** Error message constants. */ +var FUNC_ERROR_TEXT = 'Expected a function'; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max, + nativeMin = Math.min; + +/** + * Creates a debounced function that delays invoking `func` until after `wait` + * milliseconds have elapsed since the last time the debounced function was + * invoked. The debounced function comes with a `cancel` method to cancel + * delayed `func` invocations and a `flush` method to immediately invoke them. + * Provide `options` to indicate whether `func` should be invoked on the + * leading and/or trailing edge of the `wait` timeout. The `func` is invoked + * with the last arguments provided to the debounced function. Subsequent + * calls to the debounced function return the result of the last `func` + * invocation. + * + * **Note:** If `leading` and `trailing` options are `true`, `func` is + * invoked on the trailing edge of the timeout only if the debounced function + * is invoked more than once during the `wait` timeout. + * + * If `wait` is `0` and `leading` is `false`, `func` invocation is deferred + * until to the next tick, similar to `setTimeout` with a timeout of `0`. + * + * See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/) + * for details over the differences between `_.debounce` and `_.throttle`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to debounce. + * @param {number} [wait=0] The number of milliseconds to delay. + * @param {Object} [options={}] The options object. + * @param {boolean} [options.leading=false] + * Specify invoking on the leading edge of the timeout. + * @param {number} [options.maxWait] + * The maximum time `func` is allowed to be delayed before it's invoked. + * @param {boolean} [options.trailing=true] + * Specify invoking on the trailing edge of the timeout. + * @returns {Function} Returns the new debounced function. + * @example + * + * // Avoid costly calculations while the window size is in flux. + * jQuery(window).on('resize', _.debounce(calculateLayout, 150)); + * + * // Invoke `sendMail` when clicked, debouncing subsequent calls. 
+ * jQuery(element).on('click', _.debounce(sendMail, 300, { + * 'leading': true, + * 'trailing': false + * })); + * + * // Ensure `batchLog` is invoked once after 1 second of debounced calls. + * var debounced = _.debounce(batchLog, 250, { 'maxWait': 1000 }); + * var source = new EventSource('/stream'); + * jQuery(source).on('message', debounced); + * + * // Cancel the trailing debounced invocation. + * jQuery(window).on('popstate', debounced.cancel); + */ +function debounce(func, wait, options) { + var lastArgs, + lastThis, + maxWait, + result, + timerId, + lastCallTime, + lastInvokeTime = 0, + leading = false, + maxing = false, + trailing = true; + + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + wait = toNumber(wait) || 0; + if (isObject(options)) { + leading = !!options.leading; + maxing = 'maxWait' in options; + maxWait = maxing ? nativeMax(toNumber(options.maxWait) || 0, wait) : maxWait; + trailing = 'trailing' in options ? !!options.trailing : trailing; + } + + function invokeFunc(time) { + var args = lastArgs, + thisArg = lastThis; + + lastArgs = lastThis = undefined; + lastInvokeTime = time; + result = func.apply(thisArg, args); + return result; + } + + function leadingEdge(time) { + // Reset any `maxWait` timer. + lastInvokeTime = time; + // Start the timer for the trailing edge. + timerId = setTimeout(timerExpired, wait); + // Invoke the leading edge. + return leading ? invokeFunc(time) : result; + } + + function remainingWait(time) { + var timeSinceLastCall = time - lastCallTime, + timeSinceLastInvoke = time - lastInvokeTime, + timeWaiting = wait - timeSinceLastCall; + + return maxing + ? nativeMin(timeWaiting, maxWait - timeSinceLastInvoke) + : timeWaiting; + } + + function shouldInvoke(time) { + var timeSinceLastCall = time - lastCallTime, + timeSinceLastInvoke = time - lastInvokeTime; + + // Either this is the first call, activity has stopped and we're at the + // trailing edge, the system time has gone backwards and we're treating + // it as the trailing edge, or we've hit the `maxWait` limit. + return (lastCallTime === undefined || (timeSinceLastCall >= wait) || + (timeSinceLastCall < 0) || (maxing && timeSinceLastInvoke >= maxWait)); + } + + function timerExpired() { + var time = now(); + if (shouldInvoke(time)) { + return trailingEdge(time); + } + // Restart the timer. + timerId = setTimeout(timerExpired, remainingWait(time)); + } + + function trailingEdge(time) { + timerId = undefined; + + // Only invoke if we have `lastArgs` which means `func` has been + // debounced at least once. + if (trailing && lastArgs) { + return invokeFunc(time); + } + lastArgs = lastThis = undefined; + return result; + } + + function cancel() { + if (timerId !== undefined) { + clearTimeout(timerId); + } + lastInvokeTime = 0; + lastArgs = lastCallTime = lastThis = timerId = undefined; + } + + function flush() { + return timerId === undefined ? result : trailingEdge(now()); + } + + function debounced() { + var time = now(), + isInvoking = shouldInvoke(time); + + lastArgs = arguments; + lastThis = this; + lastCallTime = time; + + if (isInvoking) { + if (timerId === undefined) { + return leadingEdge(lastCallTime); + } + if (maxing) { + // Handle invocations in a tight loop. 
+ clearTimeout(timerId); + timerId = setTimeout(timerExpired, wait); + return invokeFunc(lastCallTime); + } + } + if (timerId === undefined) { + timerId = setTimeout(timerExpired, wait); + } + return result; + } + debounced.cancel = cancel; + debounced.flush = flush; + return debounced; +} + +module.exports = debounce; diff --git a/backend/node_modules/lodash/deburr.js b/backend/node_modules/lodash/deburr.js new file mode 100644 index 00000000..f85e314a --- /dev/null +++ b/backend/node_modules/lodash/deburr.js @@ -0,0 +1,45 @@ +var deburrLetter = require('./_deburrLetter'), + toString = require('./toString'); + +/** Used to match Latin Unicode letters (excluding mathematical operators). */ +var reLatin = /[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g; + +/** Used to compose unicode character classes. */ +var rsComboMarksRange = '\\u0300-\\u036f', + reComboHalfMarksRange = '\\ufe20-\\ufe2f', + rsComboSymbolsRange = '\\u20d0-\\u20ff', + rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange; + +/** Used to compose unicode capture groups. */ +var rsCombo = '[' + rsComboRange + ']'; + +/** + * Used to match [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks) and + * [combining diacritical marks for symbols](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks_for_Symbols). + */ +var reComboMark = RegExp(rsCombo, 'g'); + +/** + * Deburrs `string` by converting + * [Latin-1 Supplement](https://en.wikipedia.org/wiki/Latin-1_Supplement_(Unicode_block)#Character_table) + * and [Latin Extended-A](https://en.wikipedia.org/wiki/Latin_Extended-A) + * letters to basic Latin letters and removing + * [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to deburr. + * @returns {string} Returns the deburred string. + * @example + * + * _.deburr('déjà vu'); + * // => 'deja vu' + */ +function deburr(string) { + string = toString(string); + return string && string.replace(reLatin, deburrLetter).replace(reComboMark, ''); +} + +module.exports = deburr; diff --git a/backend/node_modules/lodash/defaultTo.js b/backend/node_modules/lodash/defaultTo.js new file mode 100644 index 00000000..5b333592 --- /dev/null +++ b/backend/node_modules/lodash/defaultTo.js @@ -0,0 +1,25 @@ +/** + * Checks `value` to determine whether a default value should be returned in + * its place. The `defaultValue` is returned if `value` is `NaN`, `null`, + * or `undefined`. + * + * @static + * @memberOf _ + * @since 4.14.0 + * @category Util + * @param {*} value The value to check. + * @param {*} defaultValue The default value. + * @returns {*} Returns the resolved value. + * @example + * + * _.defaultTo(1, 10); + * // => 1 + * + * _.defaultTo(undefined, 10); + * // => 10 + */ +function defaultTo(value, defaultValue) { + return (value == null || value !== value) ? defaultValue : value; +} + +module.exports = defaultTo; diff --git a/backend/node_modules/lodash/defaults.js b/backend/node_modules/lodash/defaults.js new file mode 100644 index 00000000..c74df044 --- /dev/null +++ b/backend/node_modules/lodash/defaults.js @@ -0,0 +1,64 @@ +var baseRest = require('./_baseRest'), + eq = require('./eq'), + isIterateeCall = require('./_isIterateeCall'), + keysIn = require('./keysIn'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. 
*/ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Assigns own and inherited enumerable string keyed properties of source + * objects to the destination object for all destination properties that + * resolve to `undefined`. Source objects are applied from left to right. + * Once a property is set, additional values of the same property are ignored. + * + * **Note:** This method mutates `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaultsDeep + * @example + * + * _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ +var defaults = baseRest(function(object, sources) { + object = Object(object); + + var index = -1; + var length = sources.length; + var guard = length > 2 ? sources[2] : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + length = 1; + } + + while (++index < length) { + var source = sources[index]; + var props = keysIn(source); + var propsIndex = -1; + var propsLength = props.length; + + while (++propsIndex < propsLength) { + var key = props[propsIndex]; + var value = object[key]; + + if (value === undefined || + (eq(value, objectProto[key]) && !hasOwnProperty.call(object, key))) { + object[key] = source[key]; + } + } + } + + return object; +}); + +module.exports = defaults; diff --git a/backend/node_modules/lodash/defaultsDeep.js b/backend/node_modules/lodash/defaultsDeep.js new file mode 100644 index 00000000..9b5fa3ee --- /dev/null +++ b/backend/node_modules/lodash/defaultsDeep.js @@ -0,0 +1,30 @@ +var apply = require('./_apply'), + baseRest = require('./_baseRest'), + customDefaultsMerge = require('./_customDefaultsMerge'), + mergeWith = require('./mergeWith'); + +/** + * This method is like `_.defaults` except that it recursively assigns + * default properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 3.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaults + * @example + * + * _.defaultsDeep({ 'a': { 'b': 2 } }, { 'a': { 'b': 1, 'c': 3 } }); + * // => { 'a': { 'b': 2, 'c': 3 } } + */ +var defaultsDeep = baseRest(function(args) { + args.push(undefined, customDefaultsMerge); + return apply(mergeWith, undefined, args); +}); + +module.exports = defaultsDeep; diff --git a/backend/node_modules/lodash/defer.js b/backend/node_modules/lodash/defer.js new file mode 100644 index 00000000..f6d6c6fa --- /dev/null +++ b/backend/node_modules/lodash/defer.js @@ -0,0 +1,26 @@ +var baseDelay = require('./_baseDelay'), + baseRest = require('./_baseRest'); + +/** + * Defers invoking the `func` until the current call stack has cleared. Any + * additional arguments are provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to defer. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. + * @example + * + * _.defer(function(text) { + * console.log(text); + * }, 'deferred'); + * // => Logs 'deferred' after one millisecond. 
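To make the difference between the two default-filling helpers above concrete, a small sketch (the outputs follow from the JSDoc examples):

```js
const _ = require('lodash');

// `_.defaults` only fills top-level keys that resolve to `undefined`,
// so the nested object is kept as-is:
_.defaults({ a: { b: 2 } }, { a: { b: 1, c: 3 } });
// => { a: { b: 2 } }

// `_.defaultsDeep` recurses into nested objects and fills missing keys:
_.defaultsDeep({ a: { b: 2 } }, { a: { b: 1, c: 3 } });
// => { a: { b: 2, c: 3 } }

// Both mutate and return the first argument.
```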
+ */ +var defer = baseRest(function(func, args) { + return baseDelay(func, 1, args); +}); + +module.exports = defer; diff --git a/backend/node_modules/lodash/delay.js b/backend/node_modules/lodash/delay.js new file mode 100644 index 00000000..bd554796 --- /dev/null +++ b/backend/node_modules/lodash/delay.js @@ -0,0 +1,28 @@ +var baseDelay = require('./_baseDelay'), + baseRest = require('./_baseRest'), + toNumber = require('./toNumber'); + +/** + * Invokes `func` after `wait` milliseconds. Any additional arguments are + * provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. + * @example + * + * _.delay(function(text) { + * console.log(text); + * }, 1000, 'later'); + * // => Logs 'later' after one second. + */ +var delay = baseRest(function(func, wait, args) { + return baseDelay(func, toNumber(wait) || 0, args); +}); + +module.exports = delay; diff --git a/backend/node_modules/lodash/difference.js b/backend/node_modules/lodash/difference.js new file mode 100644 index 00000000..fa28bb30 --- /dev/null +++ b/backend/node_modules/lodash/difference.js @@ -0,0 +1,33 @@ +var baseDifference = require('./_baseDifference'), + baseFlatten = require('./_baseFlatten'), + baseRest = require('./_baseRest'), + isArrayLikeObject = require('./isArrayLikeObject'); + +/** + * Creates an array of `array` values not included in the other given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. The order and references of result values are + * determined by the first array. + * + * **Note:** Unlike `_.pullAll`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @returns {Array} Returns the new array of filtered values. + * @see _.without, _.xor + * @example + * + * _.difference([2, 1], [2, 3]); + * // => [1] + */ +var difference = baseRest(function(array, values) { + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true)) + : []; +}); + +module.exports = difference; diff --git a/backend/node_modules/lodash/differenceBy.js b/backend/node_modules/lodash/differenceBy.js new file mode 100644 index 00000000..2cd63e7e --- /dev/null +++ b/backend/node_modules/lodash/differenceBy.js @@ -0,0 +1,44 @@ +var baseDifference = require('./_baseDifference'), + baseFlatten = require('./_baseFlatten'), + baseIteratee = require('./_baseIteratee'), + baseRest = require('./_baseRest'), + isArrayLikeObject = require('./isArrayLikeObject'), + last = require('./last'); + +/** + * This method is like `_.difference` except that it accepts `iteratee` which + * is invoked for each element of `array` and `values` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * **Note:** Unlike `_.pullAllBy`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. 
+ * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.differenceBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [1.2] + * + * // The `_.property` iteratee shorthand. + * _.differenceBy([{ 'x': 2 }, { 'x': 1 }], [{ 'x': 1 }], 'x'); + * // => [{ 'x': 2 }] + */ +var differenceBy = baseRest(function(array, values) { + var iteratee = last(values); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), baseIteratee(iteratee, 2)) + : []; +}); + +module.exports = differenceBy; diff --git a/backend/node_modules/lodash/differenceWith.js b/backend/node_modules/lodash/differenceWith.js new file mode 100644 index 00000000..c0233f4b --- /dev/null +++ b/backend/node_modules/lodash/differenceWith.js @@ -0,0 +1,40 @@ +var baseDifference = require('./_baseDifference'), + baseFlatten = require('./_baseFlatten'), + baseRest = require('./_baseRest'), + isArrayLikeObject = require('./isArrayLikeObject'), + last = require('./last'); + +/** + * This method is like `_.difference` except that it accepts `comparator` + * which is invoked to compare elements of `array` to `values`. The order and + * references of result values are determined by the first array. The comparator + * is invoked with two arguments: (arrVal, othVal). + * + * **Note:** Unlike `_.pullAllWith`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * + * _.differenceWith(objects, [{ 'x': 1, 'y': 2 }], _.isEqual); + * // => [{ 'x': 2, 'y': 1 }] + */ +var differenceWith = baseRest(function(array, values) { + var comparator = last(values); + if (isArrayLikeObject(comparator)) { + comparator = undefined; + } + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), undefined, comparator) + : []; +}); + +module.exports = differenceWith; diff --git a/backend/node_modules/lodash/divide.js b/backend/node_modules/lodash/divide.js new file mode 100644 index 00000000..8cae0cd1 --- /dev/null +++ b/backend/node_modules/lodash/divide.js @@ -0,0 +1,22 @@ +var createMathOperation = require('./_createMathOperation'); + +/** + * Divide two numbers. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Math + * @param {number} dividend The first number in a division. + * @param {number} divisor The second number in a division. + * @returns {number} Returns the quotient. + * @example + * + * _.divide(6, 4); + * // => 1.5 + */ +var divide = createMathOperation(function(dividend, divisor) { + return dividend / divisor; +}, 1); + +module.exports = divide; diff --git a/backend/node_modules/lodash/drop.js b/backend/node_modules/lodash/drop.js new file mode 100644 index 00000000..d5c3cbaa --- /dev/null +++ b/backend/node_modules/lodash/drop.js @@ -0,0 +1,38 @@ +var baseSlice = require('./_baseSlice'), + toInteger = require('./toInteger'); + +/** + * Creates a slice of `array` with `n` elements dropped from the beginning. 
+ * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.drop([1, 2, 3]); + * // => [2, 3] + * + * _.drop([1, 2, 3], 2); + * // => [3] + * + * _.drop([1, 2, 3], 5); + * // => [] + * + * _.drop([1, 2, 3], 0); + * // => [1, 2, 3] + */ +function drop(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + return baseSlice(array, n < 0 ? 0 : n, length); +} + +module.exports = drop; diff --git a/backend/node_modules/lodash/dropRight.js b/backend/node_modules/lodash/dropRight.js new file mode 100644 index 00000000..441fe996 --- /dev/null +++ b/backend/node_modules/lodash/dropRight.js @@ -0,0 +1,39 @@ +var baseSlice = require('./_baseSlice'), + toInteger = require('./toInteger'); + +/** + * Creates a slice of `array` with `n` elements dropped from the end. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.dropRight([1, 2, 3]); + * // => [1, 2] + * + * _.dropRight([1, 2, 3], 2); + * // => [1] + * + * _.dropRight([1, 2, 3], 5); + * // => [] + * + * _.dropRight([1, 2, 3], 0); + * // => [1, 2, 3] + */ +function dropRight(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + n = length - n; + return baseSlice(array, 0, n < 0 ? 0 : n); +} + +module.exports = dropRight; diff --git a/backend/node_modules/lodash/dropRightWhile.js b/backend/node_modules/lodash/dropRightWhile.js new file mode 100644 index 00000000..9ad36a04 --- /dev/null +++ b/backend/node_modules/lodash/dropRightWhile.js @@ -0,0 +1,45 @@ +var baseIteratee = require('./_baseIteratee'), + baseWhile = require('./_baseWhile'); + +/** + * Creates a slice of `array` excluding elements dropped from the end. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.dropRightWhile(users, function(o) { return !o.active; }); + * // => objects for ['barney'] + * + * // The `_.matches` iteratee shorthand. + * _.dropRightWhile(users, { 'user': 'pebbles', 'active': false }); + * // => objects for ['barney', 'fred'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.dropRightWhile(users, ['active', false]); + * // => objects for ['barney'] + * + * // The `_.property` iteratee shorthand. + * _.dropRightWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ +function dropRightWhile(array, predicate) { + return (array && array.length) + ? 
baseWhile(array, baseIteratee(predicate, 3), true, true) + : []; +} + +module.exports = dropRightWhile; diff --git a/backend/node_modules/lodash/dropWhile.js b/backend/node_modules/lodash/dropWhile.js new file mode 100644 index 00000000..903ef568 --- /dev/null +++ b/backend/node_modules/lodash/dropWhile.js @@ -0,0 +1,45 @@ +var baseIteratee = require('./_baseIteratee'), + baseWhile = require('./_baseWhile'); + +/** + * Creates a slice of `array` excluding elements dropped from the beginning. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.dropWhile(users, function(o) { return !o.active; }); + * // => objects for ['pebbles'] + * + * // The `_.matches` iteratee shorthand. + * _.dropWhile(users, { 'user': 'barney', 'active': false }); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.dropWhile(users, ['active', false]); + * // => objects for ['pebbles'] + * + * // The `_.property` iteratee shorthand. + * _.dropWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ +function dropWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, baseIteratee(predicate, 3), true) + : []; +} + +module.exports = dropWhile; diff --git a/backend/node_modules/lodash/each.js b/backend/node_modules/lodash/each.js new file mode 100644 index 00000000..8800f420 --- /dev/null +++ b/backend/node_modules/lodash/each.js @@ -0,0 +1 @@ +module.exports = require('./forEach'); diff --git a/backend/node_modules/lodash/eachRight.js b/backend/node_modules/lodash/eachRight.js new file mode 100644 index 00000000..3252b2ab --- /dev/null +++ b/backend/node_modules/lodash/eachRight.js @@ -0,0 +1 @@ +module.exports = require('./forEachRight'); diff --git a/backend/node_modules/lodash/endsWith.js b/backend/node_modules/lodash/endsWith.js new file mode 100644 index 00000000..76fc866e --- /dev/null +++ b/backend/node_modules/lodash/endsWith.js @@ -0,0 +1,43 @@ +var baseClamp = require('./_baseClamp'), + baseToString = require('./_baseToString'), + toInteger = require('./toInteger'), + toString = require('./toString'); + +/** + * Checks if `string` ends with the given target string. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to inspect. + * @param {string} [target] The string to search for. + * @param {number} [position=string.length] The position to search up to. + * @returns {boolean} Returns `true` if `string` ends with `target`, + * else `false`. + * @example + * + * _.endsWith('abc', 'c'); + * // => true + * + * _.endsWith('abc', 'b'); + * // => false + * + * _.endsWith('abc', 'b', 2); + * // => true + */ +function endsWith(string, target, position) { + string = toString(string); + target = baseToString(target); + + var length = string.length; + position = position === undefined + ? 
length + : baseClamp(toInteger(position), 0, length); + + var end = position; + position -= target.length; + return position >= 0 && string.slice(position, end) == target; +} + +module.exports = endsWith; diff --git a/backend/node_modules/lodash/entries.js b/backend/node_modules/lodash/entries.js new file mode 100644 index 00000000..7a88df20 --- /dev/null +++ b/backend/node_modules/lodash/entries.js @@ -0,0 +1 @@ +module.exports = require('./toPairs'); diff --git a/backend/node_modules/lodash/entriesIn.js b/backend/node_modules/lodash/entriesIn.js new file mode 100644 index 00000000..f6c6331c --- /dev/null +++ b/backend/node_modules/lodash/entriesIn.js @@ -0,0 +1 @@ +module.exports = require('./toPairsIn'); diff --git a/backend/node_modules/lodash/eq.js b/backend/node_modules/lodash/eq.js new file mode 100644 index 00000000..a9406880 --- /dev/null +++ b/backend/node_modules/lodash/eq.js @@ -0,0 +1,37 @@ +/** + * Performs a + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * comparison between two values to determine if they are equivalent. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.eq(object, object); + * // => true + * + * _.eq(object, other); + * // => false + * + * _.eq('a', 'a'); + * // => true + * + * _.eq('a', Object('a')); + * // => false + * + * _.eq(NaN, NaN); + * // => true + */ +function eq(value, other) { + return value === other || (value !== value && other !== other); +} + +module.exports = eq; diff --git a/backend/node_modules/lodash/escape.js b/backend/node_modules/lodash/escape.js new file mode 100644 index 00000000..9247e002 --- /dev/null +++ b/backend/node_modules/lodash/escape.js @@ -0,0 +1,43 @@ +var escapeHtmlChar = require('./_escapeHtmlChar'), + toString = require('./toString'); + +/** Used to match HTML entities and HTML characters. */ +var reUnescapedHtml = /[&<>"']/g, + reHasUnescapedHtml = RegExp(reUnescapedHtml.source); + +/** + * Converts the characters "&", "<", ">", '"', and "'" in `string` to their + * corresponding HTML entities. + * + * **Note:** No other characters are escaped. To escape additional + * characters use a third-party library like [_he_](https://mths.be/he). + * + * Though the ">" character is escaped for symmetry, characters like + * ">" and "/" don't need escaping in HTML and have no special meaning + * unless they're part of a tag or unquoted attribute value. See + * [Mathias Bynens's article](https://mathiasbynens.be/notes/ambiguous-ampersands) + * (under "semi-related fun fact") for more details. + * + * When working with HTML you should always + * [quote attribute values](http://wonko.com/post/html-escaping) to reduce + * XSS vectors. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category String + * @param {string} [string=''] The string to escape. + * @returns {string} Returns the escaped string. + * @example + * + * _.escape('fred, barney, & pebbles'); + * // => 'fred, barney, & pebbles' + */ +function escape(string) { + string = toString(string); + return (string && reHasUnescapedHtml.test(string)) + ? 
string.replace(reUnescapedHtml, escapeHtmlChar) + : string; +} + +module.exports = escape; diff --git a/backend/node_modules/lodash/escapeRegExp.js b/backend/node_modules/lodash/escapeRegExp.js new file mode 100644 index 00000000..0a58c69f --- /dev/null +++ b/backend/node_modules/lodash/escapeRegExp.js @@ -0,0 +1,32 @@ +var toString = require('./toString'); + +/** + * Used to match `RegExp` + * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). + */ +var reRegExpChar = /[\\^$.*+?()[\]{}|]/g, + reHasRegExpChar = RegExp(reRegExpChar.source); + +/** + * Escapes the `RegExp` special characters "^", "$", "\", ".", "*", "+", + * "?", "(", ")", "[", "]", "{", "}", and "|" in `string`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to escape. + * @returns {string} Returns the escaped string. + * @example + * + * _.escapeRegExp('[lodash](https://lodash.com/)'); + * // => '\[lodash\]\(https://lodash\.com/\)' + */ +function escapeRegExp(string) { + string = toString(string); + return (string && reHasRegExpChar.test(string)) + ? string.replace(reRegExpChar, '\\$&') + : string; +} + +module.exports = escapeRegExp; diff --git a/backend/node_modules/lodash/every.js b/backend/node_modules/lodash/every.js new file mode 100644 index 00000000..25080dac --- /dev/null +++ b/backend/node_modules/lodash/every.js @@ -0,0 +1,56 @@ +var arrayEvery = require('./_arrayEvery'), + baseEvery = require('./_baseEvery'), + baseIteratee = require('./_baseIteratee'), + isArray = require('./isArray'), + isIterateeCall = require('./_isIterateeCall'); + +/** + * Checks if `predicate` returns truthy for **all** elements of `collection`. + * Iteration is stopped once `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * **Note:** This method returns `true` for + * [empty collections](https://en.wikipedia.org/wiki/Empty_set) because + * [everything is true](https://en.wikipedia.org/wiki/Vacuous_truth) of + * elements of empty collections. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. + * @example + * + * _.every([true, 1, null, 'yes'], Boolean); + * // => false + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.every(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.every(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.every(users, 'active'); + * // => false + */ +function every(collection, predicate, guard) { + var func = isArray(collection) ? 
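A quick illustrative check of the two escaping helpers above; note that `_.escape` turns `&` into the HTML entity `&amp;`:

```js
const _ = require('lodash');

_.escape('fred, barney, & pebbles');
// => 'fred, barney, &amp; pebbles'

_.escapeRegExp('[lodash](https://lodash.com/)');
// => '\[lodash\]\(https://lodash\.com/\)'
```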
arrayEvery : baseEvery; + if (guard && isIterateeCall(collection, predicate, guard)) { + predicate = undefined; + } + return func(collection, baseIteratee(predicate, 3)); +} + +module.exports = every; diff --git a/backend/node_modules/lodash/extend.js b/backend/node_modules/lodash/extend.js new file mode 100644 index 00000000..e00166c2 --- /dev/null +++ b/backend/node_modules/lodash/extend.js @@ -0,0 +1 @@ +module.exports = require('./assignIn'); diff --git a/backend/node_modules/lodash/extendWith.js b/backend/node_modules/lodash/extendWith.js new file mode 100644 index 00000000..dbdcb3b4 --- /dev/null +++ b/backend/node_modules/lodash/extendWith.js @@ -0,0 +1 @@ +module.exports = require('./assignInWith'); diff --git a/backend/node_modules/lodash/fill.js b/backend/node_modules/lodash/fill.js new file mode 100644 index 00000000..ae13aa1c --- /dev/null +++ b/backend/node_modules/lodash/fill.js @@ -0,0 +1,45 @@ +var baseFill = require('./_baseFill'), + isIterateeCall = require('./_isIterateeCall'); + +/** + * Fills elements of `array` with `value` from `start` up to, but not + * including, `end`. + * + * **Note:** This method mutates `array`. + * + * @static + * @memberOf _ + * @since 3.2.0 + * @category Array + * @param {Array} array The array to fill. + * @param {*} value The value to fill `array` with. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3]; + * + * _.fill(array, 'a'); + * console.log(array); + * // => ['a', 'a', 'a'] + * + * _.fill(Array(3), 2); + * // => [2, 2, 2] + * + * _.fill([4, 6, 8, 10], '*', 1, 3); + * // => [4, '*', '*', 10] + */ +function fill(array, value, start, end) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + if (start && typeof start != 'number' && isIterateeCall(array, value, start)) { + start = 0; + end = length; + } + return baseFill(array, value, start, end); +} + +module.exports = fill; diff --git a/backend/node_modules/lodash/filter.js b/backend/node_modules/lodash/filter.js new file mode 100644 index 00000000..89e0c8c4 --- /dev/null +++ b/backend/node_modules/lodash/filter.js @@ -0,0 +1,52 @@ +var arrayFilter = require('./_arrayFilter'), + baseFilter = require('./_baseFilter'), + baseIteratee = require('./_baseIteratee'), + isArray = require('./isArray'); + +/** + * Iterates over elements of `collection`, returning an array of all elements + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * **Note:** Unlike `_.remove`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.reject + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * _.filter(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.filter(users, { 'age': 36, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.filter(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. 
+ * _.filter(users, 'active'); + * // => objects for ['barney'] + * + * // Combining several predicates using `_.overEvery` or `_.overSome`. + * _.filter(users, _.overSome([{ 'age': 36 }, ['age', 40]])); + * // => objects for ['fred', 'barney'] + */ +function filter(collection, predicate) { + var func = isArray(collection) ? arrayFilter : baseFilter; + return func(collection, baseIteratee(predicate, 3)); +} + +module.exports = filter; diff --git a/backend/node_modules/lodash/find.js b/backend/node_modules/lodash/find.js new file mode 100644 index 00000000..de732ccb --- /dev/null +++ b/backend/node_modules/lodash/find.js @@ -0,0 +1,42 @@ +var createFind = require('./_createFind'), + findIndex = require('./findIndex'); + +/** + * Iterates over elements of `collection`, returning the first element + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false }, + * { 'user': 'pebbles', 'age': 1, 'active': true } + * ]; + * + * _.find(users, function(o) { return o.age < 40; }); + * // => object for 'barney' + * + * // The `_.matches` iteratee shorthand. + * _.find(users, { 'age': 1, 'active': true }); + * // => object for 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.find(users, ['active', false]); + * // => object for 'fred' + * + * // The `_.property` iteratee shorthand. + * _.find(users, 'active'); + * // => object for 'barney' + */ +var find = createFind(findIndex); + +module.exports = find; diff --git a/backend/node_modules/lodash/findIndex.js b/backend/node_modules/lodash/findIndex.js new file mode 100644 index 00000000..4689069f --- /dev/null +++ b/backend/node_modules/lodash/findIndex.js @@ -0,0 +1,55 @@ +var baseFindIndex = require('./_baseFindIndex'), + baseIteratee = require('./_baseIteratee'), + toInteger = require('./toInteger'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max; + +/** + * This method is like `_.find` except that it returns the index of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.findIndex(users, function(o) { return o.user == 'barney'; }); + * // => 0 + * + * // The `_.matches` iteratee shorthand. + * _.findIndex(users, { 'user': 'fred', 'active': false }); + * // => 1 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findIndex(users, ['active', false]); + * // => 0 + * + * // The `_.property` iteratee shorthand. 
+ * _.findIndex(users, 'active'); + * // => 2 + */ +function findIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseFindIndex(array, baseIteratee(predicate, 3), index); +} + +module.exports = findIndex; diff --git a/backend/node_modules/lodash/findKey.js b/backend/node_modules/lodash/findKey.js new file mode 100644 index 00000000..cac0248a --- /dev/null +++ b/backend/node_modules/lodash/findKey.js @@ -0,0 +1,44 @@ +var baseFindKey = require('./_baseFindKey'), + baseForOwn = require('./_baseForOwn'), + baseIteratee = require('./_baseIteratee'); + +/** + * This method is like `_.find` except that it returns the key of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. + * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findKey(users, function(o) { return o.age < 40; }); + * // => 'barney' (iteration order is not guaranteed) + * + * // The `_.matches` iteratee shorthand. + * _.findKey(users, { 'age': 1, 'active': true }); + * // => 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. + * _.findKey(users, 'active'); + * // => 'barney' + */ +function findKey(object, predicate) { + return baseFindKey(object, baseIteratee(predicate, 3), baseForOwn); +} + +module.exports = findKey; diff --git a/backend/node_modules/lodash/findLast.js b/backend/node_modules/lodash/findLast.js new file mode 100644 index 00000000..70b4271d --- /dev/null +++ b/backend/node_modules/lodash/findLast.js @@ -0,0 +1,25 @@ +var createFind = require('./_createFind'), + findLastIndex = require('./findLastIndex'); + +/** + * This method is like `_.find` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=collection.length-1] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * _.findLast([1, 2, 3, 4], function(n) { + * return n % 2 == 1; + * }); + * // => 3 + */ +var findLast = createFind(findLastIndex); + +module.exports = findLast; diff --git a/backend/node_modules/lodash/findLastIndex.js b/backend/node_modules/lodash/findLastIndex.js new file mode 100644 index 00000000..7da3431f --- /dev/null +++ b/backend/node_modules/lodash/findLastIndex.js @@ -0,0 +1,59 @@ +var baseFindIndex = require('./_baseFindIndex'), + baseIteratee = require('./_baseIteratee'), + toInteger = require('./toInteger'); + +/* Built-in method references for those with the same name as other `lodash` methods. 
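The `fromIndex` handling in `findIndex` above also accepts negative offsets, which are clamped to the start of the array. A small sketch using the same `users` fixture from the JSDoc:

```js
const _ = require('lodash');

const users = [
  { user: 'barney',  active: false },
  { user: 'fred',    active: false },
  { user: 'pebbles', active: true }
];

// Start searching at index 2: 'fred' (index 1) is skipped, so no match.
_.findIndex(users, { user: 'fred' }, 2);
// => -1

// A negative `fromIndex` is an offset from the end (here it resolves to index 1).
_.findIndex(users, ['active', false], -2);
// => 1
```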
*/ +var nativeMax = Math.max, + nativeMin = Math.min; + +/** + * This method is like `_.findIndex` except that it iterates over elements + * of `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.findLastIndex(users, function(o) { return o.user == 'pebbles'; }); + * // => 2 + * + * // The `_.matches` iteratee shorthand. + * _.findLastIndex(users, { 'user': 'barney', 'active': true }); + * // => 0 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastIndex(users, ['active', false]); + * // => 2 + * + * // The `_.property` iteratee shorthand. + * _.findLastIndex(users, 'active'); + * // => 0 + */ +function findLastIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length - 1; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = fromIndex < 0 + ? nativeMax(length + index, 0) + : nativeMin(index, length - 1); + } + return baseFindIndex(array, baseIteratee(predicate, 3), index, true); +} + +module.exports = findLastIndex; diff --git a/backend/node_modules/lodash/findLastKey.js b/backend/node_modules/lodash/findLastKey.js new file mode 100644 index 00000000..66fb9fbc --- /dev/null +++ b/backend/node_modules/lodash/findLastKey.js @@ -0,0 +1,44 @@ +var baseFindKey = require('./_baseFindKey'), + baseForOwnRight = require('./_baseForOwnRight'), + baseIteratee = require('./_baseIteratee'); + +/** + * This method is like `_.findKey` except that it iterates over elements of + * a collection in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. + * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findLastKey(users, function(o) { return o.age < 40; }); + * // => returns 'pebbles' assuming `_.findKey` returns 'barney' + * + * // The `_.matches` iteratee shorthand. + * _.findLastKey(users, { 'age': 36, 'active': true }); + * // => 'barney' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. 
+ * _.findLastKey(users, 'active'); + * // => 'pebbles' + */ +function findLastKey(object, predicate) { + return baseFindKey(object, baseIteratee(predicate, 3), baseForOwnRight); +} + +module.exports = findLastKey; diff --git a/backend/node_modules/lodash/first.js b/backend/node_modules/lodash/first.js new file mode 100644 index 00000000..53f4ad13 --- /dev/null +++ b/backend/node_modules/lodash/first.js @@ -0,0 +1 @@ +module.exports = require('./head'); diff --git a/backend/node_modules/lodash/flatMap.js b/backend/node_modules/lodash/flatMap.js new file mode 100644 index 00000000..e6685068 --- /dev/null +++ b/backend/node_modules/lodash/flatMap.js @@ -0,0 +1,29 @@ +var baseFlatten = require('./_baseFlatten'), + map = require('./map'); + +/** + * Creates a flattened array of values by running each element in `collection` + * thru `iteratee` and flattening the mapped results. The iteratee is invoked + * with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [n, n]; + * } + * + * _.flatMap([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ +function flatMap(collection, iteratee) { + return baseFlatten(map(collection, iteratee), 1); +} + +module.exports = flatMap; diff --git a/backend/node_modules/lodash/flatMapDeep.js b/backend/node_modules/lodash/flatMapDeep.js new file mode 100644 index 00000000..4653d603 --- /dev/null +++ b/backend/node_modules/lodash/flatMapDeep.js @@ -0,0 +1,31 @@ +var baseFlatten = require('./_baseFlatten'), + map = require('./map'); + +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0; + +/** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDeep([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ +function flatMapDeep(collection, iteratee) { + return baseFlatten(map(collection, iteratee), INFINITY); +} + +module.exports = flatMapDeep; diff --git a/backend/node_modules/lodash/flatMapDepth.js b/backend/node_modules/lodash/flatMapDepth.js new file mode 100644 index 00000000..6d72005c --- /dev/null +++ b/backend/node_modules/lodash/flatMapDepth.js @@ -0,0 +1,31 @@ +var baseFlatten = require('./_baseFlatten'), + map = require('./map'), + toInteger = require('./toInteger'); + +/** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results up to `depth` times. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. 
+ * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDepth([1, 2], duplicate, 2); + * // => [[1, 1], [2, 2]] + */ +function flatMapDepth(collection, iteratee, depth) { + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(map(collection, iteratee), depth); +} + +module.exports = flatMapDepth; diff --git a/backend/node_modules/lodash/flatten.js b/backend/node_modules/lodash/flatten.js new file mode 100644 index 00000000..3f09f7f7 --- /dev/null +++ b/backend/node_modules/lodash/flatten.js @@ -0,0 +1,22 @@ +var baseFlatten = require('./_baseFlatten'); + +/** + * Flattens `array` a single level deep. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flatten([1, [2, [3, [4]], 5]]); + * // => [1, 2, [3, [4]], 5] + */ +function flatten(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, 1) : []; +} + +module.exports = flatten; diff --git a/backend/node_modules/lodash/flattenDeep.js b/backend/node_modules/lodash/flattenDeep.js new file mode 100644 index 00000000..8ad585cf --- /dev/null +++ b/backend/node_modules/lodash/flattenDeep.js @@ -0,0 +1,25 @@ +var baseFlatten = require('./_baseFlatten'); + +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0; + +/** + * Recursively flattens `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flattenDeep([1, [2, [3, [4]], 5]]); + * // => [1, 2, 3, 4, 5] + */ +function flattenDeep(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, INFINITY) : []; +} + +module.exports = flattenDeep; diff --git a/backend/node_modules/lodash/flattenDepth.js b/backend/node_modules/lodash/flattenDepth.js new file mode 100644 index 00000000..441fdcc2 --- /dev/null +++ b/backend/node_modules/lodash/flattenDepth.js @@ -0,0 +1,33 @@ +var baseFlatten = require('./_baseFlatten'), + toInteger = require('./toInteger'); + +/** + * Recursively flatten `array` up to `depth` times. + * + * @static + * @memberOf _ + * @since 4.4.0 + * @category Array + * @param {Array} array The array to flatten. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. + * @example + * + * var array = [1, [2, [3, [4]], 5]]; + * + * _.flattenDepth(array, 1); + * // => [1, 2, [3, [4]], 5] + * + * _.flattenDepth(array, 2); + * // => [1, 2, 3, [4], 5] + */ +function flattenDepth(array, depth) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(array, depth); +} + +module.exports = flattenDepth; diff --git a/backend/node_modules/lodash/flip.js b/backend/node_modules/lodash/flip.js new file mode 100644 index 00000000..c28dd789 --- /dev/null +++ b/backend/node_modules/lodash/flip.js @@ -0,0 +1,28 @@ +var createWrap = require('./_createWrap'); + +/** Used to compose bitmasks for function metadata. */ +var WRAP_FLIP_FLAG = 512; + +/** + * Creates a function that invokes `func` with arguments reversed. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to flip arguments for. + * @returns {Function} Returns the new flipped function. 
+ * @example + * + * var flipped = _.flip(function() { + * return _.toArray(arguments); + * }); + * + * flipped('a', 'b', 'c', 'd'); + * // => ['d', 'c', 'b', 'a'] + */ +function flip(func) { + return createWrap(func, WRAP_FLIP_FLAG); +} + +module.exports = flip; diff --git a/backend/node_modules/lodash/floor.js b/backend/node_modules/lodash/floor.js new file mode 100644 index 00000000..ab6dfa28 --- /dev/null +++ b/backend/node_modules/lodash/floor.js @@ -0,0 +1,26 @@ +var createRound = require('./_createRound'); + +/** + * Computes `number` rounded down to `precision`. + * + * @static + * @memberOf _ + * @since 3.10.0 + * @category Math + * @param {number} number The number to round down. + * @param {number} [precision=0] The precision to round down to. + * @returns {number} Returns the rounded down number. + * @example + * + * _.floor(4.006); + * // => 4 + * + * _.floor(0.046, 2); + * // => 0.04 + * + * _.floor(4060, -2); + * // => 4000 + */ +var floor = createRound('floor'); + +module.exports = floor; diff --git a/backend/node_modules/lodash/flow.js b/backend/node_modules/lodash/flow.js new file mode 100644 index 00000000..74b6b62d --- /dev/null +++ b/backend/node_modules/lodash/flow.js @@ -0,0 +1,27 @@ +var createFlow = require('./_createFlow'); + +/** + * Creates a function that returns the result of invoking the given functions + * with the `this` binding of the created function, where each successive + * invocation is supplied the return value of the previous. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Util + * @param {...(Function|Function[])} [funcs] The functions to invoke. + * @returns {Function} Returns the new composite function. + * @see _.flowRight + * @example + * + * function square(n) { + * return n * n; + * } + * + * var addSquare = _.flow([_.add, square]); + * addSquare(1, 2); + * // => 9 + */ +var flow = createFlow(); + +module.exports = flow; diff --git a/backend/node_modules/lodash/flowRight.js b/backend/node_modules/lodash/flowRight.js new file mode 100644 index 00000000..11461410 --- /dev/null +++ b/backend/node_modules/lodash/flowRight.js @@ -0,0 +1,26 @@ +var createFlow = require('./_createFlow'); + +/** + * This method is like `_.flow` except that it creates a function that + * invokes the given functions from right to left. + * + * @static + * @since 3.0.0 + * @memberOf _ + * @category Util + * @param {...(Function|Function[])} [funcs] The functions to invoke. + * @returns {Function} Returns the new composite function. + * @see _.flow + * @example + * + * function square(n) { + * return n * n; + * } + * + * var addSquare = _.flowRight([square, _.add]); + * addSquare(1, 2); + * // => 9 + */ +var flowRight = createFlow(true); + +module.exports = flowRight; diff --git a/backend/node_modules/lodash/forEach.js b/backend/node_modules/lodash/forEach.js new file mode 100644 index 00000000..c64eaa73 --- /dev/null +++ b/backend/node_modules/lodash/forEach.js @@ -0,0 +1,41 @@ +var arrayEach = require('./_arrayEach'), + baseEach = require('./_baseEach'), + castFunction = require('./_castFunction'), + isArray = require('./isArray'); + +/** + * Iterates over elements of `collection` and invokes `iteratee` for each element. + * The iteratee is invoked with three arguments: (value, index|key, collection). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * **Note:** As with other "Collections" methods, objects with a "length" + * property are iterated like arrays. 
To avoid this behavior use `_.forIn` + * or `_.forOwn` for object iteration. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias each + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEachRight + * @example + * + * _.forEach([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `1` then `2`. + * + * _.forEach({ 'a': 1, 'b': 2 }, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ +function forEach(collection, iteratee) { + var func = isArray(collection) ? arrayEach : baseEach; + return func(collection, castFunction(iteratee)); +} + +module.exports = forEach; diff --git a/backend/node_modules/lodash/forEachRight.js b/backend/node_modules/lodash/forEachRight.js new file mode 100644 index 00000000..7390ebaf --- /dev/null +++ b/backend/node_modules/lodash/forEachRight.js @@ -0,0 +1,31 @@ +var arrayEachRight = require('./_arrayEachRight'), + baseEachRight = require('./_baseEachRight'), + castFunction = require('./_castFunction'), + isArray = require('./isArray'); + +/** + * This method is like `_.forEach` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @alias eachRight + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEach + * @example + * + * _.forEachRight([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `2` then `1`. + */ +function forEachRight(collection, iteratee) { + var func = isArray(collection) ? arrayEachRight : baseEachRight; + return func(collection, castFunction(iteratee)); +} + +module.exports = forEachRight; diff --git a/backend/node_modules/lodash/forIn.js b/backend/node_modules/lodash/forIn.js new file mode 100644 index 00000000..583a5963 --- /dev/null +++ b/backend/node_modules/lodash/forIn.js @@ -0,0 +1,39 @@ +var baseFor = require('./_baseFor'), + castFunction = require('./_castFunction'), + keysIn = require('./keysIn'); + +/** + * Iterates over own and inherited enumerable string keyed properties of an + * object and invokes `iteratee` for each property. The iteratee is invoked + * with three arguments: (value, key, object). Iteratee functions may exit + * iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forInRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forIn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a', 'b', then 'c' (iteration order is not guaranteed). + */ +function forIn(object, iteratee) { + return object == null + ? 
object + : baseFor(object, castFunction(iteratee), keysIn); +} + +module.exports = forIn; diff --git a/backend/node_modules/lodash/forInRight.js b/backend/node_modules/lodash/forInRight.js new file mode 100644 index 00000000..4aedf58a --- /dev/null +++ b/backend/node_modules/lodash/forInRight.js @@ -0,0 +1,37 @@ +var baseForRight = require('./_baseForRight'), + castFunction = require('./_castFunction'), + keysIn = require('./keysIn'); + +/** + * This method is like `_.forIn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forIn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forInRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'c', 'b', then 'a' assuming `_.forIn` logs 'a', 'b', then 'c'. + */ +function forInRight(object, iteratee) { + return object == null + ? object + : baseForRight(object, castFunction(iteratee), keysIn); +} + +module.exports = forInRight; diff --git a/backend/node_modules/lodash/forOwn.js b/backend/node_modules/lodash/forOwn.js new file mode 100644 index 00000000..94eed840 --- /dev/null +++ b/backend/node_modules/lodash/forOwn.js @@ -0,0 +1,36 @@ +var baseForOwn = require('./_baseForOwn'), + castFunction = require('./_castFunction'); + +/** + * Iterates over own enumerable string keyed properties of an object and + * invokes `iteratee` for each property. The iteratee is invoked with three + * arguments: (value, key, object). Iteratee functions may exit iteration + * early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forOwnRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ +function forOwn(object, iteratee) { + return object && baseForOwn(object, castFunction(iteratee)); +} + +module.exports = forOwn; diff --git a/backend/node_modules/lodash/forOwnRight.js b/backend/node_modules/lodash/forOwnRight.js new file mode 100644 index 00000000..86f338f0 --- /dev/null +++ b/backend/node_modules/lodash/forOwnRight.js @@ -0,0 +1,34 @@ +var baseForOwnRight = require('./_baseForOwnRight'), + castFunction = require('./_castFunction'); + +/** + * This method is like `_.forOwn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forOwn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwnRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'b' then 'a' assuming `_.forOwn` logs 'a' then 'b'. 
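One behavior of the iteration helpers above that the JSDoc states but never demonstrates: returning `false` from the iteratee exits `_.forEach`/`_.forOwn` early. A minimal sketch:

```js
const _ = require('lodash');

_.forEach([1, 2, 3, 4], function (value) {
  console.log(value);
  return value < 2; // returns `false` once value === 2, which stops iteration
});
// => Logs 1 then 2.
```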
+ */ +function forOwnRight(object, iteratee) { + return object && baseForOwnRight(object, castFunction(iteratee)); +} + +module.exports = forOwnRight; diff --git a/backend/node_modules/lodash/fp.js b/backend/node_modules/lodash/fp.js new file mode 100644 index 00000000..e372dbbd --- /dev/null +++ b/backend/node_modules/lodash/fp.js @@ -0,0 +1,2 @@ +var _ = require('./lodash.min').runInContext(); +module.exports = require('./fp/_baseConvert')(_, _); diff --git a/backend/node_modules/lodash/fp/F.js b/backend/node_modules/lodash/fp/F.js new file mode 100644 index 00000000..a05a63ad --- /dev/null +++ b/backend/node_modules/lodash/fp/F.js @@ -0,0 +1 @@ +module.exports = require('./stubFalse'); diff --git a/backend/node_modules/lodash/fp/T.js b/backend/node_modules/lodash/fp/T.js new file mode 100644 index 00000000..e2ba8ea5 --- /dev/null +++ b/backend/node_modules/lodash/fp/T.js @@ -0,0 +1 @@ +module.exports = require('./stubTrue'); diff --git a/backend/node_modules/lodash/fp/__.js b/backend/node_modules/lodash/fp/__.js new file mode 100644 index 00000000..4af98deb --- /dev/null +++ b/backend/node_modules/lodash/fp/__.js @@ -0,0 +1 @@ +module.exports = require('./placeholder'); diff --git a/backend/node_modules/lodash/fp/_baseConvert.js b/backend/node_modules/lodash/fp/_baseConvert.js new file mode 100644 index 00000000..9baf8e19 --- /dev/null +++ b/backend/node_modules/lodash/fp/_baseConvert.js @@ -0,0 +1,569 @@ +var mapping = require('./_mapping'), + fallbackHolder = require('./placeholder'); + +/** Built-in value reference. */ +var push = Array.prototype.push; + +/** + * Creates a function, with an arity of `n`, that invokes `func` with the + * arguments it receives. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} n The arity of the new function. + * @returns {Function} Returns the new function. + */ +function baseArity(func, n) { + return n == 2 + ? function(a, b) { return func.apply(undefined, arguments); } + : function(a) { return func.apply(undefined, arguments); }; +} + +/** + * Creates a function that invokes `func`, with up to `n` arguments, ignoring + * any additional arguments. + * + * @private + * @param {Function} func The function to cap arguments for. + * @param {number} n The arity cap. + * @returns {Function} Returns the new function. + */ +function baseAry(func, n) { + return n == 2 + ? function(a, b) { return func(a, b); } + : function(a) { return func(a); }; +} + +/** + * Creates a clone of `array`. + * + * @private + * @param {Array} array The array to clone. + * @returns {Array} Returns the cloned array. + */ +function cloneArray(array) { + var length = array ? array.length : 0, + result = Array(length); + + while (length--) { + result[length] = array[length]; + } + return result; +} + +/** + * Creates a function that clones a given object using the assignment `func`. + * + * @private + * @param {Function} func The assignment function. + * @returns {Function} Returns the new cloner function. + */ +function createCloner(func) { + return function(object) { + return func({}, object); + }; +} + +/** + * A specialized version of `_.spread` which flattens the spread array into + * the arguments of the invoked `func`. + * + * @private + * @param {Function} func The function to spread arguments over. + * @param {number} start The start position of the spread. + * @returns {Function} Returns the new function. 
+ */ +function flatSpread(func, start) { + return function() { + var length = arguments.length, + lastIndex = length - 1, + args = Array(length); + + while (length--) { + args[length] = arguments[length]; + } + var array = args[start], + otherArgs = args.slice(0, start); + + if (array) { + push.apply(otherArgs, array); + } + if (start != lastIndex) { + push.apply(otherArgs, args.slice(start + 1)); + } + return func.apply(this, otherArgs); + }; +} + +/** + * Creates a function that wraps `func` and uses `cloner` to clone the first + * argument it receives. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} cloner The function to clone arguments. + * @returns {Function} Returns the new immutable function. + */ +function wrapImmutable(func, cloner) { + return function() { + var length = arguments.length; + if (!length) { + return; + } + var args = Array(length); + while (length--) { + args[length] = arguments[length]; + } + var result = args[0] = cloner.apply(undefined, args); + func.apply(undefined, args); + return result; + }; +} + +/** + * The base implementation of `convert` which accepts a `util` object of methods + * required to perform conversions. + * + * @param {Object} util The util object. + * @param {string} name The name of the function to convert. + * @param {Function} func The function to convert. + * @param {Object} [options] The options object. + * @param {boolean} [options.cap=true] Specify capping iteratee arguments. + * @param {boolean} [options.curry=true] Specify currying. + * @param {boolean} [options.fixed=true] Specify fixed arity. + * @param {boolean} [options.immutable=true] Specify immutable operations. + * @param {boolean} [options.rearg=true] Specify rearranging arguments. + * @returns {Function|Object} Returns the converted function or object. + */ +function baseConvert(util, name, func, options) { + var isLib = typeof name == 'function', + isObj = name === Object(name); + + if (isObj) { + options = func; + func = name; + name = undefined; + } + if (func == null) { + throw new TypeError; + } + options || (options = {}); + + var config = { + 'cap': 'cap' in options ? options.cap : true, + 'curry': 'curry' in options ? options.curry : true, + 'fixed': 'fixed' in options ? options.fixed : true, + 'immutable': 'immutable' in options ? options.immutable : true, + 'rearg': 'rearg' in options ? options.rearg : true + }; + + var defaultHolder = isLib ? func : fallbackHolder, + forceCurry = ('curry' in options) && options.curry, + forceFixed = ('fixed' in options) && options.fixed, + forceRearg = ('rearg' in options) && options.rearg, + pristine = isLib ? func.runInContext() : undefined; + + var helpers = isLib ? 
func : { + 'ary': util.ary, + 'assign': util.assign, + 'clone': util.clone, + 'curry': util.curry, + 'forEach': util.forEach, + 'isArray': util.isArray, + 'isError': util.isError, + 'isFunction': util.isFunction, + 'isWeakMap': util.isWeakMap, + 'iteratee': util.iteratee, + 'keys': util.keys, + 'rearg': util.rearg, + 'toInteger': util.toInteger, + 'toPath': util.toPath + }; + + var ary = helpers.ary, + assign = helpers.assign, + clone = helpers.clone, + curry = helpers.curry, + each = helpers.forEach, + isArray = helpers.isArray, + isError = helpers.isError, + isFunction = helpers.isFunction, + isWeakMap = helpers.isWeakMap, + keys = helpers.keys, + rearg = helpers.rearg, + toInteger = helpers.toInteger, + toPath = helpers.toPath; + + var aryMethodKeys = keys(mapping.aryMethod); + + var wrappers = { + 'castArray': function(castArray) { + return function() { + var value = arguments[0]; + return isArray(value) + ? castArray(cloneArray(value)) + : castArray.apply(undefined, arguments); + }; + }, + 'iteratee': function(iteratee) { + return function() { + var func = arguments[0], + arity = arguments[1], + result = iteratee(func, arity), + length = result.length; + + if (config.cap && typeof arity == 'number') { + arity = arity > 2 ? (arity - 2) : 1; + return (length && length <= arity) ? result : baseAry(result, arity); + } + return result; + }; + }, + 'mixin': function(mixin) { + return function(source) { + var func = this; + if (!isFunction(func)) { + return mixin(func, Object(source)); + } + var pairs = []; + each(keys(source), function(key) { + if (isFunction(source[key])) { + pairs.push([key, func.prototype[key]]); + } + }); + + mixin(func, Object(source)); + + each(pairs, function(pair) { + var value = pair[1]; + if (isFunction(value)) { + func.prototype[pair[0]] = value; + } else { + delete func.prototype[pair[0]]; + } + }); + return func; + }; + }, + 'nthArg': function(nthArg) { + return function(n) { + var arity = n < 0 ? 1 : (toInteger(n) + 1); + return curry(nthArg(n), arity); + }; + }, + 'rearg': function(rearg) { + return function(func, indexes) { + var arity = indexes ? indexes.length : 0; + return curry(rearg(func, indexes), arity); + }; + }, + 'runInContext': function(runInContext) { + return function(context) { + return baseConvert(util, runInContext(context), options); + }; + } + }; + + /*--------------------------------------------------------------------------*/ + + /** + * Casts `func` to a function with an arity capped iteratee if needed. + * + * @private + * @param {string} name The name of the function to inspect. + * @param {Function} func The function to inspect. + * @returns {Function} Returns the cast function. + */ + function castCap(name, func) { + if (config.cap) { + var indexes = mapping.iterateeRearg[name]; + if (indexes) { + return iterateeRearg(func, indexes); + } + var n = !isLib && mapping.iterateeAry[name]; + if (n) { + return iterateeAry(func, n); + } + } + return func; + } + + /** + * Casts `func` to a curried function if needed. + * + * @private + * @param {string} name The name of the function to inspect. + * @param {Function} func The function to inspect. + * @param {number} n The arity of `func`. + * @returns {Function} Returns the cast function. + */ + function castCurry(name, func, n) { + return (forceCurry || (config.curry && n > 1)) + ? curry(func, n) + : func; + } + + /** + * Casts `func` to a fixed arity function if needed. + * + * @private + * @param {string} name The name of the function to inspect. 
+ * @param {Function} func The function to inspect. + * @param {number} n The arity cap. + * @returns {Function} Returns the cast function. + */ + function castFixed(name, func, n) { + if (config.fixed && (forceFixed || !mapping.skipFixed[name])) { + var data = mapping.methodSpread[name], + start = data && data.start; + + return start === undefined ? ary(func, n) : flatSpread(func, start); + } + return func; + } + + /** + * Casts `func` to an rearged function if needed. + * + * @private + * @param {string} name The name of the function to inspect. + * @param {Function} func The function to inspect. + * @param {number} n The arity of `func`. + * @returns {Function} Returns the cast function. + */ + function castRearg(name, func, n) { + return (config.rearg && n > 1 && (forceRearg || !mapping.skipRearg[name])) + ? rearg(func, mapping.methodRearg[name] || mapping.aryRearg[n]) + : func; + } + + /** + * Creates a clone of `object` by `path`. + * + * @private + * @param {Object} object The object to clone. + * @param {Array|string} path The path to clone by. + * @returns {Object} Returns the cloned object. + */ + function cloneByPath(object, path) { + path = toPath(path); + + var index = -1, + length = path.length, + lastIndex = length - 1, + result = clone(Object(object)), + nested = result; + + while (nested != null && ++index < length) { + var key = path[index], + value = nested[key]; + + if (value != null && + !(isFunction(value) || isError(value) || isWeakMap(value))) { + nested[key] = clone(index == lastIndex ? value : Object(value)); + } + nested = nested[key]; + } + return result; + } + + /** + * Converts `lodash` to an immutable auto-curried iteratee-first data-last + * version with conversion `options` applied. + * + * @param {Object} [options] The options object. See `baseConvert` for more details. + * @returns {Function} Returns the converted `lodash`. + */ + function convertLib(options) { + return _.runInContext.convert(options)(undefined); + } + + /** + * Create a converter function for `func` of `name`. + * + * @param {string} name The name of the function to convert. + * @param {Function} func The function to convert. + * @returns {Function} Returns the new converter function. + */ + function createConverter(name, func) { + var realName = mapping.aliasToReal[name] || name, + methodName = mapping.remap[realName] || realName, + oldOptions = options; + + return function(options) { + var newUtil = isLib ? pristine : helpers, + newFunc = isLib ? pristine[methodName] : func, + newOptions = assign(assign({}, oldOptions), options); + + return baseConvert(newUtil, realName, newFunc, newOptions); + }; + } + + /** + * Creates a function that wraps `func` to invoke its iteratee, with up to `n` + * arguments, ignoring any additional arguments. + * + * @private + * @param {Function} func The function to cap iteratee arguments for. + * @param {number} n The arity cap. + * @returns {Function} Returns the new function. + */ + function iterateeAry(func, n) { + return overArg(func, function(func) { + return typeof func == 'function' ? baseAry(func, n) : func; + }); + } + + /** + * Creates a function that wraps `func` to invoke its iteratee with arguments + * arranged according to the specified `indexes` where the argument value at + * the first index is provided as the first argument, the argument value at + * the second index is provided as the second argument, and so on. + * + * @private + * @param {Function} func The function to rearrange iteratee arguments for. 
+ * @param {number[]} indexes The arranged argument indexes. + * @returns {Function} Returns the new function. + */ + function iterateeRearg(func, indexes) { + return overArg(func, function(func) { + var n = indexes.length; + return baseArity(rearg(baseAry(func, n), indexes), n); + }); + } + + /** + * Creates a function that invokes `func` with its first argument transformed. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. + */ + function overArg(func, transform) { + return function() { + var length = arguments.length; + if (!length) { + return func(); + } + var args = Array(length); + while (length--) { + args[length] = arguments[length]; + } + var index = config.rearg ? 0 : (length - 1); + args[index] = transform(args[index]); + return func.apply(undefined, args); + }; + } + + /** + * Creates a function that wraps `func` and applys the conversions + * rules by `name`. + * + * @private + * @param {string} name The name of the function to wrap. + * @param {Function} func The function to wrap. + * @returns {Function} Returns the converted function. + */ + function wrap(name, func, placeholder) { + var result, + realName = mapping.aliasToReal[name] || name, + wrapped = func, + wrapper = wrappers[realName]; + + if (wrapper) { + wrapped = wrapper(func); + } + else if (config.immutable) { + if (mapping.mutate.array[realName]) { + wrapped = wrapImmutable(func, cloneArray); + } + else if (mapping.mutate.object[realName]) { + wrapped = wrapImmutable(func, createCloner(func)); + } + else if (mapping.mutate.set[realName]) { + wrapped = wrapImmutable(func, cloneByPath); + } + } + each(aryMethodKeys, function(aryKey) { + each(mapping.aryMethod[aryKey], function(otherName) { + if (realName == otherName) { + var data = mapping.methodSpread[realName], + afterRearg = data && data.afterRearg; + + result = afterRearg + ? castFixed(realName, castRearg(realName, wrapped, aryKey), aryKey) + : castRearg(realName, castFixed(realName, wrapped, aryKey), aryKey); + + result = castCap(realName, result); + result = castCurry(realName, result, aryKey); + return false; + } + }); + return !result; + }); + + result || (result = wrapped); + if (result == func) { + result = forceCurry ? curry(result, 1) : function() { + return func.apply(this, arguments); + }; + } + result.convert = createConverter(realName, func); + result.placeholder = func.placeholder = placeholder; + + return result; + } + + /*--------------------------------------------------------------------------*/ + + if (!isObj) { + return wrap(name, func, defaultHolder); + } + var _ = func; + + // Convert methods by ary cap. + var pairs = []; + each(aryMethodKeys, function(aryKey) { + each(mapping.aryMethod[aryKey], function(key) { + var func = _[mapping.remap[key] || key]; + if (func) { + pairs.push([key, wrap(key, func, _)]); + } + }); + }); + + // Convert remaining methods. + each(keys(_), function(key) { + var func = _[key]; + if (typeof func == 'function') { + var length = pairs.length; + while (length--) { + if (pairs[length][0] == key) { + return; + } + } + func.convert = createConverter(key, func); + pairs.push([key, func]); + } + }); + + // Assign to `_` leaving `_.prototype` unchanged to allow chaining. + each(pairs, function(pair) { + _[pair[0]] = pair[1]; + }); + + _.convert = convertLib; + _.placeholder = _; + + // Assign aliases. 
+ each(keys(_), function(key) { + each(mapping.realToAlias[key] || [], function(alias) { + _[alias] = _[key]; + }); + }); + + return _; +} + +module.exports = baseConvert; diff --git a/backend/node_modules/lodash/fp/_convertBrowser.js b/backend/node_modules/lodash/fp/_convertBrowser.js new file mode 100644 index 00000000..bde030dc --- /dev/null +++ b/backend/node_modules/lodash/fp/_convertBrowser.js @@ -0,0 +1,18 @@ +var baseConvert = require('./_baseConvert'); + +/** + * Converts `lodash` to an immutable auto-curried iteratee-first data-last + * version with conversion `options` applied. + * + * @param {Function} lodash The lodash function to convert. + * @param {Object} [options] The options object. See `baseConvert` for more details. + * @returns {Function} Returns the converted `lodash`. + */ +function browserConvert(lodash, options) { + return baseConvert(lodash, lodash, options); +} + +if (typeof _ == 'function' && typeof _.runInContext == 'function') { + _ = browserConvert(_.runInContext()); +} +module.exports = browserConvert; diff --git a/backend/node_modules/lodash/fp/_falseOptions.js b/backend/node_modules/lodash/fp/_falseOptions.js new file mode 100644 index 00000000..773235e3 --- /dev/null +++ b/backend/node_modules/lodash/fp/_falseOptions.js @@ -0,0 +1,7 @@ +module.exports = { + 'cap': false, + 'curry': false, + 'fixed': false, + 'immutable': false, + 'rearg': false +}; diff --git a/backend/node_modules/lodash/fp/_mapping.js b/backend/node_modules/lodash/fp/_mapping.js new file mode 100644 index 00000000..a642ec05 --- /dev/null +++ b/backend/node_modules/lodash/fp/_mapping.js @@ -0,0 +1,358 @@ +/** Used to map aliases to their real names. */ +exports.aliasToReal = { + + // Lodash aliases. + 'each': 'forEach', + 'eachRight': 'forEachRight', + 'entries': 'toPairs', + 'entriesIn': 'toPairsIn', + 'extend': 'assignIn', + 'extendAll': 'assignInAll', + 'extendAllWith': 'assignInAllWith', + 'extendWith': 'assignInWith', + 'first': 'head', + + // Methods that are curried variants of others. + 'conforms': 'conformsTo', + 'matches': 'isMatch', + 'property': 'get', + + // Ramda aliases. + '__': 'placeholder', + 'F': 'stubFalse', + 'T': 'stubTrue', + 'all': 'every', + 'allPass': 'overEvery', + 'always': 'constant', + 'any': 'some', + 'anyPass': 'overSome', + 'apply': 'spread', + 'assoc': 'set', + 'assocPath': 'set', + 'complement': 'negate', + 'compose': 'flowRight', + 'contains': 'includes', + 'dissoc': 'unset', + 'dissocPath': 'unset', + 'dropLast': 'dropRight', + 'dropLastWhile': 'dropRightWhile', + 'equals': 'isEqual', + 'identical': 'eq', + 'indexBy': 'keyBy', + 'init': 'initial', + 'invertObj': 'invert', + 'juxt': 'over', + 'omitAll': 'omit', + 'nAry': 'ary', + 'path': 'get', + 'pathEq': 'matchesProperty', + 'pathOr': 'getOr', + 'paths': 'at', + 'pickAll': 'pick', + 'pipe': 'flow', + 'pluck': 'map', + 'prop': 'get', + 'propEq': 'matchesProperty', + 'propOr': 'getOr', + 'props': 'at', + 'symmetricDifference': 'xor', + 'symmetricDifferenceBy': 'xorBy', + 'symmetricDifferenceWith': 'xorWith', + 'takeLast': 'takeRight', + 'takeLastWhile': 'takeRightWhile', + 'unapply': 'rest', + 'unnest': 'flatten', + 'useWith': 'overArgs', + 'where': 'conformsTo', + 'whereEq': 'isMatch', + 'zipObj': 'zipObject' +}; + +/** Used to map ary to method names. 
*/ +exports.aryMethod = { + '1': [ + 'assignAll', 'assignInAll', 'attempt', 'castArray', 'ceil', 'create', + 'curry', 'curryRight', 'defaultsAll', 'defaultsDeepAll', 'floor', 'flow', + 'flowRight', 'fromPairs', 'invert', 'iteratee', 'memoize', 'method', 'mergeAll', + 'methodOf', 'mixin', 'nthArg', 'over', 'overEvery', 'overSome','rest', 'reverse', + 'round', 'runInContext', 'spread', 'template', 'trim', 'trimEnd', 'trimStart', + 'uniqueId', 'words', 'zipAll' + ], + '2': [ + 'add', 'after', 'ary', 'assign', 'assignAllWith', 'assignIn', 'assignInAllWith', + 'at', 'before', 'bind', 'bindAll', 'bindKey', 'chunk', 'cloneDeepWith', + 'cloneWith', 'concat', 'conformsTo', 'countBy', 'curryN', 'curryRightN', + 'debounce', 'defaults', 'defaultsDeep', 'defaultTo', 'delay', 'difference', + 'divide', 'drop', 'dropRight', 'dropRightWhile', 'dropWhile', 'endsWith', 'eq', + 'every', 'filter', 'find', 'findIndex', 'findKey', 'findLast', 'findLastIndex', + 'findLastKey', 'flatMap', 'flatMapDeep', 'flattenDepth', 'forEach', + 'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight', 'get', + 'groupBy', 'gt', 'gte', 'has', 'hasIn', 'includes', 'indexOf', 'intersection', + 'invertBy', 'invoke', 'invokeMap', 'isEqual', 'isMatch', 'join', 'keyBy', + 'lastIndexOf', 'lt', 'lte', 'map', 'mapKeys', 'mapValues', 'matchesProperty', + 'maxBy', 'meanBy', 'merge', 'mergeAllWith', 'minBy', 'multiply', 'nth', 'omit', + 'omitBy', 'overArgs', 'pad', 'padEnd', 'padStart', 'parseInt', 'partial', + 'partialRight', 'partition', 'pick', 'pickBy', 'propertyOf', 'pull', 'pullAll', + 'pullAt', 'random', 'range', 'rangeRight', 'rearg', 'reject', 'remove', + 'repeat', 'restFrom', 'result', 'sampleSize', 'some', 'sortBy', 'sortedIndex', + 'sortedIndexOf', 'sortedLastIndex', 'sortedLastIndexOf', 'sortedUniqBy', + 'split', 'spreadFrom', 'startsWith', 'subtract', 'sumBy', 'take', 'takeRight', + 'takeRightWhile', 'takeWhile', 'tap', 'throttle', 'thru', 'times', 'trimChars', + 'trimCharsEnd', 'trimCharsStart', 'truncate', 'union', 'uniqBy', 'uniqWith', + 'unset', 'unzipWith', 'without', 'wrap', 'xor', 'zip', 'zipObject', + 'zipObjectDeep' + ], + '3': [ + 'assignInWith', 'assignWith', 'clamp', 'differenceBy', 'differenceWith', + 'findFrom', 'findIndexFrom', 'findLastFrom', 'findLastIndexFrom', 'getOr', + 'includesFrom', 'indexOfFrom', 'inRange', 'intersectionBy', 'intersectionWith', + 'invokeArgs', 'invokeArgsMap', 'isEqualWith', 'isMatchWith', 'flatMapDepth', + 'lastIndexOfFrom', 'mergeWith', 'orderBy', 'padChars', 'padCharsEnd', + 'padCharsStart', 'pullAllBy', 'pullAllWith', 'rangeStep', 'rangeStepRight', + 'reduce', 'reduceRight', 'replace', 'set', 'slice', 'sortedIndexBy', + 'sortedLastIndexBy', 'transform', 'unionBy', 'unionWith', 'update', 'xorBy', + 'xorWith', 'zipWith' + ], + '4': [ + 'fill', 'setWith', 'updateWith' + ] +}; + +/** Used to map ary to rearg configs. */ +exports.aryRearg = { + '2': [1, 0], + '3': [2, 0, 1], + '4': [3, 2, 0, 1] +}; + +/** Used to map method names to their iteratee ary. 
*/ +exports.iterateeAry = { + 'dropRightWhile': 1, + 'dropWhile': 1, + 'every': 1, + 'filter': 1, + 'find': 1, + 'findFrom': 1, + 'findIndex': 1, + 'findIndexFrom': 1, + 'findKey': 1, + 'findLast': 1, + 'findLastFrom': 1, + 'findLastIndex': 1, + 'findLastIndexFrom': 1, + 'findLastKey': 1, + 'flatMap': 1, + 'flatMapDeep': 1, + 'flatMapDepth': 1, + 'forEach': 1, + 'forEachRight': 1, + 'forIn': 1, + 'forInRight': 1, + 'forOwn': 1, + 'forOwnRight': 1, + 'map': 1, + 'mapKeys': 1, + 'mapValues': 1, + 'partition': 1, + 'reduce': 2, + 'reduceRight': 2, + 'reject': 1, + 'remove': 1, + 'some': 1, + 'takeRightWhile': 1, + 'takeWhile': 1, + 'times': 1, + 'transform': 2 +}; + +/** Used to map method names to iteratee rearg configs. */ +exports.iterateeRearg = { + 'mapKeys': [1], + 'reduceRight': [1, 0] +}; + +/** Used to map method names to rearg configs. */ +exports.methodRearg = { + 'assignInAllWith': [1, 0], + 'assignInWith': [1, 2, 0], + 'assignAllWith': [1, 0], + 'assignWith': [1, 2, 0], + 'differenceBy': [1, 2, 0], + 'differenceWith': [1, 2, 0], + 'getOr': [2, 1, 0], + 'intersectionBy': [1, 2, 0], + 'intersectionWith': [1, 2, 0], + 'isEqualWith': [1, 2, 0], + 'isMatchWith': [2, 1, 0], + 'mergeAllWith': [1, 0], + 'mergeWith': [1, 2, 0], + 'padChars': [2, 1, 0], + 'padCharsEnd': [2, 1, 0], + 'padCharsStart': [2, 1, 0], + 'pullAllBy': [2, 1, 0], + 'pullAllWith': [2, 1, 0], + 'rangeStep': [1, 2, 0], + 'rangeStepRight': [1, 2, 0], + 'setWith': [3, 1, 2, 0], + 'sortedIndexBy': [2, 1, 0], + 'sortedLastIndexBy': [2, 1, 0], + 'unionBy': [1, 2, 0], + 'unionWith': [1, 2, 0], + 'updateWith': [3, 1, 2, 0], + 'xorBy': [1, 2, 0], + 'xorWith': [1, 2, 0], + 'zipWith': [1, 2, 0] +}; + +/** Used to map method names to spread configs. */ +exports.methodSpread = { + 'assignAll': { 'start': 0 }, + 'assignAllWith': { 'start': 0 }, + 'assignInAll': { 'start': 0 }, + 'assignInAllWith': { 'start': 0 }, + 'defaultsAll': { 'start': 0 }, + 'defaultsDeepAll': { 'start': 0 }, + 'invokeArgs': { 'start': 2 }, + 'invokeArgsMap': { 'start': 2 }, + 'mergeAll': { 'start': 0 }, + 'mergeAllWith': { 'start': 0 }, + 'partial': { 'start': 1 }, + 'partialRight': { 'start': 1 }, + 'without': { 'start': 1 }, + 'zipAll': { 'start': 0 } +}; + +/** Used to identify methods which mutate arrays or objects. */ +exports.mutate = { + 'array': { + 'fill': true, + 'pull': true, + 'pullAll': true, + 'pullAllBy': true, + 'pullAllWith': true, + 'pullAt': true, + 'remove': true, + 'reverse': true + }, + 'object': { + 'assign': true, + 'assignAll': true, + 'assignAllWith': true, + 'assignIn': true, + 'assignInAll': true, + 'assignInAllWith': true, + 'assignInWith': true, + 'assignWith': true, + 'defaults': true, + 'defaultsAll': true, + 'defaultsDeep': true, + 'defaultsDeepAll': true, + 'merge': true, + 'mergeAll': true, + 'mergeAllWith': true, + 'mergeWith': true, + }, + 'set': { + 'set': true, + 'setWith': true, + 'unset': true, + 'update': true, + 'updateWith': true + } +}; + +/** Used to map real names to their aliases. */ +exports.realToAlias = (function() { + var hasOwnProperty = Object.prototype.hasOwnProperty, + object = exports.aliasToReal, + result = {}; + + for (var key in object) { + var value = object[key]; + if (hasOwnProperty.call(result, value)) { + result[value].push(key); + } else { + result[value] = [key]; + } + } + return result; +}()); + +/** Used to map method names to other names. 
*/ +exports.remap = { + 'assignAll': 'assign', + 'assignAllWith': 'assignWith', + 'assignInAll': 'assignIn', + 'assignInAllWith': 'assignInWith', + 'curryN': 'curry', + 'curryRightN': 'curryRight', + 'defaultsAll': 'defaults', + 'defaultsDeepAll': 'defaultsDeep', + 'findFrom': 'find', + 'findIndexFrom': 'findIndex', + 'findLastFrom': 'findLast', + 'findLastIndexFrom': 'findLastIndex', + 'getOr': 'get', + 'includesFrom': 'includes', + 'indexOfFrom': 'indexOf', + 'invokeArgs': 'invoke', + 'invokeArgsMap': 'invokeMap', + 'lastIndexOfFrom': 'lastIndexOf', + 'mergeAll': 'merge', + 'mergeAllWith': 'mergeWith', + 'padChars': 'pad', + 'padCharsEnd': 'padEnd', + 'padCharsStart': 'padStart', + 'propertyOf': 'get', + 'rangeStep': 'range', + 'rangeStepRight': 'rangeRight', + 'restFrom': 'rest', + 'spreadFrom': 'spread', + 'trimChars': 'trim', + 'trimCharsEnd': 'trimEnd', + 'trimCharsStart': 'trimStart', + 'zipAll': 'zip' +}; + +/** Used to track methods that skip fixing their arity. */ +exports.skipFixed = { + 'castArray': true, + 'flow': true, + 'flowRight': true, + 'iteratee': true, + 'mixin': true, + 'rearg': true, + 'runInContext': true +}; + +/** Used to track methods that skip rearranging arguments. */ +exports.skipRearg = { + 'add': true, + 'assign': true, + 'assignIn': true, + 'bind': true, + 'bindKey': true, + 'concat': true, + 'difference': true, + 'divide': true, + 'eq': true, + 'gt': true, + 'gte': true, + 'isEqual': true, + 'lt': true, + 'lte': true, + 'matchesProperty': true, + 'merge': true, + 'multiply': true, + 'overArgs': true, + 'partial': true, + 'partialRight': true, + 'propertyOf': true, + 'random': true, + 'range': true, + 'rangeRight': true, + 'subtract': true, + 'zip': true, + 'zipObject': true, + 'zipObjectDeep': true +}; diff --git a/backend/node_modules/lodash/fp/_util.js b/backend/node_modules/lodash/fp/_util.js new file mode 100644 index 00000000..1dbf36f5 --- /dev/null +++ b/backend/node_modules/lodash/fp/_util.js @@ -0,0 +1,16 @@ +module.exports = { + 'ary': require('../ary'), + 'assign': require('../_baseAssign'), + 'clone': require('../clone'), + 'curry': require('../curry'), + 'forEach': require('../_arrayEach'), + 'isArray': require('../isArray'), + 'isError': require('../isError'), + 'isFunction': require('../isFunction'), + 'isWeakMap': require('../isWeakMap'), + 'iteratee': require('../iteratee'), + 'keys': require('../_baseKeys'), + 'rearg': require('../rearg'), + 'toInteger': require('../toInteger'), + 'toPath': require('../toPath') +}; diff --git a/backend/node_modules/lodash/fp/add.js b/backend/node_modules/lodash/fp/add.js new file mode 100644 index 00000000..816eeece --- /dev/null +++ b/backend/node_modules/lodash/fp/add.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('add', require('../add')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/after.js b/backend/node_modules/lodash/fp/after.js new file mode 100644 index 00000000..21a0167a --- /dev/null +++ b/backend/node_modules/lodash/fp/after.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('after', require('../after')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/all.js b/backend/node_modules/lodash/fp/all.js new file mode 100644 index 00000000..d0839f77 --- /dev/null +++ b/backend/node_modules/lodash/fp/all.js @@ -0,0 +1 @@ +module.exports = require('./every'); diff --git a/backend/node_modules/lodash/fp/allPass.js 
b/backend/node_modules/lodash/fp/allPass.js new file mode 100644 index 00000000..79b73ef8 --- /dev/null +++ b/backend/node_modules/lodash/fp/allPass.js @@ -0,0 +1 @@ +module.exports = require('./overEvery'); diff --git a/backend/node_modules/lodash/fp/always.js b/backend/node_modules/lodash/fp/always.js new file mode 100644 index 00000000..98877030 --- /dev/null +++ b/backend/node_modules/lodash/fp/always.js @@ -0,0 +1 @@ +module.exports = require('./constant'); diff --git a/backend/node_modules/lodash/fp/any.js b/backend/node_modules/lodash/fp/any.js new file mode 100644 index 00000000..900ac25e --- /dev/null +++ b/backend/node_modules/lodash/fp/any.js @@ -0,0 +1 @@ +module.exports = require('./some'); diff --git a/backend/node_modules/lodash/fp/anyPass.js b/backend/node_modules/lodash/fp/anyPass.js new file mode 100644 index 00000000..2774ab37 --- /dev/null +++ b/backend/node_modules/lodash/fp/anyPass.js @@ -0,0 +1 @@ +module.exports = require('./overSome'); diff --git a/backend/node_modules/lodash/fp/apply.js b/backend/node_modules/lodash/fp/apply.js new file mode 100644 index 00000000..2b757129 --- /dev/null +++ b/backend/node_modules/lodash/fp/apply.js @@ -0,0 +1 @@ +module.exports = require('./spread'); diff --git a/backend/node_modules/lodash/fp/array.js b/backend/node_modules/lodash/fp/array.js new file mode 100644 index 00000000..fe939c2c --- /dev/null +++ b/backend/node_modules/lodash/fp/array.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../array')); diff --git a/backend/node_modules/lodash/fp/ary.js b/backend/node_modules/lodash/fp/ary.js new file mode 100644 index 00000000..8edf1877 --- /dev/null +++ b/backend/node_modules/lodash/fp/ary.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('ary', require('../ary')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/assign.js b/backend/node_modules/lodash/fp/assign.js new file mode 100644 index 00000000..23f47af1 --- /dev/null +++ b/backend/node_modules/lodash/fp/assign.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('assign', require('../assign')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/assignAll.js b/backend/node_modules/lodash/fp/assignAll.js new file mode 100644 index 00000000..b1d36c7e --- /dev/null +++ b/backend/node_modules/lodash/fp/assignAll.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('assignAll', require('../assign')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/assignAllWith.js b/backend/node_modules/lodash/fp/assignAllWith.js new file mode 100644 index 00000000..21e836e6 --- /dev/null +++ b/backend/node_modules/lodash/fp/assignAllWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('assignAllWith', require('../assignWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/assignIn.js b/backend/node_modules/lodash/fp/assignIn.js new file mode 100644 index 00000000..6e7c65fa --- /dev/null +++ b/backend/node_modules/lodash/fp/assignIn.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('assignIn', require('../assignIn')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/assignInAll.js 
b/backend/node_modules/lodash/fp/assignInAll.js new file mode 100644 index 00000000..7ba75dba --- /dev/null +++ b/backend/node_modules/lodash/fp/assignInAll.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('assignInAll', require('../assignIn')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/assignInAllWith.js b/backend/node_modules/lodash/fp/assignInAllWith.js new file mode 100644 index 00000000..e766903d --- /dev/null +++ b/backend/node_modules/lodash/fp/assignInAllWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('assignInAllWith', require('../assignInWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/assignInWith.js b/backend/node_modules/lodash/fp/assignInWith.js new file mode 100644 index 00000000..acb59236 --- /dev/null +++ b/backend/node_modules/lodash/fp/assignInWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('assignInWith', require('../assignInWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/assignWith.js b/backend/node_modules/lodash/fp/assignWith.js new file mode 100644 index 00000000..eb925212 --- /dev/null +++ b/backend/node_modules/lodash/fp/assignWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('assignWith', require('../assignWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/assoc.js b/backend/node_modules/lodash/fp/assoc.js new file mode 100644 index 00000000..7648820c --- /dev/null +++ b/backend/node_modules/lodash/fp/assoc.js @@ -0,0 +1 @@ +module.exports = require('./set'); diff --git a/backend/node_modules/lodash/fp/assocPath.js b/backend/node_modules/lodash/fp/assocPath.js new file mode 100644 index 00000000..7648820c --- /dev/null +++ b/backend/node_modules/lodash/fp/assocPath.js @@ -0,0 +1 @@ +module.exports = require('./set'); diff --git a/backend/node_modules/lodash/fp/at.js b/backend/node_modules/lodash/fp/at.js new file mode 100644 index 00000000..cc39d257 --- /dev/null +++ b/backend/node_modules/lodash/fp/at.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('at', require('../at')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/attempt.js b/backend/node_modules/lodash/fp/attempt.js new file mode 100644 index 00000000..26ca42ea --- /dev/null +++ b/backend/node_modules/lodash/fp/attempt.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('attempt', require('../attempt')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/before.js b/backend/node_modules/lodash/fp/before.js new file mode 100644 index 00000000..7a2de65d --- /dev/null +++ b/backend/node_modules/lodash/fp/before.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('before', require('../before')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/bind.js b/backend/node_modules/lodash/fp/bind.js new file mode 100644 index 00000000..5cbe4f30 --- /dev/null +++ b/backend/node_modules/lodash/fp/bind.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('bind', require('../bind')); + +func.placeholder = require('./placeholder'); 
+module.exports = func; diff --git a/backend/node_modules/lodash/fp/bindAll.js b/backend/node_modules/lodash/fp/bindAll.js new file mode 100644 index 00000000..6b4a4a0f --- /dev/null +++ b/backend/node_modules/lodash/fp/bindAll.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('bindAll', require('../bindAll')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/bindKey.js b/backend/node_modules/lodash/fp/bindKey.js new file mode 100644 index 00000000..6a46c6b1 --- /dev/null +++ b/backend/node_modules/lodash/fp/bindKey.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('bindKey', require('../bindKey')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/camelCase.js b/backend/node_modules/lodash/fp/camelCase.js new file mode 100644 index 00000000..87b77b49 --- /dev/null +++ b/backend/node_modules/lodash/fp/camelCase.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('camelCase', require('../camelCase'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/capitalize.js b/backend/node_modules/lodash/fp/capitalize.js new file mode 100644 index 00000000..cac74e14 --- /dev/null +++ b/backend/node_modules/lodash/fp/capitalize.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('capitalize', require('../capitalize'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/castArray.js b/backend/node_modules/lodash/fp/castArray.js new file mode 100644 index 00000000..8681c099 --- /dev/null +++ b/backend/node_modules/lodash/fp/castArray.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('castArray', require('../castArray')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/ceil.js b/backend/node_modules/lodash/fp/ceil.js new file mode 100644 index 00000000..f416b729 --- /dev/null +++ b/backend/node_modules/lodash/fp/ceil.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('ceil', require('../ceil')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/chain.js b/backend/node_modules/lodash/fp/chain.js new file mode 100644 index 00000000..604fe398 --- /dev/null +++ b/backend/node_modules/lodash/fp/chain.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('chain', require('../chain'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/chunk.js b/backend/node_modules/lodash/fp/chunk.js new file mode 100644 index 00000000..871ab085 --- /dev/null +++ b/backend/node_modules/lodash/fp/chunk.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('chunk', require('../chunk')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/clamp.js b/backend/node_modules/lodash/fp/clamp.js new file mode 100644 index 00000000..3b06c01c --- /dev/null +++ b/backend/node_modules/lodash/fp/clamp.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('clamp', require('../clamp')); + +func.placeholder = require('./placeholder'); +module.exports = func; 
diff --git a/backend/node_modules/lodash/fp/clone.js b/backend/node_modules/lodash/fp/clone.js new file mode 100644 index 00000000..cadb59c9 --- /dev/null +++ b/backend/node_modules/lodash/fp/clone.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('clone', require('../clone'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/cloneDeep.js b/backend/node_modules/lodash/fp/cloneDeep.js new file mode 100644 index 00000000..a6107aac --- /dev/null +++ b/backend/node_modules/lodash/fp/cloneDeep.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('cloneDeep', require('../cloneDeep'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/cloneDeepWith.js b/backend/node_modules/lodash/fp/cloneDeepWith.js new file mode 100644 index 00000000..6f01e44a --- /dev/null +++ b/backend/node_modules/lodash/fp/cloneDeepWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('cloneDeepWith', require('../cloneDeepWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/cloneWith.js b/backend/node_modules/lodash/fp/cloneWith.js new file mode 100644 index 00000000..aa885781 --- /dev/null +++ b/backend/node_modules/lodash/fp/cloneWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('cloneWith', require('../cloneWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/collection.js b/backend/node_modules/lodash/fp/collection.js new file mode 100644 index 00000000..fc8b328a --- /dev/null +++ b/backend/node_modules/lodash/fp/collection.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../collection')); diff --git a/backend/node_modules/lodash/fp/commit.js b/backend/node_modules/lodash/fp/commit.js new file mode 100644 index 00000000..130a894f --- /dev/null +++ b/backend/node_modules/lodash/fp/commit.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('commit', require('../commit'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/compact.js b/backend/node_modules/lodash/fp/compact.js new file mode 100644 index 00000000..ce8f7a1a --- /dev/null +++ b/backend/node_modules/lodash/fp/compact.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('compact', require('../compact'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/complement.js b/backend/node_modules/lodash/fp/complement.js new file mode 100644 index 00000000..93eb462b --- /dev/null +++ b/backend/node_modules/lodash/fp/complement.js @@ -0,0 +1 @@ +module.exports = require('./negate'); diff --git a/backend/node_modules/lodash/fp/compose.js b/backend/node_modules/lodash/fp/compose.js new file mode 100644 index 00000000..1954e942 --- /dev/null +++ b/backend/node_modules/lodash/fp/compose.js @@ -0,0 +1 @@ +module.exports = require('./flowRight'); diff --git a/backend/node_modules/lodash/fp/concat.js b/backend/node_modules/lodash/fp/concat.js new file mode 100644 index 00000000..e59346ad --- /dev/null +++ b/backend/node_modules/lodash/fp/concat.js @@ -0,0 +1,5 @@ +var convert = 
require('./convert'), + func = convert('concat', require('../concat')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/cond.js b/backend/node_modules/lodash/fp/cond.js new file mode 100644 index 00000000..6a0120ef --- /dev/null +++ b/backend/node_modules/lodash/fp/cond.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('cond', require('../cond'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/conforms.js b/backend/node_modules/lodash/fp/conforms.js new file mode 100644 index 00000000..3247f64a --- /dev/null +++ b/backend/node_modules/lodash/fp/conforms.js @@ -0,0 +1 @@ +module.exports = require('./conformsTo'); diff --git a/backend/node_modules/lodash/fp/conformsTo.js b/backend/node_modules/lodash/fp/conformsTo.js new file mode 100644 index 00000000..aa7f41ec --- /dev/null +++ b/backend/node_modules/lodash/fp/conformsTo.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('conformsTo', require('../conformsTo')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/constant.js b/backend/node_modules/lodash/fp/constant.js new file mode 100644 index 00000000..9e406fc0 --- /dev/null +++ b/backend/node_modules/lodash/fp/constant.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('constant', require('../constant'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/contains.js b/backend/node_modules/lodash/fp/contains.js new file mode 100644 index 00000000..594722af --- /dev/null +++ b/backend/node_modules/lodash/fp/contains.js @@ -0,0 +1 @@ +module.exports = require('./includes'); diff --git a/backend/node_modules/lodash/fp/convert.js b/backend/node_modules/lodash/fp/convert.js new file mode 100644 index 00000000..4795dc42 --- /dev/null +++ b/backend/node_modules/lodash/fp/convert.js @@ -0,0 +1,18 @@ +var baseConvert = require('./_baseConvert'), + util = require('./_util'); + +/** + * Converts `func` of `name` to an immutable auto-curried iteratee-first data-last + * version with conversion `options` applied. If `name` is an object its methods + * will be converted. + * + * @param {string} name The name of the function to wrap. + * @param {Function} [func] The function to wrap. + * @param {Object} [options] The options object. See `baseConvert` for more details. + * @returns {Function|Object} Returns the converted function or object. 
+ */ +function convert(name, func, options) { + return baseConvert(util, name, func, options); +} + +module.exports = convert; diff --git a/backend/node_modules/lodash/fp/countBy.js b/backend/node_modules/lodash/fp/countBy.js new file mode 100644 index 00000000..dfa46432 --- /dev/null +++ b/backend/node_modules/lodash/fp/countBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('countBy', require('../countBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/create.js b/backend/node_modules/lodash/fp/create.js new file mode 100644 index 00000000..752025fb --- /dev/null +++ b/backend/node_modules/lodash/fp/create.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('create', require('../create')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/curry.js b/backend/node_modules/lodash/fp/curry.js new file mode 100644 index 00000000..b0b4168c --- /dev/null +++ b/backend/node_modules/lodash/fp/curry.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('curry', require('../curry')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/curryN.js b/backend/node_modules/lodash/fp/curryN.js new file mode 100644 index 00000000..2ae7d00a --- /dev/null +++ b/backend/node_modules/lodash/fp/curryN.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('curryN', require('../curry')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/curryRight.js b/backend/node_modules/lodash/fp/curryRight.js new file mode 100644 index 00000000..cb619eb5 --- /dev/null +++ b/backend/node_modules/lodash/fp/curryRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('curryRight', require('../curryRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/curryRightN.js b/backend/node_modules/lodash/fp/curryRightN.js new file mode 100644 index 00000000..2495afc8 --- /dev/null +++ b/backend/node_modules/lodash/fp/curryRightN.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('curryRightN', require('../curryRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/date.js b/backend/node_modules/lodash/fp/date.js new file mode 100644 index 00000000..82cb952b --- /dev/null +++ b/backend/node_modules/lodash/fp/date.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../date')); diff --git a/backend/node_modules/lodash/fp/debounce.js b/backend/node_modules/lodash/fp/debounce.js new file mode 100644 index 00000000..26122293 --- /dev/null +++ b/backend/node_modules/lodash/fp/debounce.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('debounce', require('../debounce')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/deburr.js b/backend/node_modules/lodash/fp/deburr.js new file mode 100644 index 00000000..96463ab8 --- /dev/null +++ b/backend/node_modules/lodash/fp/deburr.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('deburr', require('../deburr'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git 
a/backend/node_modules/lodash/fp/defaultTo.js b/backend/node_modules/lodash/fp/defaultTo.js new file mode 100644 index 00000000..d6b52a44 --- /dev/null +++ b/backend/node_modules/lodash/fp/defaultTo.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('defaultTo', require('../defaultTo')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/defaults.js b/backend/node_modules/lodash/fp/defaults.js new file mode 100644 index 00000000..e1a8e6e7 --- /dev/null +++ b/backend/node_modules/lodash/fp/defaults.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('defaults', require('../defaults')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/defaultsAll.js b/backend/node_modules/lodash/fp/defaultsAll.js new file mode 100644 index 00000000..238fcc3c --- /dev/null +++ b/backend/node_modules/lodash/fp/defaultsAll.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('defaultsAll', require('../defaults')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/defaultsDeep.js b/backend/node_modules/lodash/fp/defaultsDeep.js new file mode 100644 index 00000000..1f172ff9 --- /dev/null +++ b/backend/node_modules/lodash/fp/defaultsDeep.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('defaultsDeep', require('../defaultsDeep')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/defaultsDeepAll.js b/backend/node_modules/lodash/fp/defaultsDeepAll.js new file mode 100644 index 00000000..6835f2f0 --- /dev/null +++ b/backend/node_modules/lodash/fp/defaultsDeepAll.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('defaultsDeepAll', require('../defaultsDeep')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/defer.js b/backend/node_modules/lodash/fp/defer.js new file mode 100644 index 00000000..ec7990fe --- /dev/null +++ b/backend/node_modules/lodash/fp/defer.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('defer', require('../defer'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/delay.js b/backend/node_modules/lodash/fp/delay.js new file mode 100644 index 00000000..556dbd56 --- /dev/null +++ b/backend/node_modules/lodash/fp/delay.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('delay', require('../delay')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/difference.js b/backend/node_modules/lodash/fp/difference.js new file mode 100644 index 00000000..2d037654 --- /dev/null +++ b/backend/node_modules/lodash/fp/difference.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('difference', require('../difference')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/differenceBy.js b/backend/node_modules/lodash/fp/differenceBy.js new file mode 100644 index 00000000..2f914910 --- /dev/null +++ b/backend/node_modules/lodash/fp/differenceBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('differenceBy', require('../differenceBy')); + +func.placeholder = 
require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/differenceWith.js b/backend/node_modules/lodash/fp/differenceWith.js new file mode 100644 index 00000000..bcf5ad2e --- /dev/null +++ b/backend/node_modules/lodash/fp/differenceWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('differenceWith', require('../differenceWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/dissoc.js b/backend/node_modules/lodash/fp/dissoc.js new file mode 100644 index 00000000..7ec7be19 --- /dev/null +++ b/backend/node_modules/lodash/fp/dissoc.js @@ -0,0 +1 @@ +module.exports = require('./unset'); diff --git a/backend/node_modules/lodash/fp/dissocPath.js b/backend/node_modules/lodash/fp/dissocPath.js new file mode 100644 index 00000000..7ec7be19 --- /dev/null +++ b/backend/node_modules/lodash/fp/dissocPath.js @@ -0,0 +1 @@ +module.exports = require('./unset'); diff --git a/backend/node_modules/lodash/fp/divide.js b/backend/node_modules/lodash/fp/divide.js new file mode 100644 index 00000000..82048c5e --- /dev/null +++ b/backend/node_modules/lodash/fp/divide.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('divide', require('../divide')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/drop.js b/backend/node_modules/lodash/fp/drop.js new file mode 100644 index 00000000..2fa9b4fa --- /dev/null +++ b/backend/node_modules/lodash/fp/drop.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('drop', require('../drop')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/dropLast.js b/backend/node_modules/lodash/fp/dropLast.js new file mode 100644 index 00000000..174e5255 --- /dev/null +++ b/backend/node_modules/lodash/fp/dropLast.js @@ -0,0 +1 @@ +module.exports = require('./dropRight'); diff --git a/backend/node_modules/lodash/fp/dropLastWhile.js b/backend/node_modules/lodash/fp/dropLastWhile.js new file mode 100644 index 00000000..be2a9d24 --- /dev/null +++ b/backend/node_modules/lodash/fp/dropLastWhile.js @@ -0,0 +1 @@ +module.exports = require('./dropRightWhile'); diff --git a/backend/node_modules/lodash/fp/dropRight.js b/backend/node_modules/lodash/fp/dropRight.js new file mode 100644 index 00000000..e98881fc --- /dev/null +++ b/backend/node_modules/lodash/fp/dropRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('dropRight', require('../dropRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/dropRightWhile.js b/backend/node_modules/lodash/fp/dropRightWhile.js new file mode 100644 index 00000000..cacaa701 --- /dev/null +++ b/backend/node_modules/lodash/fp/dropRightWhile.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('dropRightWhile', require('../dropRightWhile')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/dropWhile.js b/backend/node_modules/lodash/fp/dropWhile.js new file mode 100644 index 00000000..285f864d --- /dev/null +++ b/backend/node_modules/lodash/fp/dropWhile.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('dropWhile', require('../dropWhile')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/each.js 
b/backend/node_modules/lodash/fp/each.js new file mode 100644 index 00000000..8800f420 --- /dev/null +++ b/backend/node_modules/lodash/fp/each.js @@ -0,0 +1 @@ +module.exports = require('./forEach'); diff --git a/backend/node_modules/lodash/fp/eachRight.js b/backend/node_modules/lodash/fp/eachRight.js new file mode 100644 index 00000000..3252b2ab --- /dev/null +++ b/backend/node_modules/lodash/fp/eachRight.js @@ -0,0 +1 @@ +module.exports = require('./forEachRight'); diff --git a/backend/node_modules/lodash/fp/endsWith.js b/backend/node_modules/lodash/fp/endsWith.js new file mode 100644 index 00000000..17dc2a49 --- /dev/null +++ b/backend/node_modules/lodash/fp/endsWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('endsWith', require('../endsWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/entries.js b/backend/node_modules/lodash/fp/entries.js new file mode 100644 index 00000000..7a88df20 --- /dev/null +++ b/backend/node_modules/lodash/fp/entries.js @@ -0,0 +1 @@ +module.exports = require('./toPairs'); diff --git a/backend/node_modules/lodash/fp/entriesIn.js b/backend/node_modules/lodash/fp/entriesIn.js new file mode 100644 index 00000000..f6c6331c --- /dev/null +++ b/backend/node_modules/lodash/fp/entriesIn.js @@ -0,0 +1 @@ +module.exports = require('./toPairsIn'); diff --git a/backend/node_modules/lodash/fp/eq.js b/backend/node_modules/lodash/fp/eq.js new file mode 100644 index 00000000..9a3d21bf --- /dev/null +++ b/backend/node_modules/lodash/fp/eq.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('eq', require('../eq')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/equals.js b/backend/node_modules/lodash/fp/equals.js new file mode 100644 index 00000000..e6a5ce0c --- /dev/null +++ b/backend/node_modules/lodash/fp/equals.js @@ -0,0 +1 @@ +module.exports = require('./isEqual'); diff --git a/backend/node_modules/lodash/fp/escape.js b/backend/node_modules/lodash/fp/escape.js new file mode 100644 index 00000000..52c1fbba --- /dev/null +++ b/backend/node_modules/lodash/fp/escape.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('escape', require('../escape'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/escapeRegExp.js b/backend/node_modules/lodash/fp/escapeRegExp.js new file mode 100644 index 00000000..369b2eff --- /dev/null +++ b/backend/node_modules/lodash/fp/escapeRegExp.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('escapeRegExp', require('../escapeRegExp'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/every.js b/backend/node_modules/lodash/fp/every.js new file mode 100644 index 00000000..95c2776c --- /dev/null +++ b/backend/node_modules/lodash/fp/every.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('every', require('../every')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/extend.js b/backend/node_modules/lodash/fp/extend.js new file mode 100644 index 00000000..e00166c2 --- /dev/null +++ b/backend/node_modules/lodash/fp/extend.js @@ -0,0 +1 @@ +module.exports = require('./assignIn'); diff --git a/backend/node_modules/lodash/fp/extendAll.js 
b/backend/node_modules/lodash/fp/extendAll.js new file mode 100644 index 00000000..cc55b64f --- /dev/null +++ b/backend/node_modules/lodash/fp/extendAll.js @@ -0,0 +1 @@ +module.exports = require('./assignInAll'); diff --git a/backend/node_modules/lodash/fp/extendAllWith.js b/backend/node_modules/lodash/fp/extendAllWith.js new file mode 100644 index 00000000..6679d208 --- /dev/null +++ b/backend/node_modules/lodash/fp/extendAllWith.js @@ -0,0 +1 @@ +module.exports = require('./assignInAllWith'); diff --git a/backend/node_modules/lodash/fp/extendWith.js b/backend/node_modules/lodash/fp/extendWith.js new file mode 100644 index 00000000..dbdcb3b4 --- /dev/null +++ b/backend/node_modules/lodash/fp/extendWith.js @@ -0,0 +1 @@ +module.exports = require('./assignInWith'); diff --git a/backend/node_modules/lodash/fp/fill.js b/backend/node_modules/lodash/fp/fill.js new file mode 100644 index 00000000..b2d47e84 --- /dev/null +++ b/backend/node_modules/lodash/fp/fill.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('fill', require('../fill')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/filter.js b/backend/node_modules/lodash/fp/filter.js new file mode 100644 index 00000000..796d501c --- /dev/null +++ b/backend/node_modules/lodash/fp/filter.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('filter', require('../filter')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/find.js b/backend/node_modules/lodash/fp/find.js new file mode 100644 index 00000000..f805d336 --- /dev/null +++ b/backend/node_modules/lodash/fp/find.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('find', require('../find')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/findFrom.js b/backend/node_modules/lodash/fp/findFrom.js new file mode 100644 index 00000000..da8275e8 --- /dev/null +++ b/backend/node_modules/lodash/fp/findFrom.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('findFrom', require('../find')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/findIndex.js b/backend/node_modules/lodash/fp/findIndex.js new file mode 100644 index 00000000..8c15fd11 --- /dev/null +++ b/backend/node_modules/lodash/fp/findIndex.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('findIndex', require('../findIndex')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/findIndexFrom.js b/backend/node_modules/lodash/fp/findIndexFrom.js new file mode 100644 index 00000000..32e98cb9 --- /dev/null +++ b/backend/node_modules/lodash/fp/findIndexFrom.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('findIndexFrom', require('../findIndex')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/findKey.js b/backend/node_modules/lodash/fp/findKey.js new file mode 100644 index 00000000..475bcfa8 --- /dev/null +++ b/backend/node_modules/lodash/fp/findKey.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('findKey', require('../findKey')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/findLast.js 
b/backend/node_modules/lodash/fp/findLast.js new file mode 100644 index 00000000..093fe94e --- /dev/null +++ b/backend/node_modules/lodash/fp/findLast.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('findLast', require('../findLast')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/findLastFrom.js b/backend/node_modules/lodash/fp/findLastFrom.js new file mode 100644 index 00000000..76c38fba --- /dev/null +++ b/backend/node_modules/lodash/fp/findLastFrom.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('findLastFrom', require('../findLast')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/findLastIndex.js b/backend/node_modules/lodash/fp/findLastIndex.js new file mode 100644 index 00000000..36986df0 --- /dev/null +++ b/backend/node_modules/lodash/fp/findLastIndex.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('findLastIndex', require('../findLastIndex')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/findLastIndexFrom.js b/backend/node_modules/lodash/fp/findLastIndexFrom.js new file mode 100644 index 00000000..34c8176c --- /dev/null +++ b/backend/node_modules/lodash/fp/findLastIndexFrom.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('findLastIndexFrom', require('../findLastIndex')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/findLastKey.js b/backend/node_modules/lodash/fp/findLastKey.js new file mode 100644 index 00000000..5f81b604 --- /dev/null +++ b/backend/node_modules/lodash/fp/findLastKey.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('findLastKey', require('../findLastKey')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/first.js b/backend/node_modules/lodash/fp/first.js new file mode 100644 index 00000000..53f4ad13 --- /dev/null +++ b/backend/node_modules/lodash/fp/first.js @@ -0,0 +1 @@ +module.exports = require('./head'); diff --git a/backend/node_modules/lodash/fp/flatMap.js b/backend/node_modules/lodash/fp/flatMap.js new file mode 100644 index 00000000..d01dc4d0 --- /dev/null +++ b/backend/node_modules/lodash/fp/flatMap.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('flatMap', require('../flatMap')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/flatMapDeep.js b/backend/node_modules/lodash/fp/flatMapDeep.js new file mode 100644 index 00000000..569c42eb --- /dev/null +++ b/backend/node_modules/lodash/fp/flatMapDeep.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('flatMapDeep', require('../flatMapDeep')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/flatMapDepth.js b/backend/node_modules/lodash/fp/flatMapDepth.js new file mode 100644 index 00000000..6eb68fde --- /dev/null +++ b/backend/node_modules/lodash/fp/flatMapDepth.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('flatMapDepth', require('../flatMapDepth')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/flatten.js b/backend/node_modules/lodash/fp/flatten.js new file mode 100644 
index 00000000..30425d89 --- /dev/null +++ b/backend/node_modules/lodash/fp/flatten.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('flatten', require('../flatten'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/flattenDeep.js b/backend/node_modules/lodash/fp/flattenDeep.js new file mode 100644 index 00000000..aed5db27 --- /dev/null +++ b/backend/node_modules/lodash/fp/flattenDeep.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('flattenDeep', require('../flattenDeep'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/flattenDepth.js b/backend/node_modules/lodash/fp/flattenDepth.js new file mode 100644 index 00000000..ad65e378 --- /dev/null +++ b/backend/node_modules/lodash/fp/flattenDepth.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('flattenDepth', require('../flattenDepth')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/flip.js b/backend/node_modules/lodash/fp/flip.js new file mode 100644 index 00000000..0547e7b4 --- /dev/null +++ b/backend/node_modules/lodash/fp/flip.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('flip', require('../flip'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/floor.js b/backend/node_modules/lodash/fp/floor.js new file mode 100644 index 00000000..a6cf3358 --- /dev/null +++ b/backend/node_modules/lodash/fp/floor.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('floor', require('../floor')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/flow.js b/backend/node_modules/lodash/fp/flow.js new file mode 100644 index 00000000..cd83677a --- /dev/null +++ b/backend/node_modules/lodash/fp/flow.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('flow', require('../flow')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/flowRight.js b/backend/node_modules/lodash/fp/flowRight.js new file mode 100644 index 00000000..972a5b9b --- /dev/null +++ b/backend/node_modules/lodash/fp/flowRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('flowRight', require('../flowRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/forEach.js b/backend/node_modules/lodash/fp/forEach.js new file mode 100644 index 00000000..2f494521 --- /dev/null +++ b/backend/node_modules/lodash/fp/forEach.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('forEach', require('../forEach')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/forEachRight.js b/backend/node_modules/lodash/fp/forEachRight.js new file mode 100644 index 00000000..3ff97336 --- /dev/null +++ b/backend/node_modules/lodash/fp/forEachRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('forEachRight', require('../forEachRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/forIn.js b/backend/node_modules/lodash/fp/forIn.js 
new file mode 100644 index 00000000..9341749b --- /dev/null +++ b/backend/node_modules/lodash/fp/forIn.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('forIn', require('../forIn')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/forInRight.js b/backend/node_modules/lodash/fp/forInRight.js new file mode 100644 index 00000000..cecf8bbf --- /dev/null +++ b/backend/node_modules/lodash/fp/forInRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('forInRight', require('../forInRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/forOwn.js b/backend/node_modules/lodash/fp/forOwn.js new file mode 100644 index 00000000..246449e9 --- /dev/null +++ b/backend/node_modules/lodash/fp/forOwn.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('forOwn', require('../forOwn')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/forOwnRight.js b/backend/node_modules/lodash/fp/forOwnRight.js new file mode 100644 index 00000000..c5e826e0 --- /dev/null +++ b/backend/node_modules/lodash/fp/forOwnRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('forOwnRight', require('../forOwnRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/fromPairs.js b/backend/node_modules/lodash/fp/fromPairs.js new file mode 100644 index 00000000..f8cc5968 --- /dev/null +++ b/backend/node_modules/lodash/fp/fromPairs.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('fromPairs', require('../fromPairs')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/function.js b/backend/node_modules/lodash/fp/function.js new file mode 100644 index 00000000..dfe69b1f --- /dev/null +++ b/backend/node_modules/lodash/fp/function.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../function')); diff --git a/backend/node_modules/lodash/fp/functions.js b/backend/node_modules/lodash/fp/functions.js new file mode 100644 index 00000000..09d1bb1b --- /dev/null +++ b/backend/node_modules/lodash/fp/functions.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('functions', require('../functions'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/functionsIn.js b/backend/node_modules/lodash/fp/functionsIn.js new file mode 100644 index 00000000..2cfeb83e --- /dev/null +++ b/backend/node_modules/lodash/fp/functionsIn.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('functionsIn', require('../functionsIn'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/get.js b/backend/node_modules/lodash/fp/get.js new file mode 100644 index 00000000..6d3a3286 --- /dev/null +++ b/backend/node_modules/lodash/fp/get.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('get', require('../get')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/getOr.js b/backend/node_modules/lodash/fp/getOr.js new file mode 100644 index 00000000..7dbf771f --- /dev/null +++ 
b/backend/node_modules/lodash/fp/getOr.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('getOr', require('../get')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/groupBy.js b/backend/node_modules/lodash/fp/groupBy.js new file mode 100644 index 00000000..fc0bc78a --- /dev/null +++ b/backend/node_modules/lodash/fp/groupBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('groupBy', require('../groupBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/gt.js b/backend/node_modules/lodash/fp/gt.js new file mode 100644 index 00000000..9e57c808 --- /dev/null +++ b/backend/node_modules/lodash/fp/gt.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('gt', require('../gt')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/gte.js b/backend/node_modules/lodash/fp/gte.js new file mode 100644 index 00000000..45847863 --- /dev/null +++ b/backend/node_modules/lodash/fp/gte.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('gte', require('../gte')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/has.js b/backend/node_modules/lodash/fp/has.js new file mode 100644 index 00000000..b9012983 --- /dev/null +++ b/backend/node_modules/lodash/fp/has.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('has', require('../has')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/hasIn.js b/backend/node_modules/lodash/fp/hasIn.js new file mode 100644 index 00000000..b3c3d1a3 --- /dev/null +++ b/backend/node_modules/lodash/fp/hasIn.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('hasIn', require('../hasIn')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/head.js b/backend/node_modules/lodash/fp/head.js new file mode 100644 index 00000000..2694f0a2 --- /dev/null +++ b/backend/node_modules/lodash/fp/head.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('head', require('../head'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/identical.js b/backend/node_modules/lodash/fp/identical.js new file mode 100644 index 00000000..85563f4a --- /dev/null +++ b/backend/node_modules/lodash/fp/identical.js @@ -0,0 +1 @@ +module.exports = require('./eq'); diff --git a/backend/node_modules/lodash/fp/identity.js b/backend/node_modules/lodash/fp/identity.js new file mode 100644 index 00000000..096415a5 --- /dev/null +++ b/backend/node_modules/lodash/fp/identity.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('identity', require('../identity'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/inRange.js b/backend/node_modules/lodash/fp/inRange.js new file mode 100644 index 00000000..202d940b --- /dev/null +++ b/backend/node_modules/lodash/fp/inRange.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('inRange', require('../inRange')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git 
a/backend/node_modules/lodash/fp/includes.js b/backend/node_modules/lodash/fp/includes.js new file mode 100644 index 00000000..11467805 --- /dev/null +++ b/backend/node_modules/lodash/fp/includes.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('includes', require('../includes')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/includesFrom.js b/backend/node_modules/lodash/fp/includesFrom.js new file mode 100644 index 00000000..683afdb4 --- /dev/null +++ b/backend/node_modules/lodash/fp/includesFrom.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('includesFrom', require('../includes')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/indexBy.js b/backend/node_modules/lodash/fp/indexBy.js new file mode 100644 index 00000000..7e64bc0f --- /dev/null +++ b/backend/node_modules/lodash/fp/indexBy.js @@ -0,0 +1 @@ +module.exports = require('./keyBy'); diff --git a/backend/node_modules/lodash/fp/indexOf.js b/backend/node_modules/lodash/fp/indexOf.js new file mode 100644 index 00000000..524658eb --- /dev/null +++ b/backend/node_modules/lodash/fp/indexOf.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('indexOf', require('../indexOf')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/indexOfFrom.js b/backend/node_modules/lodash/fp/indexOfFrom.js new file mode 100644 index 00000000..d99c822f --- /dev/null +++ b/backend/node_modules/lodash/fp/indexOfFrom.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('indexOfFrom', require('../indexOf')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/init.js b/backend/node_modules/lodash/fp/init.js new file mode 100644 index 00000000..2f88d8b0 --- /dev/null +++ b/backend/node_modules/lodash/fp/init.js @@ -0,0 +1 @@ +module.exports = require('./initial'); diff --git a/backend/node_modules/lodash/fp/initial.js b/backend/node_modules/lodash/fp/initial.js new file mode 100644 index 00000000..b732ba0b --- /dev/null +++ b/backend/node_modules/lodash/fp/initial.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('initial', require('../initial'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/intersection.js b/backend/node_modules/lodash/fp/intersection.js new file mode 100644 index 00000000..52936d56 --- /dev/null +++ b/backend/node_modules/lodash/fp/intersection.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('intersection', require('../intersection')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/intersectionBy.js b/backend/node_modules/lodash/fp/intersectionBy.js new file mode 100644 index 00000000..72629f27 --- /dev/null +++ b/backend/node_modules/lodash/fp/intersectionBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('intersectionBy', require('../intersectionBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/intersectionWith.js b/backend/node_modules/lodash/fp/intersectionWith.js new file mode 100644 index 00000000..e064f400 --- /dev/null +++ b/backend/node_modules/lodash/fp/intersectionWith.js @@ 
-0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('intersectionWith', require('../intersectionWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/invert.js b/backend/node_modules/lodash/fp/invert.js new file mode 100644 index 00000000..2d5d1f0d --- /dev/null +++ b/backend/node_modules/lodash/fp/invert.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('invert', require('../invert')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/invertBy.js b/backend/node_modules/lodash/fp/invertBy.js new file mode 100644 index 00000000..63ca97ec --- /dev/null +++ b/backend/node_modules/lodash/fp/invertBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('invertBy', require('../invertBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/invertObj.js b/backend/node_modules/lodash/fp/invertObj.js new file mode 100644 index 00000000..f1d842e4 --- /dev/null +++ b/backend/node_modules/lodash/fp/invertObj.js @@ -0,0 +1 @@ +module.exports = require('./invert'); diff --git a/backend/node_modules/lodash/fp/invoke.js b/backend/node_modules/lodash/fp/invoke.js new file mode 100644 index 00000000..fcf17f0d --- /dev/null +++ b/backend/node_modules/lodash/fp/invoke.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('invoke', require('../invoke')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/invokeArgs.js b/backend/node_modules/lodash/fp/invokeArgs.js new file mode 100644 index 00000000..d3f2953f --- /dev/null +++ b/backend/node_modules/lodash/fp/invokeArgs.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('invokeArgs', require('../invoke')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/invokeArgsMap.js b/backend/node_modules/lodash/fp/invokeArgsMap.js new file mode 100644 index 00000000..eaa9f84f --- /dev/null +++ b/backend/node_modules/lodash/fp/invokeArgsMap.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('invokeArgsMap', require('../invokeMap')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/invokeMap.js b/backend/node_modules/lodash/fp/invokeMap.js new file mode 100644 index 00000000..6515fd73 --- /dev/null +++ b/backend/node_modules/lodash/fp/invokeMap.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('invokeMap', require('../invokeMap')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isArguments.js b/backend/node_modules/lodash/fp/isArguments.js new file mode 100644 index 00000000..1d93c9e5 --- /dev/null +++ b/backend/node_modules/lodash/fp/isArguments.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isArguments', require('../isArguments'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isArray.js b/backend/node_modules/lodash/fp/isArray.js new file mode 100644 index 00000000..ba7ade8d --- /dev/null +++ b/backend/node_modules/lodash/fp/isArray.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isArray', require('../isArray'), 
require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isArrayBuffer.js b/backend/node_modules/lodash/fp/isArrayBuffer.js new file mode 100644 index 00000000..5088513f --- /dev/null +++ b/backend/node_modules/lodash/fp/isArrayBuffer.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isArrayBuffer', require('../isArrayBuffer'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isArrayLike.js b/backend/node_modules/lodash/fp/isArrayLike.js new file mode 100644 index 00000000..8f1856bf --- /dev/null +++ b/backend/node_modules/lodash/fp/isArrayLike.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isArrayLike', require('../isArrayLike'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isArrayLikeObject.js b/backend/node_modules/lodash/fp/isArrayLikeObject.js new file mode 100644 index 00000000..21084984 --- /dev/null +++ b/backend/node_modules/lodash/fp/isArrayLikeObject.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isArrayLikeObject', require('../isArrayLikeObject'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isBoolean.js b/backend/node_modules/lodash/fp/isBoolean.js new file mode 100644 index 00000000..9339f75b --- /dev/null +++ b/backend/node_modules/lodash/fp/isBoolean.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isBoolean', require('../isBoolean'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isBuffer.js b/backend/node_modules/lodash/fp/isBuffer.js new file mode 100644 index 00000000..e60b1238 --- /dev/null +++ b/backend/node_modules/lodash/fp/isBuffer.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isBuffer', require('../isBuffer'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isDate.js b/backend/node_modules/lodash/fp/isDate.js new file mode 100644 index 00000000..dc41d089 --- /dev/null +++ b/backend/node_modules/lodash/fp/isDate.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isDate', require('../isDate'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isElement.js b/backend/node_modules/lodash/fp/isElement.js new file mode 100644 index 00000000..18ee039a --- /dev/null +++ b/backend/node_modules/lodash/fp/isElement.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isElement', require('../isElement'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isEmpty.js b/backend/node_modules/lodash/fp/isEmpty.js new file mode 100644 index 00000000..0f4ae841 --- /dev/null +++ b/backend/node_modules/lodash/fp/isEmpty.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isEmpty', require('../isEmpty'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git 
a/backend/node_modules/lodash/fp/isEqual.js b/backend/node_modules/lodash/fp/isEqual.js new file mode 100644 index 00000000..41383865 --- /dev/null +++ b/backend/node_modules/lodash/fp/isEqual.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isEqual', require('../isEqual')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isEqualWith.js b/backend/node_modules/lodash/fp/isEqualWith.js new file mode 100644 index 00000000..029ff5cd --- /dev/null +++ b/backend/node_modules/lodash/fp/isEqualWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isEqualWith', require('../isEqualWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isError.js b/backend/node_modules/lodash/fp/isError.js new file mode 100644 index 00000000..3dfd81cc --- /dev/null +++ b/backend/node_modules/lodash/fp/isError.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isError', require('../isError'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isFinite.js b/backend/node_modules/lodash/fp/isFinite.js new file mode 100644 index 00000000..0b647b84 --- /dev/null +++ b/backend/node_modules/lodash/fp/isFinite.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isFinite', require('../isFinite'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isFunction.js b/backend/node_modules/lodash/fp/isFunction.js new file mode 100644 index 00000000..ff8e5c45 --- /dev/null +++ b/backend/node_modules/lodash/fp/isFunction.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isFunction', require('../isFunction'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isInteger.js b/backend/node_modules/lodash/fp/isInteger.js new file mode 100644 index 00000000..67af4ff6 --- /dev/null +++ b/backend/node_modules/lodash/fp/isInteger.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isInteger', require('../isInteger'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isLength.js b/backend/node_modules/lodash/fp/isLength.js new file mode 100644 index 00000000..fc101c5a --- /dev/null +++ b/backend/node_modules/lodash/fp/isLength.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isLength', require('../isLength'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isMap.js b/backend/node_modules/lodash/fp/isMap.js new file mode 100644 index 00000000..a209aa66 --- /dev/null +++ b/backend/node_modules/lodash/fp/isMap.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isMap', require('../isMap'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isMatch.js b/backend/node_modules/lodash/fp/isMatch.js new file mode 100644 index 00000000..6264ca17 --- /dev/null +++ b/backend/node_modules/lodash/fp/isMatch.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = 
convert('isMatch', require('../isMatch')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isMatchWith.js b/backend/node_modules/lodash/fp/isMatchWith.js new file mode 100644 index 00000000..d95f3193 --- /dev/null +++ b/backend/node_modules/lodash/fp/isMatchWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isMatchWith', require('../isMatchWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isNaN.js b/backend/node_modules/lodash/fp/isNaN.js new file mode 100644 index 00000000..66a978f1 --- /dev/null +++ b/backend/node_modules/lodash/fp/isNaN.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isNaN', require('../isNaN'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isNative.js b/backend/node_modules/lodash/fp/isNative.js new file mode 100644 index 00000000..3d775ba9 --- /dev/null +++ b/backend/node_modules/lodash/fp/isNative.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isNative', require('../isNative'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isNil.js b/backend/node_modules/lodash/fp/isNil.js new file mode 100644 index 00000000..5952c028 --- /dev/null +++ b/backend/node_modules/lodash/fp/isNil.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isNil', require('../isNil'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isNull.js b/backend/node_modules/lodash/fp/isNull.js new file mode 100644 index 00000000..f201a354 --- /dev/null +++ b/backend/node_modules/lodash/fp/isNull.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isNull', require('../isNull'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isNumber.js b/backend/node_modules/lodash/fp/isNumber.js new file mode 100644 index 00000000..a2b5fa04 --- /dev/null +++ b/backend/node_modules/lodash/fp/isNumber.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isNumber', require('../isNumber'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isObject.js b/backend/node_modules/lodash/fp/isObject.js new file mode 100644 index 00000000..231ace03 --- /dev/null +++ b/backend/node_modules/lodash/fp/isObject.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isObject', require('../isObject'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isObjectLike.js b/backend/node_modules/lodash/fp/isObjectLike.js new file mode 100644 index 00000000..f16082e6 --- /dev/null +++ b/backend/node_modules/lodash/fp/isObjectLike.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isObjectLike', require('../isObjectLike'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isPlainObject.js b/backend/node_modules/lodash/fp/isPlainObject.js new file mode 100644 
index 00000000..b5bea90d --- /dev/null +++ b/backend/node_modules/lodash/fp/isPlainObject.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isPlainObject', require('../isPlainObject'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isRegExp.js b/backend/node_modules/lodash/fp/isRegExp.js new file mode 100644 index 00000000..12a1a3d7 --- /dev/null +++ b/backend/node_modules/lodash/fp/isRegExp.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isRegExp', require('../isRegExp'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isSafeInteger.js b/backend/node_modules/lodash/fp/isSafeInteger.js new file mode 100644 index 00000000..7230f552 --- /dev/null +++ b/backend/node_modules/lodash/fp/isSafeInteger.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isSafeInteger', require('../isSafeInteger'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isSet.js b/backend/node_modules/lodash/fp/isSet.js new file mode 100644 index 00000000..35c01f6f --- /dev/null +++ b/backend/node_modules/lodash/fp/isSet.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isSet', require('../isSet'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isString.js b/backend/node_modules/lodash/fp/isString.js new file mode 100644 index 00000000..1fd0679e --- /dev/null +++ b/backend/node_modules/lodash/fp/isString.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isString', require('../isString'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isSymbol.js b/backend/node_modules/lodash/fp/isSymbol.js new file mode 100644 index 00000000..38676956 --- /dev/null +++ b/backend/node_modules/lodash/fp/isSymbol.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isSymbol', require('../isSymbol'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isTypedArray.js b/backend/node_modules/lodash/fp/isTypedArray.js new file mode 100644 index 00000000..85679538 --- /dev/null +++ b/backend/node_modules/lodash/fp/isTypedArray.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isTypedArray', require('../isTypedArray'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isUndefined.js b/backend/node_modules/lodash/fp/isUndefined.js new file mode 100644 index 00000000..ddbca31c --- /dev/null +++ b/backend/node_modules/lodash/fp/isUndefined.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isUndefined', require('../isUndefined'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isWeakMap.js b/backend/node_modules/lodash/fp/isWeakMap.js new file mode 100644 index 00000000..ef60c613 --- /dev/null +++ b/backend/node_modules/lodash/fp/isWeakMap.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = 
convert('isWeakMap', require('../isWeakMap'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/isWeakSet.js b/backend/node_modules/lodash/fp/isWeakSet.js new file mode 100644 index 00000000..c99bfaa6 --- /dev/null +++ b/backend/node_modules/lodash/fp/isWeakSet.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('isWeakSet', require('../isWeakSet'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/iteratee.js b/backend/node_modules/lodash/fp/iteratee.js new file mode 100644 index 00000000..9f0f7173 --- /dev/null +++ b/backend/node_modules/lodash/fp/iteratee.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('iteratee', require('../iteratee')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/join.js b/backend/node_modules/lodash/fp/join.js new file mode 100644 index 00000000..a220e003 --- /dev/null +++ b/backend/node_modules/lodash/fp/join.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('join', require('../join')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/juxt.js b/backend/node_modules/lodash/fp/juxt.js new file mode 100644 index 00000000..f71e04e0 --- /dev/null +++ b/backend/node_modules/lodash/fp/juxt.js @@ -0,0 +1 @@ +module.exports = require('./over'); diff --git a/backend/node_modules/lodash/fp/kebabCase.js b/backend/node_modules/lodash/fp/kebabCase.js new file mode 100644 index 00000000..60737f17 --- /dev/null +++ b/backend/node_modules/lodash/fp/kebabCase.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('kebabCase', require('../kebabCase'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/keyBy.js b/backend/node_modules/lodash/fp/keyBy.js new file mode 100644 index 00000000..9a6a85d4 --- /dev/null +++ b/backend/node_modules/lodash/fp/keyBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('keyBy', require('../keyBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/keys.js b/backend/node_modules/lodash/fp/keys.js new file mode 100644 index 00000000..e12bb07f --- /dev/null +++ b/backend/node_modules/lodash/fp/keys.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('keys', require('../keys'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/keysIn.js b/backend/node_modules/lodash/fp/keysIn.js new file mode 100644 index 00000000..f3eb36a8 --- /dev/null +++ b/backend/node_modules/lodash/fp/keysIn.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('keysIn', require('../keysIn'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/lang.js b/backend/node_modules/lodash/fp/lang.js new file mode 100644 index 00000000..08cc9c14 --- /dev/null +++ b/backend/node_modules/lodash/fp/lang.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../lang')); diff --git a/backend/node_modules/lodash/fp/last.js 
b/backend/node_modules/lodash/fp/last.js new file mode 100644 index 00000000..0f716993 --- /dev/null +++ b/backend/node_modules/lodash/fp/last.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('last', require('../last'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/lastIndexOf.js b/backend/node_modules/lodash/fp/lastIndexOf.js new file mode 100644 index 00000000..ddf39c30 --- /dev/null +++ b/backend/node_modules/lodash/fp/lastIndexOf.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('lastIndexOf', require('../lastIndexOf')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/lastIndexOfFrom.js b/backend/node_modules/lodash/fp/lastIndexOfFrom.js new file mode 100644 index 00000000..1ff6a0b5 --- /dev/null +++ b/backend/node_modules/lodash/fp/lastIndexOfFrom.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('lastIndexOfFrom', require('../lastIndexOf')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/lowerCase.js b/backend/node_modules/lodash/fp/lowerCase.js new file mode 100644 index 00000000..ea64bc15 --- /dev/null +++ b/backend/node_modules/lodash/fp/lowerCase.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('lowerCase', require('../lowerCase'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/lowerFirst.js b/backend/node_modules/lodash/fp/lowerFirst.js new file mode 100644 index 00000000..539720a3 --- /dev/null +++ b/backend/node_modules/lodash/fp/lowerFirst.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('lowerFirst', require('../lowerFirst'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/lt.js b/backend/node_modules/lodash/fp/lt.js new file mode 100644 index 00000000..a31d21ec --- /dev/null +++ b/backend/node_modules/lodash/fp/lt.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('lt', require('../lt')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/lte.js b/backend/node_modules/lodash/fp/lte.js new file mode 100644 index 00000000..d795d10e --- /dev/null +++ b/backend/node_modules/lodash/fp/lte.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('lte', require('../lte')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/map.js b/backend/node_modules/lodash/fp/map.js new file mode 100644 index 00000000..cf987943 --- /dev/null +++ b/backend/node_modules/lodash/fp/map.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('map', require('../map')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/mapKeys.js b/backend/node_modules/lodash/fp/mapKeys.js new file mode 100644 index 00000000..16845870 --- /dev/null +++ b/backend/node_modules/lodash/fp/mapKeys.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('mapKeys', require('../mapKeys')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/mapValues.js 
b/backend/node_modules/lodash/fp/mapValues.js new file mode 100644 index 00000000..40049727 --- /dev/null +++ b/backend/node_modules/lodash/fp/mapValues.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('mapValues', require('../mapValues')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/matches.js b/backend/node_modules/lodash/fp/matches.js new file mode 100644 index 00000000..29d1e1e4 --- /dev/null +++ b/backend/node_modules/lodash/fp/matches.js @@ -0,0 +1 @@ +module.exports = require('./isMatch'); diff --git a/backend/node_modules/lodash/fp/matchesProperty.js b/backend/node_modules/lodash/fp/matchesProperty.js new file mode 100644 index 00000000..4575bd24 --- /dev/null +++ b/backend/node_modules/lodash/fp/matchesProperty.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('matchesProperty', require('../matchesProperty')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/math.js b/backend/node_modules/lodash/fp/math.js new file mode 100644 index 00000000..e8f50f79 --- /dev/null +++ b/backend/node_modules/lodash/fp/math.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../math')); diff --git a/backend/node_modules/lodash/fp/max.js b/backend/node_modules/lodash/fp/max.js new file mode 100644 index 00000000..a66acac2 --- /dev/null +++ b/backend/node_modules/lodash/fp/max.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('max', require('../max'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/maxBy.js b/backend/node_modules/lodash/fp/maxBy.js new file mode 100644 index 00000000..d083fd64 --- /dev/null +++ b/backend/node_modules/lodash/fp/maxBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('maxBy', require('../maxBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/mean.js b/backend/node_modules/lodash/fp/mean.js new file mode 100644 index 00000000..31172460 --- /dev/null +++ b/backend/node_modules/lodash/fp/mean.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('mean', require('../mean'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/meanBy.js b/backend/node_modules/lodash/fp/meanBy.js new file mode 100644 index 00000000..556f25ed --- /dev/null +++ b/backend/node_modules/lodash/fp/meanBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('meanBy', require('../meanBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/memoize.js b/backend/node_modules/lodash/fp/memoize.js new file mode 100644 index 00000000..638eec63 --- /dev/null +++ b/backend/node_modules/lodash/fp/memoize.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('memoize', require('../memoize')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/merge.js b/backend/node_modules/lodash/fp/merge.js new file mode 100644 index 00000000..ac66adde --- /dev/null +++ b/backend/node_modules/lodash/fp/merge.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('merge', require('../merge')); + 
+func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/mergeAll.js b/backend/node_modules/lodash/fp/mergeAll.js new file mode 100644 index 00000000..a3674d67 --- /dev/null +++ b/backend/node_modules/lodash/fp/mergeAll.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('mergeAll', require('../merge')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/mergeAllWith.js b/backend/node_modules/lodash/fp/mergeAllWith.js new file mode 100644 index 00000000..4bd4206d --- /dev/null +++ b/backend/node_modules/lodash/fp/mergeAllWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('mergeAllWith', require('../mergeWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/mergeWith.js b/backend/node_modules/lodash/fp/mergeWith.js new file mode 100644 index 00000000..00d44d5e --- /dev/null +++ b/backend/node_modules/lodash/fp/mergeWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('mergeWith', require('../mergeWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/method.js b/backend/node_modules/lodash/fp/method.js new file mode 100644 index 00000000..f4060c68 --- /dev/null +++ b/backend/node_modules/lodash/fp/method.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('method', require('../method')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/methodOf.js b/backend/node_modules/lodash/fp/methodOf.js new file mode 100644 index 00000000..61399056 --- /dev/null +++ b/backend/node_modules/lodash/fp/methodOf.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('methodOf', require('../methodOf')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/min.js b/backend/node_modules/lodash/fp/min.js new file mode 100644 index 00000000..d12c6b40 --- /dev/null +++ b/backend/node_modules/lodash/fp/min.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('min', require('../min'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/minBy.js b/backend/node_modules/lodash/fp/minBy.js new file mode 100644 index 00000000..fdb9e24d --- /dev/null +++ b/backend/node_modules/lodash/fp/minBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('minBy', require('../minBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/mixin.js b/backend/node_modules/lodash/fp/mixin.js new file mode 100644 index 00000000..332e6fbf --- /dev/null +++ b/backend/node_modules/lodash/fp/mixin.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('mixin', require('../mixin')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/multiply.js b/backend/node_modules/lodash/fp/multiply.js new file mode 100644 index 00000000..4dcf0b0d --- /dev/null +++ b/backend/node_modules/lodash/fp/multiply.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('multiply', require('../multiply')); + +func.placeholder = require('./placeholder'); +module.exports = func; 
diff --git a/backend/node_modules/lodash/fp/nAry.js b/backend/node_modules/lodash/fp/nAry.js new file mode 100644 index 00000000..f262a76c --- /dev/null +++ b/backend/node_modules/lodash/fp/nAry.js @@ -0,0 +1 @@ +module.exports = require('./ary'); diff --git a/backend/node_modules/lodash/fp/negate.js b/backend/node_modules/lodash/fp/negate.js new file mode 100644 index 00000000..8b6dc7c5 --- /dev/null +++ b/backend/node_modules/lodash/fp/negate.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('negate', require('../negate'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/next.js b/backend/node_modules/lodash/fp/next.js new file mode 100644 index 00000000..140155e2 --- /dev/null +++ b/backend/node_modules/lodash/fp/next.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('next', require('../next'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/noop.js b/backend/node_modules/lodash/fp/noop.js new file mode 100644 index 00000000..b9e32cc8 --- /dev/null +++ b/backend/node_modules/lodash/fp/noop.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('noop', require('../noop'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/now.js b/backend/node_modules/lodash/fp/now.js new file mode 100644 index 00000000..6de2068a --- /dev/null +++ b/backend/node_modules/lodash/fp/now.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('now', require('../now'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/nth.js b/backend/node_modules/lodash/fp/nth.js new file mode 100644 index 00000000..da4fda74 --- /dev/null +++ b/backend/node_modules/lodash/fp/nth.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('nth', require('../nth')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/nthArg.js b/backend/node_modules/lodash/fp/nthArg.js new file mode 100644 index 00000000..fce31659 --- /dev/null +++ b/backend/node_modules/lodash/fp/nthArg.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('nthArg', require('../nthArg')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/number.js b/backend/node_modules/lodash/fp/number.js new file mode 100644 index 00000000..5c10b884 --- /dev/null +++ b/backend/node_modules/lodash/fp/number.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../number')); diff --git a/backend/node_modules/lodash/fp/object.js b/backend/node_modules/lodash/fp/object.js new file mode 100644 index 00000000..ae39a134 --- /dev/null +++ b/backend/node_modules/lodash/fp/object.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../object')); diff --git a/backend/node_modules/lodash/fp/omit.js b/backend/node_modules/lodash/fp/omit.js new file mode 100644 index 00000000..fd685291 --- /dev/null +++ b/backend/node_modules/lodash/fp/omit.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('omit', require('../omit')); + +func.placeholder = require('./placeholder'); 
+module.exports = func; diff --git a/backend/node_modules/lodash/fp/omitAll.js b/backend/node_modules/lodash/fp/omitAll.js new file mode 100644 index 00000000..144cf4b9 --- /dev/null +++ b/backend/node_modules/lodash/fp/omitAll.js @@ -0,0 +1 @@ +module.exports = require('./omit'); diff --git a/backend/node_modules/lodash/fp/omitBy.js b/backend/node_modules/lodash/fp/omitBy.js new file mode 100644 index 00000000..90df7380 --- /dev/null +++ b/backend/node_modules/lodash/fp/omitBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('omitBy', require('../omitBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/once.js b/backend/node_modules/lodash/fp/once.js new file mode 100644 index 00000000..f8f0a5c7 --- /dev/null +++ b/backend/node_modules/lodash/fp/once.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('once', require('../once'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/orderBy.js b/backend/node_modules/lodash/fp/orderBy.js new file mode 100644 index 00000000..848e2107 --- /dev/null +++ b/backend/node_modules/lodash/fp/orderBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('orderBy', require('../orderBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/over.js b/backend/node_modules/lodash/fp/over.js new file mode 100644 index 00000000..01eba7b9 --- /dev/null +++ b/backend/node_modules/lodash/fp/over.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('over', require('../over')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/overArgs.js b/backend/node_modules/lodash/fp/overArgs.js new file mode 100644 index 00000000..738556f0 --- /dev/null +++ b/backend/node_modules/lodash/fp/overArgs.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('overArgs', require('../overArgs')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/overEvery.js b/backend/node_modules/lodash/fp/overEvery.js new file mode 100644 index 00000000..9f5a032d --- /dev/null +++ b/backend/node_modules/lodash/fp/overEvery.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('overEvery', require('../overEvery')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/overSome.js b/backend/node_modules/lodash/fp/overSome.js new file mode 100644 index 00000000..15939d58 --- /dev/null +++ b/backend/node_modules/lodash/fp/overSome.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('overSome', require('../overSome')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/pad.js b/backend/node_modules/lodash/fp/pad.js new file mode 100644 index 00000000..f1dea4a9 --- /dev/null +++ b/backend/node_modules/lodash/fp/pad.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('pad', require('../pad')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/padChars.js b/backend/node_modules/lodash/fp/padChars.js new file mode 100644 index 00000000..d6e0804c --- /dev/null +++ 
b/backend/node_modules/lodash/fp/padChars.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('padChars', require('../pad')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/padCharsEnd.js b/backend/node_modules/lodash/fp/padCharsEnd.js new file mode 100644 index 00000000..d4ab79ad --- /dev/null +++ b/backend/node_modules/lodash/fp/padCharsEnd.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('padCharsEnd', require('../padEnd')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/padCharsStart.js b/backend/node_modules/lodash/fp/padCharsStart.js new file mode 100644 index 00000000..a08a3000 --- /dev/null +++ b/backend/node_modules/lodash/fp/padCharsStart.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('padCharsStart', require('../padStart')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/padEnd.js b/backend/node_modules/lodash/fp/padEnd.js new file mode 100644 index 00000000..a8522ec3 --- /dev/null +++ b/backend/node_modules/lodash/fp/padEnd.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('padEnd', require('../padEnd')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/padStart.js b/backend/node_modules/lodash/fp/padStart.js new file mode 100644 index 00000000..f4ca79d4 --- /dev/null +++ b/backend/node_modules/lodash/fp/padStart.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('padStart', require('../padStart')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/parseInt.js b/backend/node_modules/lodash/fp/parseInt.js new file mode 100644 index 00000000..27314ccb --- /dev/null +++ b/backend/node_modules/lodash/fp/parseInt.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('parseInt', require('../parseInt')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/partial.js b/backend/node_modules/lodash/fp/partial.js new file mode 100644 index 00000000..5d460159 --- /dev/null +++ b/backend/node_modules/lodash/fp/partial.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('partial', require('../partial')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/partialRight.js b/backend/node_modules/lodash/fp/partialRight.js new file mode 100644 index 00000000..7f05fed0 --- /dev/null +++ b/backend/node_modules/lodash/fp/partialRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('partialRight', require('../partialRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/partition.js b/backend/node_modules/lodash/fp/partition.js new file mode 100644 index 00000000..2ebcacc1 --- /dev/null +++ b/backend/node_modules/lodash/fp/partition.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('partition', require('../partition')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/path.js b/backend/node_modules/lodash/fp/path.js new file mode 100644 index 00000000..b29cfb21 --- /dev/null +++ 
b/backend/node_modules/lodash/fp/path.js @@ -0,0 +1 @@ +module.exports = require('./get'); diff --git a/backend/node_modules/lodash/fp/pathEq.js b/backend/node_modules/lodash/fp/pathEq.js new file mode 100644 index 00000000..36c027a3 --- /dev/null +++ b/backend/node_modules/lodash/fp/pathEq.js @@ -0,0 +1 @@ +module.exports = require('./matchesProperty'); diff --git a/backend/node_modules/lodash/fp/pathOr.js b/backend/node_modules/lodash/fp/pathOr.js new file mode 100644 index 00000000..4ab58209 --- /dev/null +++ b/backend/node_modules/lodash/fp/pathOr.js @@ -0,0 +1 @@ +module.exports = require('./getOr'); diff --git a/backend/node_modules/lodash/fp/paths.js b/backend/node_modules/lodash/fp/paths.js new file mode 100644 index 00000000..1eb7950a --- /dev/null +++ b/backend/node_modules/lodash/fp/paths.js @@ -0,0 +1 @@ +module.exports = require('./at'); diff --git a/backend/node_modules/lodash/fp/pick.js b/backend/node_modules/lodash/fp/pick.js new file mode 100644 index 00000000..197393de --- /dev/null +++ b/backend/node_modules/lodash/fp/pick.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('pick', require('../pick')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/pickAll.js b/backend/node_modules/lodash/fp/pickAll.js new file mode 100644 index 00000000..a8ecd461 --- /dev/null +++ b/backend/node_modules/lodash/fp/pickAll.js @@ -0,0 +1 @@ +module.exports = require('./pick'); diff --git a/backend/node_modules/lodash/fp/pickBy.js b/backend/node_modules/lodash/fp/pickBy.js new file mode 100644 index 00000000..d832d16b --- /dev/null +++ b/backend/node_modules/lodash/fp/pickBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('pickBy', require('../pickBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/pipe.js b/backend/node_modules/lodash/fp/pipe.js new file mode 100644 index 00000000..b2e1e2cc --- /dev/null +++ b/backend/node_modules/lodash/fp/pipe.js @@ -0,0 +1 @@ +module.exports = require('./flow'); diff --git a/backend/node_modules/lodash/fp/placeholder.js b/backend/node_modules/lodash/fp/placeholder.js new file mode 100644 index 00000000..1ce17393 --- /dev/null +++ b/backend/node_modules/lodash/fp/placeholder.js @@ -0,0 +1,6 @@ +/** + * The default argument placeholder value for methods. 
+ * + * @type {Object} + */ +module.exports = {}; diff --git a/backend/node_modules/lodash/fp/plant.js b/backend/node_modules/lodash/fp/plant.js new file mode 100644 index 00000000..eca8f32b --- /dev/null +++ b/backend/node_modules/lodash/fp/plant.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('plant', require('../plant'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/pluck.js b/backend/node_modules/lodash/fp/pluck.js new file mode 100644 index 00000000..0d1e1abf --- /dev/null +++ b/backend/node_modules/lodash/fp/pluck.js @@ -0,0 +1 @@ +module.exports = require('./map'); diff --git a/backend/node_modules/lodash/fp/prop.js b/backend/node_modules/lodash/fp/prop.js new file mode 100644 index 00000000..b29cfb21 --- /dev/null +++ b/backend/node_modules/lodash/fp/prop.js @@ -0,0 +1 @@ +module.exports = require('./get'); diff --git a/backend/node_modules/lodash/fp/propEq.js b/backend/node_modules/lodash/fp/propEq.js new file mode 100644 index 00000000..36c027a3 --- /dev/null +++ b/backend/node_modules/lodash/fp/propEq.js @@ -0,0 +1 @@ +module.exports = require('./matchesProperty'); diff --git a/backend/node_modules/lodash/fp/propOr.js b/backend/node_modules/lodash/fp/propOr.js new file mode 100644 index 00000000..4ab58209 --- /dev/null +++ b/backend/node_modules/lodash/fp/propOr.js @@ -0,0 +1 @@ +module.exports = require('./getOr'); diff --git a/backend/node_modules/lodash/fp/property.js b/backend/node_modules/lodash/fp/property.js new file mode 100644 index 00000000..b29cfb21 --- /dev/null +++ b/backend/node_modules/lodash/fp/property.js @@ -0,0 +1 @@ +module.exports = require('./get'); diff --git a/backend/node_modules/lodash/fp/propertyOf.js b/backend/node_modules/lodash/fp/propertyOf.js new file mode 100644 index 00000000..f6273ee4 --- /dev/null +++ b/backend/node_modules/lodash/fp/propertyOf.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('propertyOf', require('../get')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/props.js b/backend/node_modules/lodash/fp/props.js new file mode 100644 index 00000000..1eb7950a --- /dev/null +++ b/backend/node_modules/lodash/fp/props.js @@ -0,0 +1 @@ +module.exports = require('./at'); diff --git a/backend/node_modules/lodash/fp/pull.js b/backend/node_modules/lodash/fp/pull.js new file mode 100644 index 00000000..8d7084f0 --- /dev/null +++ b/backend/node_modules/lodash/fp/pull.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('pull', require('../pull')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/pullAll.js b/backend/node_modules/lodash/fp/pullAll.js new file mode 100644 index 00000000..98d5c9a7 --- /dev/null +++ b/backend/node_modules/lodash/fp/pullAll.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('pullAll', require('../pullAll')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/pullAllBy.js b/backend/node_modules/lodash/fp/pullAllBy.js new file mode 100644 index 00000000..876bc3bf --- /dev/null +++ b/backend/node_modules/lodash/fp/pullAllBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('pullAllBy', require('../pullAllBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git 
a/backend/node_modules/lodash/fp/pullAllWith.js b/backend/node_modules/lodash/fp/pullAllWith.js new file mode 100644 index 00000000..f71ba4d7 --- /dev/null +++ b/backend/node_modules/lodash/fp/pullAllWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('pullAllWith', require('../pullAllWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/pullAt.js b/backend/node_modules/lodash/fp/pullAt.js new file mode 100644 index 00000000..e8b3bb61 --- /dev/null +++ b/backend/node_modules/lodash/fp/pullAt.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('pullAt', require('../pullAt')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/random.js b/backend/node_modules/lodash/fp/random.js new file mode 100644 index 00000000..99d852e4 --- /dev/null +++ b/backend/node_modules/lodash/fp/random.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('random', require('../random')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/range.js b/backend/node_modules/lodash/fp/range.js new file mode 100644 index 00000000..a6bb5911 --- /dev/null +++ b/backend/node_modules/lodash/fp/range.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('range', require('../range')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/rangeRight.js b/backend/node_modules/lodash/fp/rangeRight.js new file mode 100644 index 00000000..fdb712f9 --- /dev/null +++ b/backend/node_modules/lodash/fp/rangeRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('rangeRight', require('../rangeRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/rangeStep.js b/backend/node_modules/lodash/fp/rangeStep.js new file mode 100644 index 00000000..d72dfc20 --- /dev/null +++ b/backend/node_modules/lodash/fp/rangeStep.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('rangeStep', require('../range')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/rangeStepRight.js b/backend/node_modules/lodash/fp/rangeStepRight.js new file mode 100644 index 00000000..8b2a67bc --- /dev/null +++ b/backend/node_modules/lodash/fp/rangeStepRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('rangeStepRight', require('../rangeRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/rearg.js b/backend/node_modules/lodash/fp/rearg.js new file mode 100644 index 00000000..678e02a3 --- /dev/null +++ b/backend/node_modules/lodash/fp/rearg.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('rearg', require('../rearg')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/reduce.js b/backend/node_modules/lodash/fp/reduce.js new file mode 100644 index 00000000..4cef0a00 --- /dev/null +++ b/backend/node_modules/lodash/fp/reduce.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('reduce', require('../reduce')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/reduceRight.js 
b/backend/node_modules/lodash/fp/reduceRight.js new file mode 100644 index 00000000..caf5bb51 --- /dev/null +++ b/backend/node_modules/lodash/fp/reduceRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('reduceRight', require('../reduceRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/reject.js b/backend/node_modules/lodash/fp/reject.js new file mode 100644 index 00000000..c1632738 --- /dev/null +++ b/backend/node_modules/lodash/fp/reject.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('reject', require('../reject')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/remove.js b/backend/node_modules/lodash/fp/remove.js new file mode 100644 index 00000000..e9d13273 --- /dev/null +++ b/backend/node_modules/lodash/fp/remove.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('remove', require('../remove')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/repeat.js b/backend/node_modules/lodash/fp/repeat.js new file mode 100644 index 00000000..08470f24 --- /dev/null +++ b/backend/node_modules/lodash/fp/repeat.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('repeat', require('../repeat')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/replace.js b/backend/node_modules/lodash/fp/replace.js new file mode 100644 index 00000000..2227db62 --- /dev/null +++ b/backend/node_modules/lodash/fp/replace.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('replace', require('../replace')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/rest.js b/backend/node_modules/lodash/fp/rest.js new file mode 100644 index 00000000..c1f3d64b --- /dev/null +++ b/backend/node_modules/lodash/fp/rest.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('rest', require('../rest')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/restFrom.js b/backend/node_modules/lodash/fp/restFrom.js new file mode 100644 index 00000000..714e42b5 --- /dev/null +++ b/backend/node_modules/lodash/fp/restFrom.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('restFrom', require('../rest')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/result.js b/backend/node_modules/lodash/fp/result.js new file mode 100644 index 00000000..f86ce071 --- /dev/null +++ b/backend/node_modules/lodash/fp/result.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('result', require('../result')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/reverse.js b/backend/node_modules/lodash/fp/reverse.js new file mode 100644 index 00000000..07c9f5e4 --- /dev/null +++ b/backend/node_modules/lodash/fp/reverse.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('reverse', require('../reverse')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/round.js b/backend/node_modules/lodash/fp/round.js new file mode 100644 index 00000000..4c0e5c82 --- /dev/null +++ 
b/backend/node_modules/lodash/fp/round.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('round', require('../round')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sample.js b/backend/node_modules/lodash/fp/sample.js new file mode 100644 index 00000000..6bea1254 --- /dev/null +++ b/backend/node_modules/lodash/fp/sample.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sample', require('../sample'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sampleSize.js b/backend/node_modules/lodash/fp/sampleSize.js new file mode 100644 index 00000000..359ed6fc --- /dev/null +++ b/backend/node_modules/lodash/fp/sampleSize.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sampleSize', require('../sampleSize')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/seq.js b/backend/node_modules/lodash/fp/seq.js new file mode 100644 index 00000000..d8f42b0a --- /dev/null +++ b/backend/node_modules/lodash/fp/seq.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../seq')); diff --git a/backend/node_modules/lodash/fp/set.js b/backend/node_modules/lodash/fp/set.js new file mode 100644 index 00000000..0b56a56c --- /dev/null +++ b/backend/node_modules/lodash/fp/set.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('set', require('../set')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/setWith.js b/backend/node_modules/lodash/fp/setWith.js new file mode 100644 index 00000000..0b584952 --- /dev/null +++ b/backend/node_modules/lodash/fp/setWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('setWith', require('../setWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/shuffle.js b/backend/node_modules/lodash/fp/shuffle.js new file mode 100644 index 00000000..aa3a1ca5 --- /dev/null +++ b/backend/node_modules/lodash/fp/shuffle.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('shuffle', require('../shuffle'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/size.js b/backend/node_modules/lodash/fp/size.js new file mode 100644 index 00000000..7490136e --- /dev/null +++ b/backend/node_modules/lodash/fp/size.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('size', require('../size'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/slice.js b/backend/node_modules/lodash/fp/slice.js new file mode 100644 index 00000000..15945d32 --- /dev/null +++ b/backend/node_modules/lodash/fp/slice.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('slice', require('../slice')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/snakeCase.js b/backend/node_modules/lodash/fp/snakeCase.js new file mode 100644 index 00000000..a0ff7808 --- /dev/null +++ b/backend/node_modules/lodash/fp/snakeCase.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('snakeCase', 
require('../snakeCase'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/some.js b/backend/node_modules/lodash/fp/some.js new file mode 100644 index 00000000..a4fa2d00 --- /dev/null +++ b/backend/node_modules/lodash/fp/some.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('some', require('../some')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sortBy.js b/backend/node_modules/lodash/fp/sortBy.js new file mode 100644 index 00000000..e0790ad5 --- /dev/null +++ b/backend/node_modules/lodash/fp/sortBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sortBy', require('../sortBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sortedIndex.js b/backend/node_modules/lodash/fp/sortedIndex.js new file mode 100644 index 00000000..364a0543 --- /dev/null +++ b/backend/node_modules/lodash/fp/sortedIndex.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sortedIndex', require('../sortedIndex')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sortedIndexBy.js b/backend/node_modules/lodash/fp/sortedIndexBy.js new file mode 100644 index 00000000..9593dbd1 --- /dev/null +++ b/backend/node_modules/lodash/fp/sortedIndexBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sortedIndexBy', require('../sortedIndexBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sortedIndexOf.js b/backend/node_modules/lodash/fp/sortedIndexOf.js new file mode 100644 index 00000000..c9084cab --- /dev/null +++ b/backend/node_modules/lodash/fp/sortedIndexOf.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sortedIndexOf', require('../sortedIndexOf')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sortedLastIndex.js b/backend/node_modules/lodash/fp/sortedLastIndex.js new file mode 100644 index 00000000..47fe241a --- /dev/null +++ b/backend/node_modules/lodash/fp/sortedLastIndex.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sortedLastIndex', require('../sortedLastIndex')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sortedLastIndexBy.js b/backend/node_modules/lodash/fp/sortedLastIndexBy.js new file mode 100644 index 00000000..0f9a3473 --- /dev/null +++ b/backend/node_modules/lodash/fp/sortedLastIndexBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sortedLastIndexBy', require('../sortedLastIndexBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sortedLastIndexOf.js b/backend/node_modules/lodash/fp/sortedLastIndexOf.js new file mode 100644 index 00000000..0d4d9327 --- /dev/null +++ b/backend/node_modules/lodash/fp/sortedLastIndexOf.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sortedLastIndexOf', require('../sortedLastIndexOf')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sortedUniq.js b/backend/node_modules/lodash/fp/sortedUniq.js new file mode 100644 index 00000000..882d2837 --- 
/dev/null +++ b/backend/node_modules/lodash/fp/sortedUniq.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sortedUniq', require('../sortedUniq'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sortedUniqBy.js b/backend/node_modules/lodash/fp/sortedUniqBy.js new file mode 100644 index 00000000..033db91c --- /dev/null +++ b/backend/node_modules/lodash/fp/sortedUniqBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sortedUniqBy', require('../sortedUniqBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/split.js b/backend/node_modules/lodash/fp/split.js new file mode 100644 index 00000000..14de1a7e --- /dev/null +++ b/backend/node_modules/lodash/fp/split.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('split', require('../split')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/spread.js b/backend/node_modules/lodash/fp/spread.js new file mode 100644 index 00000000..2d11b707 --- /dev/null +++ b/backend/node_modules/lodash/fp/spread.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('spread', require('../spread')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/spreadFrom.js b/backend/node_modules/lodash/fp/spreadFrom.js new file mode 100644 index 00000000..0b630df1 --- /dev/null +++ b/backend/node_modules/lodash/fp/spreadFrom.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('spreadFrom', require('../spread')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/startCase.js b/backend/node_modules/lodash/fp/startCase.js new file mode 100644 index 00000000..ada98c94 --- /dev/null +++ b/backend/node_modules/lodash/fp/startCase.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('startCase', require('../startCase'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/startsWith.js b/backend/node_modules/lodash/fp/startsWith.js new file mode 100644 index 00000000..985e2f29 --- /dev/null +++ b/backend/node_modules/lodash/fp/startsWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('startsWith', require('../startsWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/string.js b/backend/node_modules/lodash/fp/string.js new file mode 100644 index 00000000..773b0370 --- /dev/null +++ b/backend/node_modules/lodash/fp/string.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../string')); diff --git a/backend/node_modules/lodash/fp/stubArray.js b/backend/node_modules/lodash/fp/stubArray.js new file mode 100644 index 00000000..cd604cb4 --- /dev/null +++ b/backend/node_modules/lodash/fp/stubArray.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('stubArray', require('../stubArray'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/stubFalse.js b/backend/node_modules/lodash/fp/stubFalse.js new file mode 100644 index 00000000..32966645 --- /dev/null +++ 
b/backend/node_modules/lodash/fp/stubFalse.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('stubFalse', require('../stubFalse'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/stubObject.js b/backend/node_modules/lodash/fp/stubObject.js new file mode 100644 index 00000000..c6c8ec47 --- /dev/null +++ b/backend/node_modules/lodash/fp/stubObject.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('stubObject', require('../stubObject'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/stubString.js b/backend/node_modules/lodash/fp/stubString.js new file mode 100644 index 00000000..701051e8 --- /dev/null +++ b/backend/node_modules/lodash/fp/stubString.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('stubString', require('../stubString'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/stubTrue.js b/backend/node_modules/lodash/fp/stubTrue.js new file mode 100644 index 00000000..9249082c --- /dev/null +++ b/backend/node_modules/lodash/fp/stubTrue.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('stubTrue', require('../stubTrue'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/subtract.js b/backend/node_modules/lodash/fp/subtract.js new file mode 100644 index 00000000..d32b16d4 --- /dev/null +++ b/backend/node_modules/lodash/fp/subtract.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('subtract', require('../subtract')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sum.js b/backend/node_modules/lodash/fp/sum.js new file mode 100644 index 00000000..5cce12b3 --- /dev/null +++ b/backend/node_modules/lodash/fp/sum.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sum', require('../sum'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/sumBy.js b/backend/node_modules/lodash/fp/sumBy.js new file mode 100644 index 00000000..c8826565 --- /dev/null +++ b/backend/node_modules/lodash/fp/sumBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('sumBy', require('../sumBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/symmetricDifference.js b/backend/node_modules/lodash/fp/symmetricDifference.js new file mode 100644 index 00000000..78c16add --- /dev/null +++ b/backend/node_modules/lodash/fp/symmetricDifference.js @@ -0,0 +1 @@ +module.exports = require('./xor'); diff --git a/backend/node_modules/lodash/fp/symmetricDifferenceBy.js b/backend/node_modules/lodash/fp/symmetricDifferenceBy.js new file mode 100644 index 00000000..298fc7ff --- /dev/null +++ b/backend/node_modules/lodash/fp/symmetricDifferenceBy.js @@ -0,0 +1 @@ +module.exports = require('./xorBy'); diff --git a/backend/node_modules/lodash/fp/symmetricDifferenceWith.js b/backend/node_modules/lodash/fp/symmetricDifferenceWith.js new file mode 100644 index 00000000..70bc6faf --- /dev/null +++ b/backend/node_modules/lodash/fp/symmetricDifferenceWith.js @@ -0,0 +1 @@ 
+module.exports = require('./xorWith'); diff --git a/backend/node_modules/lodash/fp/tail.js b/backend/node_modules/lodash/fp/tail.js new file mode 100644 index 00000000..f122f0ac --- /dev/null +++ b/backend/node_modules/lodash/fp/tail.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('tail', require('../tail'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/take.js b/backend/node_modules/lodash/fp/take.js new file mode 100644 index 00000000..9af98a7b --- /dev/null +++ b/backend/node_modules/lodash/fp/take.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('take', require('../take')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/takeLast.js b/backend/node_modules/lodash/fp/takeLast.js new file mode 100644 index 00000000..e98c84a1 --- /dev/null +++ b/backend/node_modules/lodash/fp/takeLast.js @@ -0,0 +1 @@ +module.exports = require('./takeRight'); diff --git a/backend/node_modules/lodash/fp/takeLastWhile.js b/backend/node_modules/lodash/fp/takeLastWhile.js new file mode 100644 index 00000000..5367968a --- /dev/null +++ b/backend/node_modules/lodash/fp/takeLastWhile.js @@ -0,0 +1 @@ +module.exports = require('./takeRightWhile'); diff --git a/backend/node_modules/lodash/fp/takeRight.js b/backend/node_modules/lodash/fp/takeRight.js new file mode 100644 index 00000000..b82950a6 --- /dev/null +++ b/backend/node_modules/lodash/fp/takeRight.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('takeRight', require('../takeRight')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/takeRightWhile.js b/backend/node_modules/lodash/fp/takeRightWhile.js new file mode 100644 index 00000000..8ffb0a28 --- /dev/null +++ b/backend/node_modules/lodash/fp/takeRightWhile.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('takeRightWhile', require('../takeRightWhile')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/takeWhile.js b/backend/node_modules/lodash/fp/takeWhile.js new file mode 100644 index 00000000..28136644 --- /dev/null +++ b/backend/node_modules/lodash/fp/takeWhile.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('takeWhile', require('../takeWhile')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/tap.js b/backend/node_modules/lodash/fp/tap.js new file mode 100644 index 00000000..d33ad6ec --- /dev/null +++ b/backend/node_modules/lodash/fp/tap.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('tap', require('../tap')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/template.js b/backend/node_modules/lodash/fp/template.js new file mode 100644 index 00000000..74857e1c --- /dev/null +++ b/backend/node_modules/lodash/fp/template.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('template', require('../template')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/templateSettings.js b/backend/node_modules/lodash/fp/templateSettings.js new file mode 100644 index 00000000..7bcc0a82 --- /dev/null +++ b/backend/node_modules/lodash/fp/templateSettings.js @@ 
-0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('templateSettings', require('../templateSettings'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/throttle.js b/backend/node_modules/lodash/fp/throttle.js new file mode 100644 index 00000000..77fff142 --- /dev/null +++ b/backend/node_modules/lodash/fp/throttle.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('throttle', require('../throttle')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/thru.js b/backend/node_modules/lodash/fp/thru.js new file mode 100644 index 00000000..d42b3b1d --- /dev/null +++ b/backend/node_modules/lodash/fp/thru.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('thru', require('../thru')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/times.js b/backend/node_modules/lodash/fp/times.js new file mode 100644 index 00000000..0dab06da --- /dev/null +++ b/backend/node_modules/lodash/fp/times.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('times', require('../times')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toArray.js b/backend/node_modules/lodash/fp/toArray.js new file mode 100644 index 00000000..f0c360ac --- /dev/null +++ b/backend/node_modules/lodash/fp/toArray.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toArray', require('../toArray'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toFinite.js b/backend/node_modules/lodash/fp/toFinite.js new file mode 100644 index 00000000..3a47687d --- /dev/null +++ b/backend/node_modules/lodash/fp/toFinite.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toFinite', require('../toFinite'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toInteger.js b/backend/node_modules/lodash/fp/toInteger.js new file mode 100644 index 00000000..e0af6a75 --- /dev/null +++ b/backend/node_modules/lodash/fp/toInteger.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toInteger', require('../toInteger'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toIterator.js b/backend/node_modules/lodash/fp/toIterator.js new file mode 100644 index 00000000..65e6baa9 --- /dev/null +++ b/backend/node_modules/lodash/fp/toIterator.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toIterator', require('../toIterator'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toJSON.js b/backend/node_modules/lodash/fp/toJSON.js new file mode 100644 index 00000000..2d718d0b --- /dev/null +++ b/backend/node_modules/lodash/fp/toJSON.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toJSON', require('../toJSON'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toLength.js b/backend/node_modules/lodash/fp/toLength.js new file mode 
100644 index 00000000..b97cdd93 --- /dev/null +++ b/backend/node_modules/lodash/fp/toLength.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toLength', require('../toLength'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toLower.js b/backend/node_modules/lodash/fp/toLower.js new file mode 100644 index 00000000..616ef36a --- /dev/null +++ b/backend/node_modules/lodash/fp/toLower.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toLower', require('../toLower'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toNumber.js b/backend/node_modules/lodash/fp/toNumber.js new file mode 100644 index 00000000..d0c6f4d3 --- /dev/null +++ b/backend/node_modules/lodash/fp/toNumber.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toNumber', require('../toNumber'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toPairs.js b/backend/node_modules/lodash/fp/toPairs.js new file mode 100644 index 00000000..af783786 --- /dev/null +++ b/backend/node_modules/lodash/fp/toPairs.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toPairs', require('../toPairs'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toPairsIn.js b/backend/node_modules/lodash/fp/toPairsIn.js new file mode 100644 index 00000000..66504abf --- /dev/null +++ b/backend/node_modules/lodash/fp/toPairsIn.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toPairsIn', require('../toPairsIn'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toPath.js b/backend/node_modules/lodash/fp/toPath.js new file mode 100644 index 00000000..b4d5e50f --- /dev/null +++ b/backend/node_modules/lodash/fp/toPath.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toPath', require('../toPath'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toPlainObject.js b/backend/node_modules/lodash/fp/toPlainObject.js new file mode 100644 index 00000000..278bb863 --- /dev/null +++ b/backend/node_modules/lodash/fp/toPlainObject.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toPlainObject', require('../toPlainObject'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toSafeInteger.js b/backend/node_modules/lodash/fp/toSafeInteger.js new file mode 100644 index 00000000..367a26fd --- /dev/null +++ b/backend/node_modules/lodash/fp/toSafeInteger.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toSafeInteger', require('../toSafeInteger'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toString.js b/backend/node_modules/lodash/fp/toString.js new file mode 100644 index 00000000..cec4f8e2 --- /dev/null +++ b/backend/node_modules/lodash/fp/toString.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toString', 
require('../toString'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/toUpper.js b/backend/node_modules/lodash/fp/toUpper.js new file mode 100644 index 00000000..54f9a560 --- /dev/null +++ b/backend/node_modules/lodash/fp/toUpper.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('toUpper', require('../toUpper'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/transform.js b/backend/node_modules/lodash/fp/transform.js new file mode 100644 index 00000000..759d088f --- /dev/null +++ b/backend/node_modules/lodash/fp/transform.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('transform', require('../transform')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/trim.js b/backend/node_modules/lodash/fp/trim.js new file mode 100644 index 00000000..e6319a74 --- /dev/null +++ b/backend/node_modules/lodash/fp/trim.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('trim', require('../trim')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/trimChars.js b/backend/node_modules/lodash/fp/trimChars.js new file mode 100644 index 00000000..c9294de4 --- /dev/null +++ b/backend/node_modules/lodash/fp/trimChars.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('trimChars', require('../trim')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/trimCharsEnd.js b/backend/node_modules/lodash/fp/trimCharsEnd.js new file mode 100644 index 00000000..284bc2f8 --- /dev/null +++ b/backend/node_modules/lodash/fp/trimCharsEnd.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('trimCharsEnd', require('../trimEnd')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/trimCharsStart.js b/backend/node_modules/lodash/fp/trimCharsStart.js new file mode 100644 index 00000000..ff0ee65d --- /dev/null +++ b/backend/node_modules/lodash/fp/trimCharsStart.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('trimCharsStart', require('../trimStart')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/trimEnd.js b/backend/node_modules/lodash/fp/trimEnd.js new file mode 100644 index 00000000..71908805 --- /dev/null +++ b/backend/node_modules/lodash/fp/trimEnd.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('trimEnd', require('../trimEnd')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/trimStart.js b/backend/node_modules/lodash/fp/trimStart.js new file mode 100644 index 00000000..fda902c3 --- /dev/null +++ b/backend/node_modules/lodash/fp/trimStart.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('trimStart', require('../trimStart')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/truncate.js b/backend/node_modules/lodash/fp/truncate.js new file mode 100644 index 00000000..d265c1de --- /dev/null +++ b/backend/node_modules/lodash/fp/truncate.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func 
= convert('truncate', require('../truncate')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/unapply.js b/backend/node_modules/lodash/fp/unapply.js new file mode 100644 index 00000000..c5dfe779 --- /dev/null +++ b/backend/node_modules/lodash/fp/unapply.js @@ -0,0 +1 @@ +module.exports = require('./rest'); diff --git a/backend/node_modules/lodash/fp/unary.js b/backend/node_modules/lodash/fp/unary.js new file mode 100644 index 00000000..286c945f --- /dev/null +++ b/backend/node_modules/lodash/fp/unary.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('unary', require('../unary'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/unescape.js b/backend/node_modules/lodash/fp/unescape.js new file mode 100644 index 00000000..fddcb46e --- /dev/null +++ b/backend/node_modules/lodash/fp/unescape.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('unescape', require('../unescape'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/union.js b/backend/node_modules/lodash/fp/union.js new file mode 100644 index 00000000..ef8228d7 --- /dev/null +++ b/backend/node_modules/lodash/fp/union.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('union', require('../union')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/unionBy.js b/backend/node_modules/lodash/fp/unionBy.js new file mode 100644 index 00000000..603687a1 --- /dev/null +++ b/backend/node_modules/lodash/fp/unionBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('unionBy', require('../unionBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/unionWith.js b/backend/node_modules/lodash/fp/unionWith.js new file mode 100644 index 00000000..65bb3a79 --- /dev/null +++ b/backend/node_modules/lodash/fp/unionWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('unionWith', require('../unionWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/uniq.js b/backend/node_modules/lodash/fp/uniq.js new file mode 100644 index 00000000..bc185249 --- /dev/null +++ b/backend/node_modules/lodash/fp/uniq.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('uniq', require('../uniq'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/uniqBy.js b/backend/node_modules/lodash/fp/uniqBy.js new file mode 100644 index 00000000..634c6a8b --- /dev/null +++ b/backend/node_modules/lodash/fp/uniqBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('uniqBy', require('../uniqBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/uniqWith.js b/backend/node_modules/lodash/fp/uniqWith.js new file mode 100644 index 00000000..0ec601a9 --- /dev/null +++ b/backend/node_modules/lodash/fp/uniqWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('uniqWith', require('../uniqWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git 
a/backend/node_modules/lodash/fp/uniqueId.js b/backend/node_modules/lodash/fp/uniqueId.js new file mode 100644 index 00000000..aa8fc2f7 --- /dev/null +++ b/backend/node_modules/lodash/fp/uniqueId.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('uniqueId', require('../uniqueId')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/unnest.js b/backend/node_modules/lodash/fp/unnest.js new file mode 100644 index 00000000..5d34060a --- /dev/null +++ b/backend/node_modules/lodash/fp/unnest.js @@ -0,0 +1 @@ +module.exports = require('./flatten'); diff --git a/backend/node_modules/lodash/fp/unset.js b/backend/node_modules/lodash/fp/unset.js new file mode 100644 index 00000000..ea203a0f --- /dev/null +++ b/backend/node_modules/lodash/fp/unset.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('unset', require('../unset')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/unzip.js b/backend/node_modules/lodash/fp/unzip.js new file mode 100644 index 00000000..cc364b3c --- /dev/null +++ b/backend/node_modules/lodash/fp/unzip.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('unzip', require('../unzip'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/unzipWith.js b/backend/node_modules/lodash/fp/unzipWith.js new file mode 100644 index 00000000..182eaa10 --- /dev/null +++ b/backend/node_modules/lodash/fp/unzipWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('unzipWith', require('../unzipWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/update.js b/backend/node_modules/lodash/fp/update.js new file mode 100644 index 00000000..b8ce2cc9 --- /dev/null +++ b/backend/node_modules/lodash/fp/update.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('update', require('../update')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/updateWith.js b/backend/node_modules/lodash/fp/updateWith.js new file mode 100644 index 00000000..d5e8282d --- /dev/null +++ b/backend/node_modules/lodash/fp/updateWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('updateWith', require('../updateWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/upperCase.js b/backend/node_modules/lodash/fp/upperCase.js new file mode 100644 index 00000000..c886f202 --- /dev/null +++ b/backend/node_modules/lodash/fp/upperCase.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('upperCase', require('../upperCase'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/upperFirst.js b/backend/node_modules/lodash/fp/upperFirst.js new file mode 100644 index 00000000..d8c04df5 --- /dev/null +++ b/backend/node_modules/lodash/fp/upperFirst.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('upperFirst', require('../upperFirst'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/useWith.js b/backend/node_modules/lodash/fp/useWith.js new file mode 100644 
index 00000000..d8b3df5a --- /dev/null +++ b/backend/node_modules/lodash/fp/useWith.js @@ -0,0 +1 @@ +module.exports = require('./overArgs'); diff --git a/backend/node_modules/lodash/fp/util.js b/backend/node_modules/lodash/fp/util.js new file mode 100644 index 00000000..18c00bae --- /dev/null +++ b/backend/node_modules/lodash/fp/util.js @@ -0,0 +1,2 @@ +var convert = require('./convert'); +module.exports = convert(require('../util')); diff --git a/backend/node_modules/lodash/fp/value.js b/backend/node_modules/lodash/fp/value.js new file mode 100644 index 00000000..555eec7a --- /dev/null +++ b/backend/node_modules/lodash/fp/value.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('value', require('../value'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/valueOf.js b/backend/node_modules/lodash/fp/valueOf.js new file mode 100644 index 00000000..f968807d --- /dev/null +++ b/backend/node_modules/lodash/fp/valueOf.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('valueOf', require('../valueOf'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/values.js b/backend/node_modules/lodash/fp/values.js new file mode 100644 index 00000000..2dfc5613 --- /dev/null +++ b/backend/node_modules/lodash/fp/values.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('values', require('../values'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/valuesIn.js b/backend/node_modules/lodash/fp/valuesIn.js new file mode 100644 index 00000000..a1b2bb87 --- /dev/null +++ b/backend/node_modules/lodash/fp/valuesIn.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('valuesIn', require('../valuesIn'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/where.js b/backend/node_modules/lodash/fp/where.js new file mode 100644 index 00000000..3247f64a --- /dev/null +++ b/backend/node_modules/lodash/fp/where.js @@ -0,0 +1 @@ +module.exports = require('./conformsTo'); diff --git a/backend/node_modules/lodash/fp/whereEq.js b/backend/node_modules/lodash/fp/whereEq.js new file mode 100644 index 00000000..29d1e1e4 --- /dev/null +++ b/backend/node_modules/lodash/fp/whereEq.js @@ -0,0 +1 @@ +module.exports = require('./isMatch'); diff --git a/backend/node_modules/lodash/fp/without.js b/backend/node_modules/lodash/fp/without.js new file mode 100644 index 00000000..bad9e125 --- /dev/null +++ b/backend/node_modules/lodash/fp/without.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('without', require('../without')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/words.js b/backend/node_modules/lodash/fp/words.js new file mode 100644 index 00000000..4a901414 --- /dev/null +++ b/backend/node_modules/lodash/fp/words.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('words', require('../words')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/wrap.js b/backend/node_modules/lodash/fp/wrap.js new file mode 100644 index 00000000..e93bd8a1 --- /dev/null +++ b/backend/node_modules/lodash/fp/wrap.js 
@@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('wrap', require('../wrap')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/wrapperAt.js b/backend/node_modules/lodash/fp/wrapperAt.js new file mode 100644 index 00000000..8f0a310f --- /dev/null +++ b/backend/node_modules/lodash/fp/wrapperAt.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('wrapperAt', require('../wrapperAt'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/wrapperChain.js b/backend/node_modules/lodash/fp/wrapperChain.js new file mode 100644 index 00000000..2a48ea2b --- /dev/null +++ b/backend/node_modules/lodash/fp/wrapperChain.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('wrapperChain', require('../wrapperChain'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/wrapperLodash.js b/backend/node_modules/lodash/fp/wrapperLodash.js new file mode 100644 index 00000000..a7162d08 --- /dev/null +++ b/backend/node_modules/lodash/fp/wrapperLodash.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('wrapperLodash', require('../wrapperLodash'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/wrapperReverse.js b/backend/node_modules/lodash/fp/wrapperReverse.js new file mode 100644 index 00000000..e1481aab --- /dev/null +++ b/backend/node_modules/lodash/fp/wrapperReverse.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('wrapperReverse', require('../wrapperReverse'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/wrapperValue.js b/backend/node_modules/lodash/fp/wrapperValue.js new file mode 100644 index 00000000..8eb9112f --- /dev/null +++ b/backend/node_modules/lodash/fp/wrapperValue.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('wrapperValue', require('../wrapperValue'), require('./_falseOptions')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/xor.js b/backend/node_modules/lodash/fp/xor.js new file mode 100644 index 00000000..29e28194 --- /dev/null +++ b/backend/node_modules/lodash/fp/xor.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('xor', require('../xor')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/xorBy.js b/backend/node_modules/lodash/fp/xorBy.js new file mode 100644 index 00000000..b355686d --- /dev/null +++ b/backend/node_modules/lodash/fp/xorBy.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('xorBy', require('../xorBy')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/xorWith.js b/backend/node_modules/lodash/fp/xorWith.js new file mode 100644 index 00000000..8e05739a --- /dev/null +++ b/backend/node_modules/lodash/fp/xorWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('xorWith', require('../xorWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/zip.js 
b/backend/node_modules/lodash/fp/zip.js new file mode 100644 index 00000000..69e147a4 --- /dev/null +++ b/backend/node_modules/lodash/fp/zip.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('zip', require('../zip')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/zipAll.js b/backend/node_modules/lodash/fp/zipAll.js new file mode 100644 index 00000000..efa8ccbf --- /dev/null +++ b/backend/node_modules/lodash/fp/zipAll.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('zipAll', require('../zip')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/zipObj.js b/backend/node_modules/lodash/fp/zipObj.js new file mode 100644 index 00000000..f4a34531 --- /dev/null +++ b/backend/node_modules/lodash/fp/zipObj.js @@ -0,0 +1 @@ +module.exports = require('./zipObject'); diff --git a/backend/node_modules/lodash/fp/zipObject.js b/backend/node_modules/lodash/fp/zipObject.js new file mode 100644 index 00000000..462dbb68 --- /dev/null +++ b/backend/node_modules/lodash/fp/zipObject.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('zipObject', require('../zipObject')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/zipObjectDeep.js b/backend/node_modules/lodash/fp/zipObjectDeep.js new file mode 100644 index 00000000..53a5d338 --- /dev/null +++ b/backend/node_modules/lodash/fp/zipObjectDeep.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('zipObjectDeep', require('../zipObjectDeep')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fp/zipWith.js b/backend/node_modules/lodash/fp/zipWith.js new file mode 100644 index 00000000..c5cf9e21 --- /dev/null +++ b/backend/node_modules/lodash/fp/zipWith.js @@ -0,0 +1,5 @@ +var convert = require('./convert'), + func = convert('zipWith', require('../zipWith')); + +func.placeholder = require('./placeholder'); +module.exports = func; diff --git a/backend/node_modules/lodash/fromPairs.js b/backend/node_modules/lodash/fromPairs.js new file mode 100644 index 00000000..ee7940d2 --- /dev/null +++ b/backend/node_modules/lodash/fromPairs.js @@ -0,0 +1,28 @@ +/** + * The inverse of `_.toPairs`; this method returns an object composed + * from key-value `pairs`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} pairs The key-value pairs. + * @returns {Object} Returns the new object. + * @example + * + * _.fromPairs([['a', 1], ['b', 2]]); + * // => { 'a': 1, 'b': 2 } + */ +function fromPairs(pairs) { + var index = -1, + length = pairs == null ? 
0 : pairs.length, + result = {}; + + while (++index < length) { + var pair = pairs[index]; + result[pair[0]] = pair[1]; + } + return result; +} + +module.exports = fromPairs; diff --git a/backend/node_modules/lodash/function.js b/backend/node_modules/lodash/function.js new file mode 100644 index 00000000..b0fc6d93 --- /dev/null +++ b/backend/node_modules/lodash/function.js @@ -0,0 +1,25 @@ +module.exports = { + 'after': require('./after'), + 'ary': require('./ary'), + 'before': require('./before'), + 'bind': require('./bind'), + 'bindKey': require('./bindKey'), + 'curry': require('./curry'), + 'curryRight': require('./curryRight'), + 'debounce': require('./debounce'), + 'defer': require('./defer'), + 'delay': require('./delay'), + 'flip': require('./flip'), + 'memoize': require('./memoize'), + 'negate': require('./negate'), + 'once': require('./once'), + 'overArgs': require('./overArgs'), + 'partial': require('./partial'), + 'partialRight': require('./partialRight'), + 'rearg': require('./rearg'), + 'rest': require('./rest'), + 'spread': require('./spread'), + 'throttle': require('./throttle'), + 'unary': require('./unary'), + 'wrap': require('./wrap') +}; diff --git a/backend/node_modules/lodash/functions.js b/backend/node_modules/lodash/functions.js new file mode 100644 index 00000000..9722928f --- /dev/null +++ b/backend/node_modules/lodash/functions.js @@ -0,0 +1,31 @@ +var baseFunctions = require('./_baseFunctions'), + keys = require('./keys'); + +/** + * Creates an array of function property names from own enumerable properties + * of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functionsIn + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functions(new Foo); + * // => ['a', 'b'] + */ +function functions(object) { + return object == null ? [] : baseFunctions(object, keys(object)); +} + +module.exports = functions; diff --git a/backend/node_modules/lodash/functionsIn.js b/backend/node_modules/lodash/functionsIn.js new file mode 100644 index 00000000..f00345d0 --- /dev/null +++ b/backend/node_modules/lodash/functionsIn.js @@ -0,0 +1,31 @@ +var baseFunctions = require('./_baseFunctions'), + keysIn = require('./keysIn'); + +/** + * Creates an array of function property names from own and inherited + * enumerable properties of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functions + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functionsIn(new Foo); + * // => ['a', 'b', 'c'] + */ +function functionsIn(object) { + return object == null ? [] : baseFunctions(object, keysIn(object)); +} + +module.exports = functionsIn; diff --git a/backend/node_modules/lodash/get.js b/backend/node_modules/lodash/get.js new file mode 100644 index 00000000..8805ff92 --- /dev/null +++ b/backend/node_modules/lodash/get.js @@ -0,0 +1,33 @@ +var baseGet = require('./_baseGet'); + +/** + * Gets the value at `path` of `object`. If the resolved value is + * `undefined`, the `defaultValue` is returned in its place. 
+ * + * @static + * @memberOf _ + * @since 3.7.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to get. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.get(object, 'a[0].b.c'); + * // => 3 + * + * _.get(object, ['a', '0', 'b', 'c']); + * // => 3 + * + * _.get(object, 'a.b.c', 'default'); + * // => 'default' + */ +function get(object, path, defaultValue) { + var result = object == null ? undefined : baseGet(object, path); + return result === undefined ? defaultValue : result; +} + +module.exports = get; diff --git a/backend/node_modules/lodash/groupBy.js b/backend/node_modules/lodash/groupBy.js new file mode 100644 index 00000000..babf4f6b --- /dev/null +++ b/backend/node_modules/lodash/groupBy.js @@ -0,0 +1,41 @@ +var baseAssignValue = require('./_baseAssignValue'), + createAggregator = require('./_createAggregator'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The order of grouped values + * is determined by the order they occur in `collection`. The corresponding + * value of each key is an array of elements responsible for generating the + * key. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.groupBy([6.1, 4.2, 6.3], Math.floor); + * // => { '4': [4.2], '6': [6.1, 6.3] } + * + * // The `_.property` iteratee shorthand. + * _.groupBy(['one', 'two', 'three'], 'length'); + * // => { '3': ['one', 'two'], '5': ['three'] } + */ +var groupBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + result[key].push(value); + } else { + baseAssignValue(result, key, [value]); + } +}); + +module.exports = groupBy; diff --git a/backend/node_modules/lodash/gt.js b/backend/node_modules/lodash/gt.js new file mode 100644 index 00000000..3a662828 --- /dev/null +++ b/backend/node_modules/lodash/gt.js @@ -0,0 +1,29 @@ +var baseGt = require('./_baseGt'), + createRelationalOperation = require('./_createRelationalOperation'); + +/** + * Checks if `value` is greater than `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. + * @see _.lt + * @example + * + * _.gt(3, 1); + * // => true + * + * _.gt(3, 3); + * // => false + * + * _.gt(1, 3); + * // => false + */ +var gt = createRelationalOperation(baseGt); + +module.exports = gt; diff --git a/backend/node_modules/lodash/gte.js b/backend/node_modules/lodash/gte.js new file mode 100644 index 00000000..4180a687 --- /dev/null +++ b/backend/node_modules/lodash/gte.js @@ -0,0 +1,30 @@ +var createRelationalOperation = require('./_createRelationalOperation'); + +/** + * Checks if `value` is greater than or equal to `other`. 
+ * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than or equal to + * `other`, else `false`. + * @see _.lte + * @example + * + * _.gte(3, 1); + * // => true + * + * _.gte(3, 3); + * // => true + * + * _.gte(1, 3); + * // => false + */ +var gte = createRelationalOperation(function(value, other) { + return value >= other; +}); + +module.exports = gte; diff --git a/backend/node_modules/lodash/has.js b/backend/node_modules/lodash/has.js new file mode 100644 index 00000000..34df55e8 --- /dev/null +++ b/backend/node_modules/lodash/has.js @@ -0,0 +1,35 @@ +var baseHas = require('./_baseHas'), + hasPath = require('./_hasPath'); + +/** + * Checks if `path` is a direct property of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + * @example + * + * var object = { 'a': { 'b': 2 } }; + * var other = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.has(object, 'a'); + * // => true + * + * _.has(object, 'a.b'); + * // => true + * + * _.has(object, ['a', 'b']); + * // => true + * + * _.has(other, 'a'); + * // => false + */ +function has(object, path) { + return object != null && hasPath(object, path, baseHas); +} + +module.exports = has; diff --git a/backend/node_modules/lodash/hasIn.js b/backend/node_modules/lodash/hasIn.js new file mode 100644 index 00000000..06a36865 --- /dev/null +++ b/backend/node_modules/lodash/hasIn.js @@ -0,0 +1,34 @@ +var baseHasIn = require('./_baseHasIn'), + hasPath = require('./_hasPath'); + +/** + * Checks if `path` is a direct or inherited property of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + * @example + * + * var object = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.hasIn(object, 'a'); + * // => true + * + * _.hasIn(object, 'a.b'); + * // => true + * + * _.hasIn(object, ['a', 'b']); + * // => true + * + * _.hasIn(object, 'b'); + * // => false + */ +function hasIn(object, path) { + return object != null && hasPath(object, path, baseHasIn); +} + +module.exports = hasIn; diff --git a/backend/node_modules/lodash/head.js b/backend/node_modules/lodash/head.js new file mode 100644 index 00000000..dee9d1f1 --- /dev/null +++ b/backend/node_modules/lodash/head.js @@ -0,0 +1,23 @@ +/** + * Gets the first element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias first + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the first element of `array`. + * @example + * + * _.head([1, 2, 3]); + * // => 1 + * + * _.head([]); + * // => undefined + */ +function head(array) { + return (array && array.length) ? array[0] : undefined; +} + +module.exports = head; diff --git a/backend/node_modules/lodash/identity.js b/backend/node_modules/lodash/identity.js new file mode 100644 index 00000000..2d5d963c --- /dev/null +++ b/backend/node_modules/lodash/identity.js @@ -0,0 +1,21 @@ +/** + * This method returns the first argument it receives. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Util + * @param {*} value Any value. 
+ * @returns {*} Returns `value`. + * @example + * + * var object = { 'a': 1 }; + * + * console.log(_.identity(object) === object); + * // => true + */ +function identity(value) { + return value; +} + +module.exports = identity; diff --git a/backend/node_modules/lodash/inRange.js b/backend/node_modules/lodash/inRange.js new file mode 100644 index 00000000..f20728d9 --- /dev/null +++ b/backend/node_modules/lodash/inRange.js @@ -0,0 +1,55 @@ +var baseInRange = require('./_baseInRange'), + toFinite = require('./toFinite'), + toNumber = require('./toNumber'); + +/** + * Checks if `n` is between `start` and up to, but not including, `end`. If + * `end` is not specified, it's set to `start` with `start` then set to `0`. + * If `start` is greater than `end` the params are swapped to support + * negative ranges. + * + * @static + * @memberOf _ + * @since 3.3.0 + * @category Number + * @param {number} number The number to check. + * @param {number} [start=0] The start of the range. + * @param {number} end The end of the range. + * @returns {boolean} Returns `true` if `number` is in the range, else `false`. + * @see _.range, _.rangeRight + * @example + * + * _.inRange(3, 2, 4); + * // => true + * + * _.inRange(4, 8); + * // => true + * + * _.inRange(4, 2); + * // => false + * + * _.inRange(2, 2); + * // => false + * + * _.inRange(1.2, 2); + * // => true + * + * _.inRange(5.2, 4); + * // => false + * + * _.inRange(-3, -2, -6); + * // => true + */ +function inRange(number, start, end) { + start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); + } + number = toNumber(number); + return baseInRange(number, start, end); +} + +module.exports = inRange; diff --git a/backend/node_modules/lodash/includes.js b/backend/node_modules/lodash/includes.js new file mode 100644 index 00000000..ae0deedc --- /dev/null +++ b/backend/node_modules/lodash/includes.js @@ -0,0 +1,53 @@ +var baseIndexOf = require('./_baseIndexOf'), + isArrayLike = require('./isArrayLike'), + isString = require('./isString'), + toInteger = require('./toInteger'), + values = require('./values'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max; + +/** + * Checks if `value` is in `collection`. If `collection` is a string, it's + * checked for a substring of `value`, otherwise + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * is used for equality comparisons. If `fromIndex` is negative, it's used as + * the offset from the end of `collection`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {boolean} Returns `true` if `value` is found, else `false`. + * @example + * + * _.includes([1, 2, 3], 1); + * // => true + * + * _.includes([1, 2, 3], 1, 2); + * // => false + * + * _.includes({ 'a': 1, 'b': 2 }, 1); + * // => true + * + * _.includes('abcd', 'bc'); + * // => true + */ +function includes(collection, value, fromIndex, guard) { + collection = isArrayLike(collection) ? collection : values(collection); + fromIndex = (fromIndex && !guard) ? 
toInteger(fromIndex) : 0; + + var length = collection.length; + if (fromIndex < 0) { + fromIndex = nativeMax(length + fromIndex, 0); + } + return isString(collection) + ? (fromIndex <= length && collection.indexOf(value, fromIndex) > -1) + : (!!length && baseIndexOf(collection, value, fromIndex) > -1); +} + +module.exports = includes; diff --git a/backend/node_modules/lodash/index.js b/backend/node_modules/lodash/index.js new file mode 100644 index 00000000..5d063e21 --- /dev/null +++ b/backend/node_modules/lodash/index.js @@ -0,0 +1 @@ +module.exports = require('./lodash'); \ No newline at end of file diff --git a/backend/node_modules/lodash/indexOf.js b/backend/node_modules/lodash/indexOf.js new file mode 100644 index 00000000..3c644af2 --- /dev/null +++ b/backend/node_modules/lodash/indexOf.js @@ -0,0 +1,42 @@ +var baseIndexOf = require('./_baseIndexOf'), + toInteger = require('./toInteger'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max; + +/** + * Gets the index at which the first occurrence of `value` is found in `array` + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. If `fromIndex` is negative, it's used as the + * offset from the end of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.indexOf([1, 2, 1, 2], 2); + * // => 1 + * + * // Search from the `fromIndex`. + * _.indexOf([1, 2, 1, 2], 2, 2); + * // => 3 + */ +function indexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseIndexOf(array, value, index); +} + +module.exports = indexOf; diff --git a/backend/node_modules/lodash/initial.js b/backend/node_modules/lodash/initial.js new file mode 100644 index 00000000..f47fc509 --- /dev/null +++ b/backend/node_modules/lodash/initial.js @@ -0,0 +1,22 @@ +var baseSlice = require('./_baseSlice'); + +/** + * Gets all but the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.initial([1, 2, 3]); + * // => [1, 2] + */ +function initial(array) { + var length = array == null ? 0 : array.length; + return length ? baseSlice(array, 0, -1) : []; +} + +module.exports = initial; diff --git a/backend/node_modules/lodash/intersection.js b/backend/node_modules/lodash/intersection.js new file mode 100644 index 00000000..a94c1351 --- /dev/null +++ b/backend/node_modules/lodash/intersection.js @@ -0,0 +1,30 @@ +var arrayMap = require('./_arrayMap'), + baseIntersection = require('./_baseIntersection'), + baseRest = require('./_baseRest'), + castArrayLikeObject = require('./_castArrayLikeObject'); + +/** + * Creates an array of unique values that are included in all given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. The order and references of result values are + * determined by the first array. 
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersection([2, 1], [2, 3]); + * // => [2] + */ +var intersection = baseRest(function(arrays) { + var mapped = arrayMap(arrays, castArrayLikeObject); + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped) + : []; +}); + +module.exports = intersection; diff --git a/backend/node_modules/lodash/intersectionBy.js b/backend/node_modules/lodash/intersectionBy.js new file mode 100644 index 00000000..31461aae --- /dev/null +++ b/backend/node_modules/lodash/intersectionBy.js @@ -0,0 +1,45 @@ +var arrayMap = require('./_arrayMap'), + baseIntersection = require('./_baseIntersection'), + baseIteratee = require('./_baseIteratee'), + baseRest = require('./_baseRest'), + castArrayLikeObject = require('./_castArrayLikeObject'), + last = require('./last'); + +/** + * This method is like `_.intersection` except that it accepts `iteratee` + * which is invoked for each element of each `arrays` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersectionBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [2.1] + * + * // The `_.property` iteratee shorthand. + * _.intersectionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }] + */ +var intersectionBy = baseRest(function(arrays) { + var iteratee = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); + + if (iteratee === last(mapped)) { + iteratee = undefined; + } else { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, baseIteratee(iteratee, 2)) + : []; +}); + +module.exports = intersectionBy; diff --git a/backend/node_modules/lodash/intersectionWith.js b/backend/node_modules/lodash/intersectionWith.js new file mode 100644 index 00000000..63cabfaa --- /dev/null +++ b/backend/node_modules/lodash/intersectionWith.js @@ -0,0 +1,41 @@ +var arrayMap = require('./_arrayMap'), + baseIntersection = require('./_baseIntersection'), + baseRest = require('./_baseRest'), + castArrayLikeObject = require('./_castArrayLikeObject'), + last = require('./last'); + +/** + * This method is like `_.intersection` except that it accepts `comparator` + * which is invoked to compare elements of `arrays`. The order and references + * of result values are determined by the first array. The comparator is + * invoked with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of intersecting values. 
+ * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.intersectionWith(objects, others, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }] + */ +var intersectionWith = baseRest(function(arrays) { + var comparator = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); + + comparator = typeof comparator == 'function' ? comparator : undefined; + if (comparator) { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, undefined, comparator) + : []; +}); + +module.exports = intersectionWith; diff --git a/backend/node_modules/lodash/invert.js b/backend/node_modules/lodash/invert.js new file mode 100644 index 00000000..8c479509 --- /dev/null +++ b/backend/node_modules/lodash/invert.js @@ -0,0 +1,42 @@ +var constant = require('./constant'), + createInverter = require('./_createInverter'), + identity = require('./identity'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var nativeObjectToString = objectProto.toString; + +/** + * Creates an object composed of the inverted keys and values of `object`. + * If `object` contains duplicate values, subsequent values overwrite + * property assignments of previous values. + * + * @static + * @memberOf _ + * @since 0.7.0 + * @category Object + * @param {Object} object The object to invert. + * @returns {Object} Returns the new inverted object. + * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invert(object); + * // => { '1': 'c', '2': 'b' } + */ +var invert = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); + } + + result[value] = key; +}, constant(identity)); + +module.exports = invert; diff --git a/backend/node_modules/lodash/invertBy.js b/backend/node_modules/lodash/invertBy.js new file mode 100644 index 00000000..3f4f7e53 --- /dev/null +++ b/backend/node_modules/lodash/invertBy.js @@ -0,0 +1,56 @@ +var baseIteratee = require('./_baseIteratee'), + createInverter = require('./_createInverter'); + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var nativeObjectToString = objectProto.toString; + +/** + * This method is like `_.invert` except that the inverted object is generated + * from the results of running each element of `object` thru `iteratee`. The + * corresponding inverted value of each inverted key is an array of keys + * responsible for generating the inverted value. The iteratee is invoked + * with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.1.0 + * @category Object + * @param {Object} object The object to invert. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Object} Returns the new inverted object. 
+ * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invertBy(object); + * // => { '1': ['a', 'c'], '2': ['b'] } + * + * _.invertBy(object, function(value) { + * return 'group' + value; + * }); + * // => { 'group1': ['a', 'c'], 'group2': ['b'] } + */ +var invertBy = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); + } + + if (hasOwnProperty.call(result, value)) { + result[value].push(key); + } else { + result[value] = [key]; + } +}, baseIteratee); + +module.exports = invertBy; diff --git a/backend/node_modules/lodash/invoke.js b/backend/node_modules/lodash/invoke.js new file mode 100644 index 00000000..97d51eb5 --- /dev/null +++ b/backend/node_modules/lodash/invoke.js @@ -0,0 +1,24 @@ +var baseInvoke = require('./_baseInvoke'), + baseRest = require('./_baseRest'); + +/** + * Invokes the method at `path` of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the method to invoke. + * @param {...*} [args] The arguments to invoke the method with. + * @returns {*} Returns the result of the invoked method. + * @example + * + * var object = { 'a': [{ 'b': { 'c': [1, 2, 3, 4] } }] }; + * + * _.invoke(object, 'a[0].b.c.slice', 1, 3); + * // => [2, 3] + */ +var invoke = baseRest(baseInvoke); + +module.exports = invoke; diff --git a/backend/node_modules/lodash/invokeMap.js b/backend/node_modules/lodash/invokeMap.js new file mode 100644 index 00000000..8da5126c --- /dev/null +++ b/backend/node_modules/lodash/invokeMap.js @@ -0,0 +1,41 @@ +var apply = require('./_apply'), + baseEach = require('./_baseEach'), + baseInvoke = require('./_baseInvoke'), + baseRest = require('./_baseRest'), + isArrayLike = require('./isArrayLike'); + +/** + * Invokes the method at `path` of each element in `collection`, returning + * an array of the results of each invoked method. Any additional arguments + * are provided to each invoked method. If `path` is a function, it's invoked + * for, and `this` bound to, each element in `collection`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Array|Function|string} path The path of the method to invoke or + * the function invoked per iteration. + * @param {...*} [args] The arguments to invoke each method with. + * @returns {Array} Returns the array of results. + * @example + * + * _.invokeMap([[5, 1, 7], [3, 2, 1]], 'sort'); + * // => [[1, 5, 7], [1, 2, 3]] + * + * _.invokeMap([123, 456], String.prototype.split, ''); + * // => [['1', '2', '3'], ['4', '5', '6']] + */ +var invokeMap = baseRest(function(collection, path, args) { + var index = -1, + isFunc = typeof path == 'function', + result = isArrayLike(collection) ? Array(collection.length) : []; + + baseEach(collection, function(value) { + result[++index] = isFunc ? apply(path, value, args) : baseInvoke(value, path, args); + }); + return result; +}); + +module.exports = invokeMap; diff --git a/backend/node_modules/lodash/isArguments.js b/backend/node_modules/lodash/isArguments.js new file mode 100644 index 00000000..8b9ed66c --- /dev/null +++ b/backend/node_modules/lodash/isArguments.js @@ -0,0 +1,36 @@ +var baseIsArguments = require('./_baseIsArguments'), + isObjectLike = require('./isObjectLike'); + +/** Used for built-in method references. 
*/ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** Built-in value references. */ +var propertyIsEnumerable = objectProto.propertyIsEnumerable; + +/** + * Checks if `value` is likely an `arguments` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. + * @example + * + * _.isArguments(function() { return arguments; }()); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ +var isArguments = baseIsArguments(function() { return arguments; }()) ? baseIsArguments : function(value) { + return isObjectLike(value) && hasOwnProperty.call(value, 'callee') && + !propertyIsEnumerable.call(value, 'callee'); +}; + +module.exports = isArguments; diff --git a/backend/node_modules/lodash/isArray.js b/backend/node_modules/lodash/isArray.js new file mode 100644 index 00000000..88ab55fd --- /dev/null +++ b/backend/node_modules/lodash/isArray.js @@ -0,0 +1,26 @@ +/** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ +var isArray = Array.isArray; + +module.exports = isArray; diff --git a/backend/node_modules/lodash/isArrayBuffer.js b/backend/node_modules/lodash/isArrayBuffer.js new file mode 100644 index 00000000..12904a64 --- /dev/null +++ b/backend/node_modules/lodash/isArrayBuffer.js @@ -0,0 +1,27 @@ +var baseIsArrayBuffer = require('./_baseIsArrayBuffer'), + baseUnary = require('./_baseUnary'), + nodeUtil = require('./_nodeUtil'); + +/* Node.js helper references. */ +var nodeIsArrayBuffer = nodeUtil && nodeUtil.isArrayBuffer; + +/** + * Checks if `value` is classified as an `ArrayBuffer` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array buffer, else `false`. + * @example + * + * _.isArrayBuffer(new ArrayBuffer(2)); + * // => true + * + * _.isArrayBuffer(new Array(2)); + * // => false + */ +var isArrayBuffer = nodeIsArrayBuffer ? baseUnary(nodeIsArrayBuffer) : baseIsArrayBuffer; + +module.exports = isArrayBuffer; diff --git a/backend/node_modules/lodash/isArrayLike.js b/backend/node_modules/lodash/isArrayLike.js new file mode 100644 index 00000000..0f966805 --- /dev/null +++ b/backend/node_modules/lodash/isArrayLike.js @@ -0,0 +1,33 @@ +var isFunction = require('./isFunction'), + isLength = require('./isLength'); + +/** + * Checks if `value` is array-like. A value is considered array-like if it's + * not a function and has a `value.length` that's an integer greater than or + * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is array-like, else `false`. 
+ * @example + * + * _.isArrayLike([1, 2, 3]); + * // => true + * + * _.isArrayLike(document.body.children); + * // => true + * + * _.isArrayLike('abc'); + * // => true + * + * _.isArrayLike(_.noop); + * // => false + */ +function isArrayLike(value) { + return value != null && isLength(value.length) && !isFunction(value); +} + +module.exports = isArrayLike; diff --git a/backend/node_modules/lodash/isArrayLikeObject.js b/backend/node_modules/lodash/isArrayLikeObject.js new file mode 100644 index 00000000..6c4812a8 --- /dev/null +++ b/backend/node_modules/lodash/isArrayLikeObject.js @@ -0,0 +1,33 @@ +var isArrayLike = require('./isArrayLike'), + isObjectLike = require('./isObjectLike'); + +/** + * This method is like `_.isArrayLike` except that it also checks if `value` + * is an object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array-like object, + * else `false`. + * @example + * + * _.isArrayLikeObject([1, 2, 3]); + * // => true + * + * _.isArrayLikeObject(document.body.children); + * // => true + * + * _.isArrayLikeObject('abc'); + * // => false + * + * _.isArrayLikeObject(_.noop); + * // => false + */ +function isArrayLikeObject(value) { + return isObjectLike(value) && isArrayLike(value); +} + +module.exports = isArrayLikeObject; diff --git a/backend/node_modules/lodash/isBoolean.js b/backend/node_modules/lodash/isBoolean.js new file mode 100644 index 00000000..a43ed4b8 --- /dev/null +++ b/backend/node_modules/lodash/isBoolean.js @@ -0,0 +1,29 @@ +var baseGetTag = require('./_baseGetTag'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var boolTag = '[object Boolean]'; + +/** + * Checks if `value` is classified as a boolean primitive or object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a boolean, else `false`. + * @example + * + * _.isBoolean(false); + * // => true + * + * _.isBoolean(null); + * // => false + */ +function isBoolean(value) { + return value === true || value === false || + (isObjectLike(value) && baseGetTag(value) == boolTag); +} + +module.exports = isBoolean; diff --git a/backend/node_modules/lodash/isBuffer.js b/backend/node_modules/lodash/isBuffer.js new file mode 100644 index 00000000..c103cc74 --- /dev/null +++ b/backend/node_modules/lodash/isBuffer.js @@ -0,0 +1,38 @@ +var root = require('./_root'), + stubFalse = require('./stubFalse'); + +/** Detect free variable `exports`. */ +var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports; + +/** Detect free variable `module`. */ +var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module; + +/** Detect the popular CommonJS extension `module.exports`. */ +var moduleExports = freeModule && freeModule.exports === freeExports; + +/** Built-in value references. */ +var Buffer = moduleExports ? root.Buffer : undefined; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined; + +/** + * Checks if `value` is a buffer. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. 
+ * @example + * + * _.isBuffer(new Buffer(2)); + * // => true + * + * _.isBuffer(new Uint8Array(2)); + * // => false + */ +var isBuffer = nativeIsBuffer || stubFalse; + +module.exports = isBuffer; diff --git a/backend/node_modules/lodash/isDate.js b/backend/node_modules/lodash/isDate.js new file mode 100644 index 00000000..7f0209fc --- /dev/null +++ b/backend/node_modules/lodash/isDate.js @@ -0,0 +1,27 @@ +var baseIsDate = require('./_baseIsDate'), + baseUnary = require('./_baseUnary'), + nodeUtil = require('./_nodeUtil'); + +/* Node.js helper references. */ +var nodeIsDate = nodeUtil && nodeUtil.isDate; + +/** + * Checks if `value` is classified as a `Date` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. + * @example + * + * _.isDate(new Date); + * // => true + * + * _.isDate('Mon April 23 2012'); + * // => false + */ +var isDate = nodeIsDate ? baseUnary(nodeIsDate) : baseIsDate; + +module.exports = isDate; diff --git a/backend/node_modules/lodash/isElement.js b/backend/node_modules/lodash/isElement.js new file mode 100644 index 00000000..76ae29c3 --- /dev/null +++ b/backend/node_modules/lodash/isElement.js @@ -0,0 +1,25 @@ +var isObjectLike = require('./isObjectLike'), + isPlainObject = require('./isPlainObject'); + +/** + * Checks if `value` is likely a DOM element. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a DOM element, else `false`. + * @example + * + * _.isElement(document.body); + * // => true + * + * _.isElement(''); + * // => false + */ +function isElement(value) { + return isObjectLike(value) && value.nodeType === 1 && !isPlainObject(value); +} + +module.exports = isElement; diff --git a/backend/node_modules/lodash/isEmpty.js b/backend/node_modules/lodash/isEmpty.js new file mode 100644 index 00000000..3597294a --- /dev/null +++ b/backend/node_modules/lodash/isEmpty.js @@ -0,0 +1,77 @@ +var baseKeys = require('./_baseKeys'), + getTag = require('./_getTag'), + isArguments = require('./isArguments'), + isArray = require('./isArray'), + isArrayLike = require('./isArrayLike'), + isBuffer = require('./isBuffer'), + isPrototype = require('./_isPrototype'), + isTypedArray = require('./isTypedArray'); + +/** `Object#toString` result references. */ +var mapTag = '[object Map]', + setTag = '[object Set]'; + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Checks if `value` is an empty object, collection, map, or set. + * + * Objects are considered empty if they have no own enumerable string keyed + * properties. + * + * Array-like values such as `arguments` objects, arrays, buffers, strings, or + * jQuery-like collections are considered empty if they have a `length` of `0`. + * Similarly, maps and sets are considered empty if they have a `size` of `0`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is empty, else `false`. 
+ * @example + * + * _.isEmpty(null); + * // => true + * + * _.isEmpty(true); + * // => true + * + * _.isEmpty(1); + * // => true + * + * _.isEmpty([1, 2, 3]); + * // => false + * + * _.isEmpty({ 'a': 1 }); + * // => false + */ +function isEmpty(value) { + if (value == null) { + return true; + } + if (isArrayLike(value) && + (isArray(value) || typeof value == 'string' || typeof value.splice == 'function' || + isBuffer(value) || isTypedArray(value) || isArguments(value))) { + return !value.length; + } + var tag = getTag(value); + if (tag == mapTag || tag == setTag) { + return !value.size; + } + if (isPrototype(value)) { + return !baseKeys(value).length; + } + for (var key in value) { + if (hasOwnProperty.call(value, key)) { + return false; + } + } + return true; +} + +module.exports = isEmpty; diff --git a/backend/node_modules/lodash/isEqual.js b/backend/node_modules/lodash/isEqual.js new file mode 100644 index 00000000..5e23e76c --- /dev/null +++ b/backend/node_modules/lodash/isEqual.js @@ -0,0 +1,35 @@ +var baseIsEqual = require('./_baseIsEqual'); + +/** + * Performs a deep comparison between two values to determine if they are + * equivalent. + * + * **Note:** This method supports comparing arrays, array buffers, booleans, + * date objects, error objects, maps, numbers, `Object` objects, regexes, + * sets, strings, symbols, and typed arrays. `Object` objects are compared + * by their own, not inherited, enumerable properties. Functions and DOM + * nodes are compared by strict equality, i.e. `===`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.isEqual(object, other); + * // => true + * + * object === other; + * // => false + */ +function isEqual(value, other) { + return baseIsEqual(value, other); +} + +module.exports = isEqual; diff --git a/backend/node_modules/lodash/isEqualWith.js b/backend/node_modules/lodash/isEqualWith.js new file mode 100644 index 00000000..21bdc7ff --- /dev/null +++ b/backend/node_modules/lodash/isEqualWith.js @@ -0,0 +1,41 @@ +var baseIsEqual = require('./_baseIsEqual'); + +/** + * This method is like `_.isEqual` except that it accepts `customizer` which + * is invoked to compare values. If `customizer` returns `undefined`, comparisons + * are handled by the method instead. The `customizer` is invoked with up to + * six arguments: (objValue, othValue [, index|key, object, other, stack]). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * function isGreeting(value) { + * return /^h(?:i|ello)$/.test(value); + * } + * + * function customizer(objValue, othValue) { + * if (isGreeting(objValue) && isGreeting(othValue)) { + * return true; + * } + * } + * + * var array = ['hello', 'goodbye']; + * var other = ['hi', 'goodbye']; + * + * _.isEqualWith(array, other, customizer); + * // => true + */ +function isEqualWith(value, other, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + var result = customizer ? customizer(value, other) : undefined; + return result === undefined ? 
baseIsEqual(value, other, undefined, customizer) : !!result; +} + +module.exports = isEqualWith; diff --git a/backend/node_modules/lodash/isError.js b/backend/node_modules/lodash/isError.js new file mode 100644 index 00000000..b4f41e00 --- /dev/null +++ b/backend/node_modules/lodash/isError.js @@ -0,0 +1,36 @@ +var baseGetTag = require('./_baseGetTag'), + isObjectLike = require('./isObjectLike'), + isPlainObject = require('./isPlainObject'); + +/** `Object#toString` result references. */ +var domExcTag = '[object DOMException]', + errorTag = '[object Error]'; + +/** + * Checks if `value` is an `Error`, `EvalError`, `RangeError`, `ReferenceError`, + * `SyntaxError`, `TypeError`, or `URIError` object. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an error object, else `false`. + * @example + * + * _.isError(new Error); + * // => true + * + * _.isError(Error); + * // => false + */ +function isError(value) { + if (!isObjectLike(value)) { + return false; + } + var tag = baseGetTag(value); + return tag == errorTag || tag == domExcTag || + (typeof value.message == 'string' && typeof value.name == 'string' && !isPlainObject(value)); +} + +module.exports = isError; diff --git a/backend/node_modules/lodash/isFinite.js b/backend/node_modules/lodash/isFinite.js new file mode 100644 index 00000000..601842bc --- /dev/null +++ b/backend/node_modules/lodash/isFinite.js @@ -0,0 +1,36 @@ +var root = require('./_root'); + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeIsFinite = root.isFinite; + +/** + * Checks if `value` is a finite primitive number. + * + * **Note:** This method is based on + * [`Number.isFinite`](https://mdn.io/Number/isFinite). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a finite number, else `false`. + * @example + * + * _.isFinite(3); + * // => true + * + * _.isFinite(Number.MIN_VALUE); + * // => true + * + * _.isFinite(Infinity); + * // => false + * + * _.isFinite('3'); + * // => false + */ +function isFinite(value) { + return typeof value == 'number' && nativeIsFinite(value); +} + +module.exports = isFinite; diff --git a/backend/node_modules/lodash/isFunction.js b/backend/node_modules/lodash/isFunction.js new file mode 100644 index 00000000..907a8cd8 --- /dev/null +++ b/backend/node_modules/lodash/isFunction.js @@ -0,0 +1,37 @@ +var baseGetTag = require('./_baseGetTag'), + isObject = require('./isObject'); + +/** `Object#toString` result references. */ +var asyncTag = '[object AsyncFunction]', + funcTag = '[object Function]', + genTag = '[object GeneratorFunction]', + proxyTag = '[object Proxy]'; + +/** + * Checks if `value` is classified as a `Function` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a function, else `false`. + * @example + * + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false + */ +function isFunction(value) { + if (!isObject(value)) { + return false; + } + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 9 which returns 'object' for typed arrays and other constructors. 
+ var tag = baseGetTag(value); + return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; +} + +module.exports = isFunction; diff --git a/backend/node_modules/lodash/isInteger.js b/backend/node_modules/lodash/isInteger.js new file mode 100644 index 00000000..66aa87d5 --- /dev/null +++ b/backend/node_modules/lodash/isInteger.js @@ -0,0 +1,33 @@ +var toInteger = require('./toInteger'); + +/** + * Checks if `value` is an integer. + * + * **Note:** This method is based on + * [`Number.isInteger`](https://mdn.io/Number/isInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an integer, else `false`. + * @example + * + * _.isInteger(3); + * // => true + * + * _.isInteger(Number.MIN_VALUE); + * // => false + * + * _.isInteger(Infinity); + * // => false + * + * _.isInteger('3'); + * // => false + */ +function isInteger(value) { + return typeof value == 'number' && value == toInteger(value); +} + +module.exports = isInteger; diff --git a/backend/node_modules/lodash/isLength.js b/backend/node_modules/lodash/isLength.js new file mode 100644 index 00000000..3a95caa9 --- /dev/null +++ b/backend/node_modules/lodash/isLength.js @@ -0,0 +1,35 @@ +/** Used as references for various `Number` constants. */ +var MAX_SAFE_INTEGER = 9007199254740991; + +/** + * Checks if `value` is a valid array-like length. + * + * **Note:** This method is loosely based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. + * @example + * + * _.isLength(3); + * // => true + * + * _.isLength(Number.MIN_VALUE); + * // => false + * + * _.isLength(Infinity); + * // => false + * + * _.isLength('3'); + * // => false + */ +function isLength(value) { + return typeof value == 'number' && + value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; +} + +module.exports = isLength; diff --git a/backend/node_modules/lodash/isMap.js b/backend/node_modules/lodash/isMap.js new file mode 100644 index 00000000..44f8517e --- /dev/null +++ b/backend/node_modules/lodash/isMap.js @@ -0,0 +1,27 @@ +var baseIsMap = require('./_baseIsMap'), + baseUnary = require('./_baseUnary'), + nodeUtil = require('./_nodeUtil'); + +/* Node.js helper references. */ +var nodeIsMap = nodeUtil && nodeUtil.isMap; + +/** + * Checks if `value` is classified as a `Map` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. + * @example + * + * _.isMap(new Map); + * // => true + * + * _.isMap(new WeakMap); + * // => false + */ +var isMap = nodeIsMap ? baseUnary(nodeIsMap) : baseIsMap; + +module.exports = isMap; diff --git a/backend/node_modules/lodash/isMatch.js b/backend/node_modules/lodash/isMatch.js new file mode 100644 index 00000000..9773a18c --- /dev/null +++ b/backend/node_modules/lodash/isMatch.js @@ -0,0 +1,36 @@ +var baseIsMatch = require('./_baseIsMatch'), + getMatchData = require('./_getMatchData'); + +/** + * Performs a partial deep comparison between `object` and `source` to + * determine if `object` contains equivalent property values. + * + * **Note:** This method is equivalent to `_.matches` when `source` is + * partially applied. 
+ * + * Partial comparisons will match empty array and empty object `source` + * values against any array or object value, respectively. See `_.isEqual` + * for a list of supported value comparisons. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * + * _.isMatch(object, { 'b': 2 }); + * // => true + * + * _.isMatch(object, { 'b': 1 }); + * // => false + */ +function isMatch(object, source) { + return object === source || baseIsMatch(object, source, getMatchData(source)); +} + +module.exports = isMatch; diff --git a/backend/node_modules/lodash/isMatchWith.js b/backend/node_modules/lodash/isMatchWith.js new file mode 100644 index 00000000..187b6a61 --- /dev/null +++ b/backend/node_modules/lodash/isMatchWith.js @@ -0,0 +1,41 @@ +var baseIsMatch = require('./_baseIsMatch'), + getMatchData = require('./_getMatchData'); + +/** + * This method is like `_.isMatch` except that it accepts `customizer` which + * is invoked to compare values. If `customizer` returns `undefined`, comparisons + * are handled by the method instead. The `customizer` is invoked with five + * arguments: (objValue, srcValue, index|key, object, source). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + * @example + * + * function isGreeting(value) { + * return /^h(?:i|ello)$/.test(value); + * } + * + * function customizer(objValue, srcValue) { + * if (isGreeting(objValue) && isGreeting(srcValue)) { + * return true; + * } + * } + * + * var object = { 'greeting': 'hello' }; + * var source = { 'greeting': 'hi' }; + * + * _.isMatchWith(object, source, customizer); + * // => true + */ +function isMatchWith(object, source, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseIsMatch(object, source, getMatchData(source), customizer); +} + +module.exports = isMatchWith; diff --git a/backend/node_modules/lodash/isNaN.js b/backend/node_modules/lodash/isNaN.js new file mode 100644 index 00000000..7d0d783b --- /dev/null +++ b/backend/node_modules/lodash/isNaN.js @@ -0,0 +1,38 @@ +var isNumber = require('./isNumber'); + +/** + * Checks if `value` is `NaN`. + * + * **Note:** This method is based on + * [`Number.isNaN`](https://mdn.io/Number/isNaN) and is not the same as + * global [`isNaN`](https://mdn.io/isNaN) which returns `true` for + * `undefined` and other non-number values. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + * @example + * + * _.isNaN(NaN); + * // => true + * + * _.isNaN(new Number(NaN)); + * // => true + * + * isNaN(undefined); + * // => true + * + * _.isNaN(undefined); + * // => false + */ +function isNaN(value) { + // An `NaN` primitive is the only value that is not equal to itself. + // Perform the `toStringTag` check first to avoid errors with some + // ActiveX objects in IE. 
+ return isNumber(value) && value != +value; +} + +module.exports = isNaN; diff --git a/backend/node_modules/lodash/isNative.js b/backend/node_modules/lodash/isNative.js new file mode 100644 index 00000000..f0cb8d58 --- /dev/null +++ b/backend/node_modules/lodash/isNative.js @@ -0,0 +1,40 @@ +var baseIsNative = require('./_baseIsNative'), + isMaskable = require('./_isMaskable'); + +/** Error message constants. */ +var CORE_ERROR_TEXT = 'Unsupported core-js use. Try https://npms.io/search?q=ponyfill.'; + +/** + * Checks if `value` is a pristine native function. + * + * **Note:** This method can't reliably detect native functions in the presence + * of the core-js package because core-js circumvents this kind of detection. + * Despite multiple requests, the core-js maintainer has made it clear: any + * attempt to fix the detection will be obstructed. As a result, we're left + * with little choice but to throw an error. Unfortunately, this also affects + * packages, like [babel-polyfill](https://www.npmjs.com/package/babel-polyfill), + * which rely on core-js. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. + * @example + * + * _.isNative(Array.prototype.push); + * // => true + * + * _.isNative(_); + * // => false + */ +function isNative(value) { + if (isMaskable(value)) { + throw new Error(CORE_ERROR_TEXT); + } + return baseIsNative(value); +} + +module.exports = isNative; diff --git a/backend/node_modules/lodash/isNil.js b/backend/node_modules/lodash/isNil.js new file mode 100644 index 00000000..79f05052 --- /dev/null +++ b/backend/node_modules/lodash/isNil.js @@ -0,0 +1,25 @@ +/** + * Checks if `value` is `null` or `undefined`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is nullish, else `false`. + * @example + * + * _.isNil(null); + * // => true + * + * _.isNil(void 0); + * // => true + * + * _.isNil(NaN); + * // => false + */ +function isNil(value) { + return value == null; +} + +module.exports = isNil; diff --git a/backend/node_modules/lodash/isNull.js b/backend/node_modules/lodash/isNull.js new file mode 100644 index 00000000..c0a374d7 --- /dev/null +++ b/backend/node_modules/lodash/isNull.js @@ -0,0 +1,22 @@ +/** + * Checks if `value` is `null`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `null`, else `false`. + * @example + * + * _.isNull(null); + * // => true + * + * _.isNull(void 0); + * // => false + */ +function isNull(value) { + return value === null; +} + +module.exports = isNull; diff --git a/backend/node_modules/lodash/isNumber.js b/backend/node_modules/lodash/isNumber.js new file mode 100644 index 00000000..cd34ee46 --- /dev/null +++ b/backend/node_modules/lodash/isNumber.js @@ -0,0 +1,38 @@ +var baseGetTag = require('./_baseGetTag'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var numberTag = '[object Number]'; + +/** + * Checks if `value` is classified as a `Number` primitive or object. + * + * **Note:** To exclude `Infinity`, `-Infinity`, and `NaN`, which are + * classified as numbers, use the `_.isFinite` method. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. 
+ * @returns {boolean} Returns `true` if `value` is a number, else `false`. + * @example + * + * _.isNumber(3); + * // => true + * + * _.isNumber(Number.MIN_VALUE); + * // => true + * + * _.isNumber(Infinity); + * // => true + * + * _.isNumber('3'); + * // => false + */ +function isNumber(value) { + return typeof value == 'number' || + (isObjectLike(value) && baseGetTag(value) == numberTag); +} + +module.exports = isNumber; diff --git a/backend/node_modules/lodash/isObject.js b/backend/node_modules/lodash/isObject.js new file mode 100644 index 00000000..1dc89391 --- /dev/null +++ b/backend/node_modules/lodash/isObject.js @@ -0,0 +1,31 @@ +/** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ +function isObject(value) { + var type = typeof value; + return value != null && (type == 'object' || type == 'function'); +} + +module.exports = isObject; diff --git a/backend/node_modules/lodash/isObjectLike.js b/backend/node_modules/lodash/isObjectLike.js new file mode 100644 index 00000000..301716b5 --- /dev/null +++ b/backend/node_modules/lodash/isObjectLike.js @@ -0,0 +1,29 @@ +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return value != null && typeof value == 'object'; +} + +module.exports = isObjectLike; diff --git a/backend/node_modules/lodash/isPlainObject.js b/backend/node_modules/lodash/isPlainObject.js new file mode 100644 index 00000000..23873731 --- /dev/null +++ b/backend/node_modules/lodash/isPlainObject.js @@ -0,0 +1,62 @@ +var baseGetTag = require('./_baseGetTag'), + getPrototype = require('./_getPrototype'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var objectTag = '[object Object]'; + +/** Used for built-in method references. */ +var funcProto = Function.prototype, + objectProto = Object.prototype; + +/** Used to resolve the decompiled source of functions. */ +var funcToString = funcProto.toString; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** Used to infer the `Object` constructor. */ +var objectCtorString = funcToString.call(Object); + +/** + * Checks if `value` is a plain object, that is, an object created by the + * `Object` constructor or one with a `[[Prototype]]` of `null`. + * + * @static + * @memberOf _ + * @since 0.8.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a plain object, else `false`. 
+ * @example + * + * function Foo() { + * this.a = 1; + * } + * + * _.isPlainObject(new Foo); + * // => false + * + * _.isPlainObject([1, 2, 3]); + * // => false + * + * _.isPlainObject({ 'x': 0, 'y': 0 }); + * // => true + * + * _.isPlainObject(Object.create(null)); + * // => true + */ +function isPlainObject(value) { + if (!isObjectLike(value) || baseGetTag(value) != objectTag) { + return false; + } + var proto = getPrototype(value); + if (proto === null) { + return true; + } + var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor; + return typeof Ctor == 'function' && Ctor instanceof Ctor && + funcToString.call(Ctor) == objectCtorString; +} + +module.exports = isPlainObject; diff --git a/backend/node_modules/lodash/isRegExp.js b/backend/node_modules/lodash/isRegExp.js new file mode 100644 index 00000000..76c9b6e9 --- /dev/null +++ b/backend/node_modules/lodash/isRegExp.js @@ -0,0 +1,27 @@ +var baseIsRegExp = require('./_baseIsRegExp'), + baseUnary = require('./_baseUnary'), + nodeUtil = require('./_nodeUtil'); + +/* Node.js helper references. */ +var nodeIsRegExp = nodeUtil && nodeUtil.isRegExp; + +/** + * Checks if `value` is classified as a `RegExp` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. + * @example + * + * _.isRegExp(/abc/); + * // => true + * + * _.isRegExp('/abc/'); + * // => false + */ +var isRegExp = nodeIsRegExp ? baseUnary(nodeIsRegExp) : baseIsRegExp; + +module.exports = isRegExp; diff --git a/backend/node_modules/lodash/isSafeInteger.js b/backend/node_modules/lodash/isSafeInteger.js new file mode 100644 index 00000000..2a48526e --- /dev/null +++ b/backend/node_modules/lodash/isSafeInteger.js @@ -0,0 +1,37 @@ +var isInteger = require('./isInteger'); + +/** Used as references for various `Number` constants. */ +var MAX_SAFE_INTEGER = 9007199254740991; + +/** + * Checks if `value` is a safe integer. An integer is safe if it's an IEEE-754 + * double precision number which isn't the result of a rounded unsafe integer. + * + * **Note:** This method is based on + * [`Number.isSafeInteger`](https://mdn.io/Number/isSafeInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a safe integer, else `false`. + * @example + * + * _.isSafeInteger(3); + * // => true + * + * _.isSafeInteger(Number.MIN_VALUE); + * // => false + * + * _.isSafeInteger(Infinity); + * // => false + * + * _.isSafeInteger('3'); + * // => false + */ +function isSafeInteger(value) { + return isInteger(value) && value >= -MAX_SAFE_INTEGER && value <= MAX_SAFE_INTEGER; +} + +module.exports = isSafeInteger; diff --git a/backend/node_modules/lodash/isSet.js b/backend/node_modules/lodash/isSet.js new file mode 100644 index 00000000..ab88bdf8 --- /dev/null +++ b/backend/node_modules/lodash/isSet.js @@ -0,0 +1,27 @@ +var baseIsSet = require('./_baseIsSet'), + baseUnary = require('./_baseUnary'), + nodeUtil = require('./_nodeUtil'); + +/* Node.js helper references. */ +var nodeIsSet = nodeUtil && nodeUtil.isSet; + +/** + * Checks if `value` is classified as a `Set` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. 
+ * @example + * + * _.isSet(new Set); + * // => true + * + * _.isSet(new WeakSet); + * // => false + */ +var isSet = nodeIsSet ? baseUnary(nodeIsSet) : baseIsSet; + +module.exports = isSet; diff --git a/backend/node_modules/lodash/isString.js b/backend/node_modules/lodash/isString.js new file mode 100644 index 00000000..627eb9c3 --- /dev/null +++ b/backend/node_modules/lodash/isString.js @@ -0,0 +1,30 @@ +var baseGetTag = require('./_baseGetTag'), + isArray = require('./isArray'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var stringTag = '[object String]'; + +/** + * Checks if `value` is classified as a `String` primitive or object. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a string, else `false`. + * @example + * + * _.isString('abc'); + * // => true + * + * _.isString(1); + * // => false + */ +function isString(value) { + return typeof value == 'string' || + (!isArray(value) && isObjectLike(value) && baseGetTag(value) == stringTag); +} + +module.exports = isString; diff --git a/backend/node_modules/lodash/isSymbol.js b/backend/node_modules/lodash/isSymbol.js new file mode 100644 index 00000000..dfb60b97 --- /dev/null +++ b/backend/node_modules/lodash/isSymbol.js @@ -0,0 +1,29 @@ +var baseGetTag = require('./_baseGetTag'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var symbolTag = '[object Symbol]'; + +/** + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. + * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false + */ +function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && baseGetTag(value) == symbolTag); +} + +module.exports = isSymbol; diff --git a/backend/node_modules/lodash/isTypedArray.js b/backend/node_modules/lodash/isTypedArray.js new file mode 100644 index 00000000..da3f8dd1 --- /dev/null +++ b/backend/node_modules/lodash/isTypedArray.js @@ -0,0 +1,27 @@ +var baseIsTypedArray = require('./_baseIsTypedArray'), + baseUnary = require('./_baseUnary'), + nodeUtil = require('./_nodeUtil'); + +/* Node.js helper references. */ +var nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; + +/** + * Checks if `value` is classified as a typed array. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + * @example + * + * _.isTypedArray(new Uint8Array); + * // => true + * + * _.isTypedArray([]); + * // => false + */ +var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; + +module.exports = isTypedArray; diff --git a/backend/node_modules/lodash/isUndefined.js b/backend/node_modules/lodash/isUndefined.js new file mode 100644 index 00000000..377d121a --- /dev/null +++ b/backend/node_modules/lodash/isUndefined.js @@ -0,0 +1,22 @@ +/** + * Checks if `value` is `undefined`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `undefined`, else `false`. 
+ * @example + * + * _.isUndefined(void 0); + * // => true + * + * _.isUndefined(null); + * // => false + */ +function isUndefined(value) { + return value === undefined; +} + +module.exports = isUndefined; diff --git a/backend/node_modules/lodash/isWeakMap.js b/backend/node_modules/lodash/isWeakMap.js new file mode 100644 index 00000000..8d36f663 --- /dev/null +++ b/backend/node_modules/lodash/isWeakMap.js @@ -0,0 +1,28 @@ +var getTag = require('./_getTag'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var weakMapTag = '[object WeakMap]'; + +/** + * Checks if `value` is classified as a `WeakMap` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak map, else `false`. + * @example + * + * _.isWeakMap(new WeakMap); + * // => true + * + * _.isWeakMap(new Map); + * // => false + */ +function isWeakMap(value) { + return isObjectLike(value) && getTag(value) == weakMapTag; +} + +module.exports = isWeakMap; diff --git a/backend/node_modules/lodash/isWeakSet.js b/backend/node_modules/lodash/isWeakSet.js new file mode 100644 index 00000000..e628b261 --- /dev/null +++ b/backend/node_modules/lodash/isWeakSet.js @@ -0,0 +1,28 @@ +var baseGetTag = require('./_baseGetTag'), + isObjectLike = require('./isObjectLike'); + +/** `Object#toString` result references. */ +var weakSetTag = '[object WeakSet]'; + +/** + * Checks if `value` is classified as a `WeakSet` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak set, else `false`. + * @example + * + * _.isWeakSet(new WeakSet); + * // => true + * + * _.isWeakSet(new Set); + * // => false + */ +function isWeakSet(value) { + return isObjectLike(value) && baseGetTag(value) == weakSetTag; +} + +module.exports = isWeakSet; diff --git a/backend/node_modules/lodash/iteratee.js b/backend/node_modules/lodash/iteratee.js new file mode 100644 index 00000000..61b73a8c --- /dev/null +++ b/backend/node_modules/lodash/iteratee.js @@ -0,0 +1,53 @@ +var baseClone = require('./_baseClone'), + baseIteratee = require('./_baseIteratee'); + +/** Used to compose bitmasks for cloning. */ +var CLONE_DEEP_FLAG = 1; + +/** + * Creates a function that invokes `func` with the arguments of the created + * function. If `func` is a property name, the created function returns the + * property value for a given element. If `func` is an array or object, the + * created function returns `true` for elements that contain the equivalent + * source properties, otherwise it returns `false`. + * + * @static + * @since 4.0.0 + * @memberOf _ + * @category Util + * @param {*} [func=_.identity] The value to convert to a callback. + * @returns {Function} Returns the callback. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.filter(users, _.iteratee({ 'user': 'barney', 'active': true })); + * // => [{ 'user': 'barney', 'age': 36, 'active': true }] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.filter(users, _.iteratee(['user', 'fred'])); + * // => [{ 'user': 'fred', 'age': 40 }] + * + * // The `_.property` iteratee shorthand. + * _.map(users, _.iteratee('user')); + * // => ['barney', 'fred'] + * + * // Create custom iteratee shorthands. 
+ * _.iteratee = _.wrap(_.iteratee, function(iteratee, func) { + * return !_.isRegExp(func) ? iteratee(func) : function(string) { + * return func.test(string); + * }; + * }); + * + * _.filter(['abc', 'def'], /ef/); + * // => ['def'] + */ +function iteratee(func) { + return baseIteratee(typeof func == 'function' ? func : baseClone(func, CLONE_DEEP_FLAG)); +} + +module.exports = iteratee; diff --git a/backend/node_modules/lodash/join.js b/backend/node_modules/lodash/join.js new file mode 100644 index 00000000..45de079f --- /dev/null +++ b/backend/node_modules/lodash/join.js @@ -0,0 +1,26 @@ +/** Used for built-in method references. */ +var arrayProto = Array.prototype; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeJoin = arrayProto.join; + +/** + * Converts all elements in `array` into a string separated by `separator`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to convert. + * @param {string} [separator=','] The element separator. + * @returns {string} Returns the joined string. + * @example + * + * _.join(['a', 'b', 'c'], '~'); + * // => 'a~b~c' + */ +function join(array, separator) { + return array == null ? '' : nativeJoin.call(array, separator); +} + +module.exports = join; diff --git a/backend/node_modules/lodash/kebabCase.js b/backend/node_modules/lodash/kebabCase.js new file mode 100644 index 00000000..8a52be64 --- /dev/null +++ b/backend/node_modules/lodash/kebabCase.js @@ -0,0 +1,28 @@ +var createCompounder = require('./_createCompounder'); + +/** + * Converts `string` to + * [kebab case](https://en.wikipedia.org/wiki/Letter_case#Special_case_styles). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the kebab cased string. + * @example + * + * _.kebabCase('Foo Bar'); + * // => 'foo-bar' + * + * _.kebabCase('fooBar'); + * // => 'foo-bar' + * + * _.kebabCase('__FOO_BAR__'); + * // => 'foo-bar' + */ +var kebabCase = createCompounder(function(result, word, index) { + return result + (index ? '-' : '') + word.toLowerCase(); +}); + +module.exports = kebabCase; diff --git a/backend/node_modules/lodash/keyBy.js b/backend/node_modules/lodash/keyBy.js new file mode 100644 index 00000000..acc007a0 --- /dev/null +++ b/backend/node_modules/lodash/keyBy.js @@ -0,0 +1,36 @@ +var baseAssignValue = require('./_baseAssignValue'), + createAggregator = require('./_createAggregator'); + +/** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The corresponding value of + * each key is the last element responsible for generating the key. The + * iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. 
+ * @example + * + * var array = [ + * { 'dir': 'left', 'code': 97 }, + * { 'dir': 'right', 'code': 100 } + * ]; + * + * _.keyBy(array, function(o) { + * return String.fromCharCode(o.code); + * }); + * // => { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } } + * + * _.keyBy(array, 'dir'); + * // => { 'left': { 'dir': 'left', 'code': 97 }, 'right': { 'dir': 'right', 'code': 100 } } + */ +var keyBy = createAggregator(function(result, value, key) { + baseAssignValue(result, key, value); +}); + +module.exports = keyBy; diff --git a/backend/node_modules/lodash/keys.js b/backend/node_modules/lodash/keys.js new file mode 100644 index 00000000..d143c718 --- /dev/null +++ b/backend/node_modules/lodash/keys.js @@ -0,0 +1,37 @@ +var arrayLikeKeys = require('./_arrayLikeKeys'), + baseKeys = require('./_baseKeys'), + isArrayLike = require('./isArrayLike'); + +/** + * Creates an array of the own enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. See the + * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * for more details. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keys(new Foo); + * // => ['a', 'b'] (iteration order is not guaranteed) + * + * _.keys('hi'); + * // => ['0', '1'] + */ +function keys(object) { + return isArrayLike(object) ? arrayLikeKeys(object) : baseKeys(object); +} + +module.exports = keys; diff --git a/backend/node_modules/lodash/keysIn.js b/backend/node_modules/lodash/keysIn.js new file mode 100644 index 00000000..a62308f2 --- /dev/null +++ b/backend/node_modules/lodash/keysIn.js @@ -0,0 +1,32 @@ +var arrayLikeKeys = require('./_arrayLikeKeys'), + baseKeysIn = require('./_baseKeysIn'), + isArrayLike = require('./isArrayLike'); + +/** + * Creates an array of the own and inherited enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keysIn(new Foo); + * // => ['a', 'b', 'c'] (iteration order is not guaranteed) + */ +function keysIn(object) { + return isArrayLike(object) ? 
arrayLikeKeys(object, true) : baseKeysIn(object); +} + +module.exports = keysIn; diff --git a/backend/node_modules/lodash/lang.js b/backend/node_modules/lodash/lang.js new file mode 100644 index 00000000..a3962169 --- /dev/null +++ b/backend/node_modules/lodash/lang.js @@ -0,0 +1,58 @@ +module.exports = { + 'castArray': require('./castArray'), + 'clone': require('./clone'), + 'cloneDeep': require('./cloneDeep'), + 'cloneDeepWith': require('./cloneDeepWith'), + 'cloneWith': require('./cloneWith'), + 'conformsTo': require('./conformsTo'), + 'eq': require('./eq'), + 'gt': require('./gt'), + 'gte': require('./gte'), + 'isArguments': require('./isArguments'), + 'isArray': require('./isArray'), + 'isArrayBuffer': require('./isArrayBuffer'), + 'isArrayLike': require('./isArrayLike'), + 'isArrayLikeObject': require('./isArrayLikeObject'), + 'isBoolean': require('./isBoolean'), + 'isBuffer': require('./isBuffer'), + 'isDate': require('./isDate'), + 'isElement': require('./isElement'), + 'isEmpty': require('./isEmpty'), + 'isEqual': require('./isEqual'), + 'isEqualWith': require('./isEqualWith'), + 'isError': require('./isError'), + 'isFinite': require('./isFinite'), + 'isFunction': require('./isFunction'), + 'isInteger': require('./isInteger'), + 'isLength': require('./isLength'), + 'isMap': require('./isMap'), + 'isMatch': require('./isMatch'), + 'isMatchWith': require('./isMatchWith'), + 'isNaN': require('./isNaN'), + 'isNative': require('./isNative'), + 'isNil': require('./isNil'), + 'isNull': require('./isNull'), + 'isNumber': require('./isNumber'), + 'isObject': require('./isObject'), + 'isObjectLike': require('./isObjectLike'), + 'isPlainObject': require('./isPlainObject'), + 'isRegExp': require('./isRegExp'), + 'isSafeInteger': require('./isSafeInteger'), + 'isSet': require('./isSet'), + 'isString': require('./isString'), + 'isSymbol': require('./isSymbol'), + 'isTypedArray': require('./isTypedArray'), + 'isUndefined': require('./isUndefined'), + 'isWeakMap': require('./isWeakMap'), + 'isWeakSet': require('./isWeakSet'), + 'lt': require('./lt'), + 'lte': require('./lte'), + 'toArray': require('./toArray'), + 'toFinite': require('./toFinite'), + 'toInteger': require('./toInteger'), + 'toLength': require('./toLength'), + 'toNumber': require('./toNumber'), + 'toPlainObject': require('./toPlainObject'), + 'toSafeInteger': require('./toSafeInteger'), + 'toString': require('./toString') +}; diff --git a/backend/node_modules/lodash/last.js b/backend/node_modules/lodash/last.js new file mode 100644 index 00000000..cad1eafa --- /dev/null +++ b/backend/node_modules/lodash/last.js @@ -0,0 +1,20 @@ +/** + * Gets the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the last element of `array`. + * @example + * + * _.last([1, 2, 3]); + * // => 3 + */ +function last(array) { + var length = array == null ? 0 : array.length; + return length ? array[length - 1] : undefined; +} + +module.exports = last; diff --git a/backend/node_modules/lodash/lastIndexOf.js b/backend/node_modules/lodash/lastIndexOf.js new file mode 100644 index 00000000..dabfb613 --- /dev/null +++ b/backend/node_modules/lodash/lastIndexOf.js @@ -0,0 +1,46 @@ +var baseFindIndex = require('./_baseFindIndex'), + baseIsNaN = require('./_baseIsNaN'), + strictLastIndexOf = require('./_strictLastIndexOf'), + toInteger = require('./toInteger'); + +/* Built-in method references for those with the same name as other `lodash` methods. 
*/ +var nativeMax = Math.max, + nativeMin = Math.min; + +/** + * This method is like `_.indexOf` except that it iterates over elements of + * `array` from right to left. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.lastIndexOf([1, 2, 1, 2], 2); + * // => 3 + * + * // Search from the `fromIndex`. + * _.lastIndexOf([1, 2, 1, 2], 2, 2); + * // => 1 + */ +function lastIndexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = index < 0 ? nativeMax(length + index, 0) : nativeMin(index, length - 1); + } + return value === value + ? strictLastIndexOf(array, value, index) + : baseFindIndex(array, baseIsNaN, index, true); +} + +module.exports = lastIndexOf; diff --git a/backend/node_modules/lodash/lodash.js b/backend/node_modules/lodash/lodash.js new file mode 100644 index 00000000..ba61bcba --- /dev/null +++ b/backend/node_modules/lodash/lodash.js @@ -0,0 +1,17248 @@ +/** + * @license + * Lodash + * Copyright OpenJS Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ +;(function() { + + /** Used as a safe reference for `undefined` in pre-ES5 environments. */ + var undefined; + + /** Used as the semantic version number. */ + var VERSION = '4.17.23'; + + /** Used as the size to enable large array optimizations. */ + var LARGE_ARRAY_SIZE = 200; + + /** Error message constants. */ + var CORE_ERROR_TEXT = 'Unsupported core-js use. Try https://npms.io/search?q=ponyfill.', + FUNC_ERROR_TEXT = 'Expected a function', + INVALID_TEMPL_VAR_ERROR_TEXT = 'Invalid `variable` option passed into `_.template`'; + + /** Used to stand-in for `undefined` hash values. */ + var HASH_UNDEFINED = '__lodash_hash_undefined__'; + + /** Used as the maximum memoize cache size. */ + var MAX_MEMOIZE_SIZE = 500; + + /** Used as the internal argument placeholder. */ + var PLACEHOLDER = '__lodash_placeholder__'; + + /** Used to compose bitmasks for cloning. */ + var CLONE_DEEP_FLAG = 1, + CLONE_FLAT_FLAG = 2, + CLONE_SYMBOLS_FLAG = 4; + + /** Used to compose bitmasks for value comparisons. */ + var COMPARE_PARTIAL_FLAG = 1, + COMPARE_UNORDERED_FLAG = 2; + + /** Used to compose bitmasks for function metadata. */ + var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_CURRY_BOUND_FLAG = 4, + WRAP_CURRY_FLAG = 8, + WRAP_CURRY_RIGHT_FLAG = 16, + WRAP_PARTIAL_FLAG = 32, + WRAP_PARTIAL_RIGHT_FLAG = 64, + WRAP_ARY_FLAG = 128, + WRAP_REARG_FLAG = 256, + WRAP_FLIP_FLAG = 512; + + /** Used as default options for `_.truncate`. */ + var DEFAULT_TRUNC_LENGTH = 30, + DEFAULT_TRUNC_OMISSION = '...'; + + /** Used to detect hot functions by number of calls within a span of milliseconds. */ + var HOT_COUNT = 800, + HOT_SPAN = 16; + + /** Used to indicate the type of lazy iteratees. */ + var LAZY_FILTER_FLAG = 1, + LAZY_MAP_FLAG = 2, + LAZY_WHILE_FLAG = 3; + + /** Used as references for various `Number` constants. 
*/ + var INFINITY = 1 / 0, + MAX_SAFE_INTEGER = 9007199254740991, + MAX_INTEGER = 1.7976931348623157e+308, + NAN = 0 / 0; + + /** Used as references for the maximum length and index of an array. */ + var MAX_ARRAY_LENGTH = 4294967295, + MAX_ARRAY_INDEX = MAX_ARRAY_LENGTH - 1, + HALF_MAX_ARRAY_LENGTH = MAX_ARRAY_LENGTH >>> 1; + + /** Used to associate wrap methods with their bit flags. */ + var wrapFlags = [ + ['ary', WRAP_ARY_FLAG], + ['bind', WRAP_BIND_FLAG], + ['bindKey', WRAP_BIND_KEY_FLAG], + ['curry', WRAP_CURRY_FLAG], + ['curryRight', WRAP_CURRY_RIGHT_FLAG], + ['flip', WRAP_FLIP_FLAG], + ['partial', WRAP_PARTIAL_FLAG], + ['partialRight', WRAP_PARTIAL_RIGHT_FLAG], + ['rearg', WRAP_REARG_FLAG] + ]; + + /** `Object#toString` result references. */ + var argsTag = '[object Arguments]', + arrayTag = '[object Array]', + asyncTag = '[object AsyncFunction]', + boolTag = '[object Boolean]', + dateTag = '[object Date]', + domExcTag = '[object DOMException]', + errorTag = '[object Error]', + funcTag = '[object Function]', + genTag = '[object GeneratorFunction]', + mapTag = '[object Map]', + numberTag = '[object Number]', + nullTag = '[object Null]', + objectTag = '[object Object]', + promiseTag = '[object Promise]', + proxyTag = '[object Proxy]', + regexpTag = '[object RegExp]', + setTag = '[object Set]', + stringTag = '[object String]', + symbolTag = '[object Symbol]', + undefinedTag = '[object Undefined]', + weakMapTag = '[object WeakMap]', + weakSetTag = '[object WeakSet]'; + + var arrayBufferTag = '[object ArrayBuffer]', + dataViewTag = '[object DataView]', + float32Tag = '[object Float32Array]', + float64Tag = '[object Float64Array]', + int8Tag = '[object Int8Array]', + int16Tag = '[object Int16Array]', + int32Tag = '[object Int32Array]', + uint8Tag = '[object Uint8Array]', + uint8ClampedTag = '[object Uint8ClampedArray]', + uint16Tag = '[object Uint16Array]', + uint32Tag = '[object Uint32Array]'; + + /** Used to match empty string literals in compiled template source. */ + var reEmptyStringLeading = /\b__p \+= '';/g, + reEmptyStringMiddle = /\b(__p \+=) '' \+/g, + reEmptyStringTrailing = /(__e\(.*?\)|\b__t\)) \+\n'';/g; + + /** Used to match HTML entities and HTML characters. */ + var reEscapedHtml = /&(?:amp|lt|gt|quot|#39);/g, + reUnescapedHtml = /[&<>"']/g, + reHasEscapedHtml = RegExp(reEscapedHtml.source), + reHasUnescapedHtml = RegExp(reUnescapedHtml.source); + + /** Used to match template delimiters. */ + var reEscape = /<%-([\s\S]+?)%>/g, + reEvaluate = /<%([\s\S]+?)%>/g, + reInterpolate = /<%=([\s\S]+?)%>/g; + + /** Used to match property names within property paths. */ + var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, + reIsPlainProp = /^\w*$/, + rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; + + /** + * Used to match `RegExp` + * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). + */ + var reRegExpChar = /[\\^$.*+?()[\]{}|]/g, + reHasRegExpChar = RegExp(reRegExpChar.source); + + /** Used to match leading whitespace. */ + var reTrimStart = /^\s+/; + + /** Used to match a single whitespace character. */ + var reWhitespace = /\s/; + + /** Used to match wrap detail comments. */ + var reWrapComment = /\{(?:\n\/\* \[wrapped with .+\] \*\/)?\n?/, + reWrapDetails = /\{\n\/\* \[wrapped with (.+)\] \*/, + reSplitDetails = /,? & /; + + /** Used to match words composed of alphanumeric characters. 
*/ + var reAsciiWord = /[^\x00-\x2f\x3a-\x40\x5b-\x60\x7b-\x7f]+/g; + + /** + * Used to validate the `validate` option in `_.template` variable. + * + * Forbids characters which could potentially change the meaning of the function argument definition: + * - "()," (modification of function parameters) + * - "=" (default value) + * - "[]{}" (destructuring of function parameters) + * - "/" (beginning of a comment) + * - whitespace + */ + var reForbiddenIdentifierChars = /[()=,{}\[\]\/\s]/; + + /** Used to match backslashes in property paths. */ + var reEscapeChar = /\\(\\)?/g; + + /** + * Used to match + * [ES template delimiters](http://ecma-international.org/ecma-262/7.0/#sec-template-literal-lexical-components). + */ + var reEsTemplate = /\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g; + + /** Used to match `RegExp` flags from their coerced string values. */ + var reFlags = /\w*$/; + + /** Used to detect bad signed hexadecimal string values. */ + var reIsBadHex = /^[-+]0x[0-9a-f]+$/i; + + /** Used to detect binary string values. */ + var reIsBinary = /^0b[01]+$/i; + + /** Used to detect host constructors (Safari). */ + var reIsHostCtor = /^\[object .+?Constructor\]$/; + + /** Used to detect octal string values. */ + var reIsOctal = /^0o[0-7]+$/i; + + /** Used to detect unsigned integer values. */ + var reIsUint = /^(?:0|[1-9]\d*)$/; + + /** Used to match Latin Unicode letters (excluding mathematical operators). */ + var reLatin = /[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g; + + /** Used to ensure capturing order of template delimiters. */ + var reNoMatch = /($^)/; + + /** Used to match unescaped characters in compiled string literals. */ + var reUnescapedString = /['\n\r\u2028\u2029\\]/g; + + /** Used to compose unicode character classes. */ + var rsAstralRange = '\\ud800-\\udfff', + rsComboMarksRange = '\\u0300-\\u036f', + reComboHalfMarksRange = '\\ufe20-\\ufe2f', + rsComboSymbolsRange = '\\u20d0-\\u20ff', + rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange, + rsDingbatRange = '\\u2700-\\u27bf', + rsLowerRange = 'a-z\\xdf-\\xf6\\xf8-\\xff', + rsMathOpRange = '\\xac\\xb1\\xd7\\xf7', + rsNonCharRange = '\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf', + rsPunctuationRange = '\\u2000-\\u206f', + rsSpaceRange = ' \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000', + rsUpperRange = 'A-Z\\xc0-\\xd6\\xd8-\\xde', + rsVarRange = '\\ufe0e\\ufe0f', + rsBreakRange = rsMathOpRange + rsNonCharRange + rsPunctuationRange + rsSpaceRange; + + /** Used to compose unicode capture groups. */ + var rsApos = "['\u2019]", + rsAstral = '[' + rsAstralRange + ']', + rsBreak = '[' + rsBreakRange + ']', + rsCombo = '[' + rsComboRange + ']', + rsDigits = '\\d+', + rsDingbat = '[' + rsDingbatRange + ']', + rsLower = '[' + rsLowerRange + ']', + rsMisc = '[^' + rsAstralRange + rsBreakRange + rsDigits + rsDingbatRange + rsLowerRange + rsUpperRange + ']', + rsFitz = '\\ud83c[\\udffb-\\udfff]', + rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')', + rsNonAstral = '[^' + rsAstralRange + ']', + rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}', + rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]', + rsUpper = '[' + rsUpperRange + ']', + rsZWJ = '\\u200d'; + + /** Used to compose unicode regexes. 
*/ + var rsMiscLower = '(?:' + rsLower + '|' + rsMisc + ')', + rsMiscUpper = '(?:' + rsUpper + '|' + rsMisc + ')', + rsOptContrLower = '(?:' + rsApos + '(?:d|ll|m|re|s|t|ve))?', + rsOptContrUpper = '(?:' + rsApos + '(?:D|LL|M|RE|S|T|VE))?', + reOptMod = rsModifier + '?', + rsOptVar = '[' + rsVarRange + ']?', + rsOptJoin = '(?:' + rsZWJ + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*', + rsOrdLower = '\\d*(?:1st|2nd|3rd|(?![123])\\dth)(?=\\b|[A-Z_])', + rsOrdUpper = '\\d*(?:1ST|2ND|3RD|(?![123])\\dTH)(?=\\b|[a-z_])', + rsSeq = rsOptVar + reOptMod + rsOptJoin, + rsEmoji = '(?:' + [rsDingbat, rsRegional, rsSurrPair].join('|') + ')' + rsSeq, + rsSymbol = '(?:' + [rsNonAstral + rsCombo + '?', rsCombo, rsRegional, rsSurrPair, rsAstral].join('|') + ')'; + + /** Used to match apostrophes. */ + var reApos = RegExp(rsApos, 'g'); + + /** + * Used to match [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks) and + * [combining diacritical marks for symbols](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks_for_Symbols). + */ + var reComboMark = RegExp(rsCombo, 'g'); + + /** Used to match [string symbols](https://mathiasbynens.be/notes/javascript-unicode). */ + var reUnicode = RegExp(rsFitz + '(?=' + rsFitz + ')|' + rsSymbol + rsSeq, 'g'); + + /** Used to match complex or compound words. */ + var reUnicodeWord = RegExp([ + rsUpper + '?' + rsLower + '+' + rsOptContrLower + '(?=' + [rsBreak, rsUpper, '$'].join('|') + ')', + rsMiscUpper + '+' + rsOptContrUpper + '(?=' + [rsBreak, rsUpper + rsMiscLower, '$'].join('|') + ')', + rsUpper + '?' + rsMiscLower + '+' + rsOptContrLower, + rsUpper + '+' + rsOptContrUpper, + rsOrdUpper, + rsOrdLower, + rsDigits, + rsEmoji + ].join('|'), 'g'); + + /** Used to detect strings with [zero-width joiners or code points from the astral planes](http://eev.ee/blog/2015/09/12/dark-corners-of-unicode/). */ + var reHasUnicode = RegExp('[' + rsZWJ + rsAstralRange + rsComboRange + rsVarRange + ']'); + + /** Used to detect strings that need a more robust regexp to match words. */ + var reHasUnicodeWord = /[a-z][A-Z]|[A-Z]{2}[a-z]|[0-9][a-zA-Z]|[a-zA-Z][0-9]|[^a-zA-Z0-9 ]/; + + /** Used to assign default `context` object properties. */ + var contextProps = [ + 'Array', 'Buffer', 'DataView', 'Date', 'Error', 'Float32Array', 'Float64Array', + 'Function', 'Int8Array', 'Int16Array', 'Int32Array', 'Map', 'Math', 'Object', + 'Promise', 'RegExp', 'Set', 'String', 'Symbol', 'TypeError', 'Uint8Array', + 'Uint8ClampedArray', 'Uint16Array', 'Uint32Array', 'WeakMap', + '_', 'clearTimeout', 'isFinite', 'parseInt', 'setTimeout' + ]; + + /** Used to make template sourceURLs easier to identify. */ + var templateCounter = -1; + + /** Used to identify `toStringTag` values of typed arrays. 
*/ + var typedArrayTags = {}; + typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = + typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = + typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = + typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = + typedArrayTags[uint32Tag] = true; + typedArrayTags[argsTag] = typedArrayTags[arrayTag] = + typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = + typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = + typedArrayTags[errorTag] = typedArrayTags[funcTag] = + typedArrayTags[mapTag] = typedArrayTags[numberTag] = + typedArrayTags[objectTag] = typedArrayTags[regexpTag] = + typedArrayTags[setTag] = typedArrayTags[stringTag] = + typedArrayTags[weakMapTag] = false; + + /** Used to identify `toStringTag` values supported by `_.clone`. */ + var cloneableTags = {}; + cloneableTags[argsTag] = cloneableTags[arrayTag] = + cloneableTags[arrayBufferTag] = cloneableTags[dataViewTag] = + cloneableTags[boolTag] = cloneableTags[dateTag] = + cloneableTags[float32Tag] = cloneableTags[float64Tag] = + cloneableTags[int8Tag] = cloneableTags[int16Tag] = + cloneableTags[int32Tag] = cloneableTags[mapTag] = + cloneableTags[numberTag] = cloneableTags[objectTag] = + cloneableTags[regexpTag] = cloneableTags[setTag] = + cloneableTags[stringTag] = cloneableTags[symbolTag] = + cloneableTags[uint8Tag] = cloneableTags[uint8ClampedTag] = + cloneableTags[uint16Tag] = cloneableTags[uint32Tag] = true; + cloneableTags[errorTag] = cloneableTags[funcTag] = + cloneableTags[weakMapTag] = false; + + /** Used to map Latin Unicode letters to basic Latin letters. */ + var deburredLetters = { + // Latin-1 Supplement block. + '\xc0': 'A', '\xc1': 'A', '\xc2': 'A', '\xc3': 'A', '\xc4': 'A', '\xc5': 'A', + '\xe0': 'a', '\xe1': 'a', '\xe2': 'a', '\xe3': 'a', '\xe4': 'a', '\xe5': 'a', + '\xc7': 'C', '\xe7': 'c', + '\xd0': 'D', '\xf0': 'd', + '\xc8': 'E', '\xc9': 'E', '\xca': 'E', '\xcb': 'E', + '\xe8': 'e', '\xe9': 'e', '\xea': 'e', '\xeb': 'e', + '\xcc': 'I', '\xcd': 'I', '\xce': 'I', '\xcf': 'I', + '\xec': 'i', '\xed': 'i', '\xee': 'i', '\xef': 'i', + '\xd1': 'N', '\xf1': 'n', + '\xd2': 'O', '\xd3': 'O', '\xd4': 'O', '\xd5': 'O', '\xd6': 'O', '\xd8': 'O', + '\xf2': 'o', '\xf3': 'o', '\xf4': 'o', '\xf5': 'o', '\xf6': 'o', '\xf8': 'o', + '\xd9': 'U', '\xda': 'U', '\xdb': 'U', '\xdc': 'U', + '\xf9': 'u', '\xfa': 'u', '\xfb': 'u', '\xfc': 'u', + '\xdd': 'Y', '\xfd': 'y', '\xff': 'y', + '\xc6': 'Ae', '\xe6': 'ae', + '\xde': 'Th', '\xfe': 'th', + '\xdf': 'ss', + // Latin Extended-A block. 
+ '\u0100': 'A', '\u0102': 'A', '\u0104': 'A', + '\u0101': 'a', '\u0103': 'a', '\u0105': 'a', + '\u0106': 'C', '\u0108': 'C', '\u010a': 'C', '\u010c': 'C', + '\u0107': 'c', '\u0109': 'c', '\u010b': 'c', '\u010d': 'c', + '\u010e': 'D', '\u0110': 'D', '\u010f': 'd', '\u0111': 'd', + '\u0112': 'E', '\u0114': 'E', '\u0116': 'E', '\u0118': 'E', '\u011a': 'E', + '\u0113': 'e', '\u0115': 'e', '\u0117': 'e', '\u0119': 'e', '\u011b': 'e', + '\u011c': 'G', '\u011e': 'G', '\u0120': 'G', '\u0122': 'G', + '\u011d': 'g', '\u011f': 'g', '\u0121': 'g', '\u0123': 'g', + '\u0124': 'H', '\u0126': 'H', '\u0125': 'h', '\u0127': 'h', + '\u0128': 'I', '\u012a': 'I', '\u012c': 'I', '\u012e': 'I', '\u0130': 'I', + '\u0129': 'i', '\u012b': 'i', '\u012d': 'i', '\u012f': 'i', '\u0131': 'i', + '\u0134': 'J', '\u0135': 'j', + '\u0136': 'K', '\u0137': 'k', '\u0138': 'k', + '\u0139': 'L', '\u013b': 'L', '\u013d': 'L', '\u013f': 'L', '\u0141': 'L', + '\u013a': 'l', '\u013c': 'l', '\u013e': 'l', '\u0140': 'l', '\u0142': 'l', + '\u0143': 'N', '\u0145': 'N', '\u0147': 'N', '\u014a': 'N', + '\u0144': 'n', '\u0146': 'n', '\u0148': 'n', '\u014b': 'n', + '\u014c': 'O', '\u014e': 'O', '\u0150': 'O', + '\u014d': 'o', '\u014f': 'o', '\u0151': 'o', + '\u0154': 'R', '\u0156': 'R', '\u0158': 'R', + '\u0155': 'r', '\u0157': 'r', '\u0159': 'r', + '\u015a': 'S', '\u015c': 'S', '\u015e': 'S', '\u0160': 'S', + '\u015b': 's', '\u015d': 's', '\u015f': 's', '\u0161': 's', + '\u0162': 'T', '\u0164': 'T', '\u0166': 'T', + '\u0163': 't', '\u0165': 't', '\u0167': 't', + '\u0168': 'U', '\u016a': 'U', '\u016c': 'U', '\u016e': 'U', '\u0170': 'U', '\u0172': 'U', + '\u0169': 'u', '\u016b': 'u', '\u016d': 'u', '\u016f': 'u', '\u0171': 'u', '\u0173': 'u', + '\u0174': 'W', '\u0175': 'w', + '\u0176': 'Y', '\u0177': 'y', '\u0178': 'Y', + '\u0179': 'Z', '\u017b': 'Z', '\u017d': 'Z', + '\u017a': 'z', '\u017c': 'z', '\u017e': 'z', + '\u0132': 'IJ', '\u0133': 'ij', + '\u0152': 'Oe', '\u0153': 'oe', + '\u0149': "'n", '\u017f': 's' + }; + + /** Used to map characters to HTML entities. */ + var htmlEscapes = { + '&': '&amp;', + '<': '&lt;', + '>': '&gt;', + '"': '&quot;', + "'": '&#39;' + }; + + /** Used to map HTML entities to characters. */ + var htmlUnescapes = { + '&amp;': '&', + '&lt;': '<', + '&gt;': '>', + '&quot;': '"', + '&#39;': "'" + }; + + /** Used to escape characters for inclusion in compiled string literals. */ + var stringEscapes = { + '\\': '\\', + "'": "'", + '\n': 'n', + '\r': 'r', + '\u2028': 'u2028', + '\u2029': 'u2029' + }; + + /** Built-in method references without a dependency on `root`. */ + var freeParseFloat = parseFloat, + freeParseInt = parseInt; + + /** Detect free variable `global` from Node.js. */ + var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; + + /** Detect free variable `self`. */ + var freeSelf = typeof self == 'object' && self && self.Object === Object && self; + + /** Used as a reference to the global object. */ + var root = freeGlobal || freeSelf || Function('return this')(); + + /** Detect free variable `exports`. */ + var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports; + + /** Detect free variable `module`. */ + var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module; + + /** Detect the popular CommonJS extension `module.exports`. */ + var moduleExports = freeModule && freeModule.exports === freeExports; + + /** Detect free variable `process` from Node.js. 
*/ + var freeProcess = moduleExports && freeGlobal.process; + + /** Used to access faster Node.js helpers. */ + var nodeUtil = (function() { + try { + // Use `util.types` for Node.js 10+. + var types = freeModule && freeModule.require && freeModule.require('util').types; + + if (types) { + return types; + } + + // Legacy `process.binding('util')` for Node.js < 10. + return freeProcess && freeProcess.binding && freeProcess.binding('util'); + } catch (e) {} + }()); + + /* Node.js helper references. */ + var nodeIsArrayBuffer = nodeUtil && nodeUtil.isArrayBuffer, + nodeIsDate = nodeUtil && nodeUtil.isDate, + nodeIsMap = nodeUtil && nodeUtil.isMap, + nodeIsRegExp = nodeUtil && nodeUtil.isRegExp, + nodeIsSet = nodeUtil && nodeUtil.isSet, + nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; + + /*--------------------------------------------------------------------------*/ + + /** + * A faster alternative to `Function#apply`, this function invokes `func` + * with the `this` binding of `thisArg` and the arguments of `args`. + * + * @private + * @param {Function} func The function to invoke. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} args The arguments to invoke `func` with. + * @returns {*} Returns the result of `func`. + */ + function apply(func, thisArg, args) { + switch (args.length) { + case 0: return func.call(thisArg); + case 1: return func.call(thisArg, args[0]); + case 2: return func.call(thisArg, args[0], args[1]); + case 3: return func.call(thisArg, args[0], args[1], args[2]); + } + return func.apply(thisArg, args); + } + + /** + * A specialized version of `baseAggregator` for arrays. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform keys. + * @param {Object} accumulator The initial aggregated object. + * @returns {Function} Returns `accumulator`. + */ + function arrayAggregator(array, setter, iteratee, accumulator) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + var value = array[index]; + setter(accumulator, value, iteratee(value), array); + } + return accumulator; + } + + /** + * A specialized version of `_.forEach` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns `array`. + */ + function arrayEach(array, iteratee) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (iteratee(array[index], index, array) === false) { + break; + } + } + return array; + } + + /** + * A specialized version of `_.forEachRight` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns `array`. + */ + function arrayEachRight(array, iteratee) { + var length = array == null ? 0 : array.length; + + while (length--) { + if (iteratee(array[length], length, array) === false) { + break; + } + } + return array; + } + + /** + * A specialized version of `_.every` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. 
+ * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. + */ + function arrayEvery(array, predicate) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (!predicate(array[index], index, array)) { + return false; + } + } + return true; + } + + /** + * A specialized version of `_.filter` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + */ + function arrayFilter(array, predicate) { + var index = -1, + length = array == null ? 0 : array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (predicate(value, index, array)) { + result[resIndex++] = value; + } + } + return result; + } + + /** + * A specialized version of `_.includes` for arrays without support for + * specifying an index to search from. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ + function arrayIncludes(array, value) { + var length = array == null ? 0 : array.length; + return !!length && baseIndexOf(array, value, 0) > -1; + } + + /** + * This function is like `arrayIncludes` except that it accepts a comparator. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @param {Function} comparator The comparator invoked per element. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ + function arrayIncludesWith(array, value, comparator) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (comparator(value, array[index])) { + return true; + } + } + return false; + } + + /** + * A specialized version of `_.map` for arrays without support for iteratee + * shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ + function arrayMap(array, iteratee) { + var index = -1, + length = array == null ? 0 : array.length, + result = Array(length); + + while (++index < length) { + result[index] = iteratee(array[index], index, array); + } + return result; + } + + /** + * Appends the elements of `values` to `array`. + * + * @private + * @param {Array} array The array to modify. + * @param {Array} values The values to append. + * @returns {Array} Returns `array`. + */ + function arrayPush(array, values) { + var index = -1, + length = values.length, + offset = array.length; + + while (++index < length) { + array[offset + index] = values[index]; + } + return array; + } + + /** + * A specialized version of `_.reduce` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @param {boolean} [initAccum] Specify using the first element of `array` as + * the initial value. + * @returns {*} Returns the accumulated value. + */ + function arrayReduce(array, iteratee, accumulator, initAccum) { + var index = -1, + length = array == null ? 
0 : array.length; + + if (initAccum && length) { + accumulator = array[++index]; + } + while (++index < length) { + accumulator = iteratee(accumulator, array[index], index, array); + } + return accumulator; + } + + /** + * A specialized version of `_.reduceRight` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @param {boolean} [initAccum] Specify using the last element of `array` as + * the initial value. + * @returns {*} Returns the accumulated value. + */ + function arrayReduceRight(array, iteratee, accumulator, initAccum) { + var length = array == null ? 0 : array.length; + if (initAccum && length) { + accumulator = array[--length]; + } + while (length--) { + accumulator = iteratee(accumulator, array[length], length, array); + } + return accumulator; + } + + /** + * A specialized version of `_.some` for arrays without support for iteratee + * shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + */ + function arraySome(array, predicate) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (predicate(array[index], index, array)) { + return true; + } + } + return false; + } + + /** + * Gets the size of an ASCII `string`. + * + * @private + * @param {string} string The string inspect. + * @returns {number} Returns the string size. + */ + var asciiSize = baseProperty('length'); + + /** + * Converts an ASCII `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ + function asciiToArray(string) { + return string.split(''); + } + + /** + * Splits an ASCII `string` into an array of its words. + * + * @private + * @param {string} The string to inspect. + * @returns {Array} Returns the words of `string`. + */ + function asciiWords(string) { + return string.match(reAsciiWord) || []; + } + + /** + * The base implementation of methods like `_.findKey` and `_.findLastKey`, + * without support for iteratee shorthands, which iterates over `collection` + * using `eachFunc`. + * + * @private + * @param {Array|Object} collection The collection to inspect. + * @param {Function} predicate The function invoked per iteration. + * @param {Function} eachFunc The function to iterate over `collection`. + * @returns {*} Returns the found element or its key, else `undefined`. + */ + function baseFindKey(collection, predicate, eachFunc) { + var result; + eachFunc(collection, function(value, key, collection) { + if (predicate(value, key, collection)) { + result = key; + return false; + } + }); + return result; + } + + /** + * The base implementation of `_.findIndex` and `_.findLastIndex` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} predicate The function invoked per iteration. + * @param {number} fromIndex The index to search from. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseFindIndex(array, predicate, fromIndex, fromRight) { + var length = array.length, + index = fromIndex + (fromRight ? 
1 : -1); + + while ((fromRight ? index-- : ++index < length)) { + if (predicate(array[index], index, array)) { + return index; + } + } + return -1; + } + + /** + * The base implementation of `_.indexOf` without `fromIndex` bounds checks. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseIndexOf(array, value, fromIndex) { + return value === value + ? strictIndexOf(array, value, fromIndex) + : baseFindIndex(array, baseIsNaN, fromIndex); + } + + /** + * This function is like `baseIndexOf` except that it accepts a comparator. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @param {Function} comparator The comparator invoked per element. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseIndexOfWith(array, value, fromIndex, comparator) { + var index = fromIndex - 1, + length = array.length; + + while (++index < length) { + if (comparator(array[index], value)) { + return index; + } + } + return -1; + } + + /** + * The base implementation of `_.isNaN` without support for number objects. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + */ + function baseIsNaN(value) { + return value !== value; + } + + /** + * The base implementation of `_.mean` and `_.meanBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the mean. + */ + function baseMean(array, iteratee) { + var length = array == null ? 0 : array.length; + return length ? (baseSum(array, iteratee) / length) : NAN; + } + + /** + * The base implementation of `_.property` without support for deep paths. + * + * @private + * @param {string} key The key of the property to get. + * @returns {Function} Returns the new accessor function. + */ + function baseProperty(key) { + return function(object) { + return object == null ? undefined : object[key]; + }; + } + + /** + * The base implementation of `_.propertyOf` without support for deep paths. + * + * @private + * @param {Object} object The object to query. + * @returns {Function} Returns the new accessor function. + */ + function basePropertyOf(object) { + return function(key) { + return object == null ? undefined : object[key]; + }; + } + + /** + * The base implementation of `_.reduce` and `_.reduceRight`, without support + * for iteratee shorthands, which iterates over `collection` using `eachFunc`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} accumulator The initial value. + * @param {boolean} initAccum Specify using the first or last element of + * `collection` as the initial value. + * @param {Function} eachFunc The function to iterate over `collection`. + * @returns {*} Returns the accumulated value. + */ + function baseReduce(collection, iteratee, accumulator, initAccum, eachFunc) { + eachFunc(collection, function(value, index, collection) { + accumulator = initAccum + ? 
(initAccum = false, value) + : iteratee(accumulator, value, index, collection); + }); + return accumulator; + } + + /** + * The base implementation of `_.sortBy` which uses `comparer` to define the + * sort order of `array` and replaces criteria objects with their corresponding + * values. + * + * @private + * @param {Array} array The array to sort. + * @param {Function} comparer The function to define sort order. + * @returns {Array} Returns `array`. + */ + function baseSortBy(array, comparer) { + var length = array.length; + + array.sort(comparer); + while (length--) { + array[length] = array[length].value; + } + return array; + } + + /** + * The base implementation of `_.sum` and `_.sumBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the sum. + */ + function baseSum(array, iteratee) { + var result, + index = -1, + length = array.length; + + while (++index < length) { + var current = iteratee(array[index]); + if (current !== undefined) { + result = result === undefined ? current : (result + current); + } + } + return result; + } + + /** + * The base implementation of `_.times` without support for iteratee shorthands + * or max array length checks. + * + * @private + * @param {number} n The number of times to invoke `iteratee`. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the array of results. + */ + function baseTimes(n, iteratee) { + var index = -1, + result = Array(n); + + while (++index < n) { + result[index] = iteratee(index); + } + return result; + } + + /** + * The base implementation of `_.toPairs` and `_.toPairsIn` which creates an array + * of key-value pairs for `object` corresponding to the property names of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the key-value pairs. + */ + function baseToPairs(object, props) { + return arrayMap(props, function(key) { + return [key, object[key]]; + }); + } + + /** + * The base implementation of `_.trim`. + * + * @private + * @param {string} string The string to trim. + * @returns {string} Returns the trimmed string. + */ + function baseTrim(string) { + return string + ? string.slice(0, trimmedEndIndex(string) + 1).replace(reTrimStart, '') + : string; + } + + /** + * The base implementation of `_.unary` without support for storing metadata. + * + * @private + * @param {Function} func The function to cap arguments for. + * @returns {Function} Returns the new capped function. + */ + function baseUnary(func) { + return function(value) { + return func(value); + }; + } + + /** + * The base implementation of `_.values` and `_.valuesIn` which creates an + * array of `object` property values corresponding to the property names + * of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the array of property values. + */ + function baseValues(object, props) { + return arrayMap(props, function(key) { + return object[key]; + }); + } + + /** + * Checks if a `cache` value for `key` exists. + * + * @private + * @param {Object} cache The cache to query. + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. 
+ */ + function cacheHas(cache, key) { + return cache.has(key); + } + + /** + * Used by `_.trim` and `_.trimStart` to get the index of the first string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the first unmatched string symbol. + */ + function charsStartIndex(strSymbols, chrSymbols) { + var index = -1, + length = strSymbols.length; + + while (++index < length && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; + } + + /** + * Used by `_.trim` and `_.trimEnd` to get the index of the last string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the last unmatched string symbol. + */ + function charsEndIndex(strSymbols, chrSymbols) { + var index = strSymbols.length; + + while (index-- && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; + } + + /** + * Gets the number of `placeholder` occurrences in `array`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} placeholder The placeholder to search for. + * @returns {number} Returns the placeholder count. + */ + function countHolders(array, placeholder) { + var length = array.length, + result = 0; + + while (length--) { + if (array[length] === placeholder) { + ++result; + } + } + return result; + } + + /** + * Used by `_.deburr` to convert Latin-1 Supplement and Latin Extended-A + * letters to basic Latin letters. + * + * @private + * @param {string} letter The matched letter to deburr. + * @returns {string} Returns the deburred letter. + */ + var deburrLetter = basePropertyOf(deburredLetters); + + /** + * Used by `_.escape` to convert characters to HTML entities. + * + * @private + * @param {string} chr The matched character to escape. + * @returns {string} Returns the escaped character. + */ + var escapeHtmlChar = basePropertyOf(htmlEscapes); + + /** + * Used by `_.template` to escape characters for inclusion in compiled string literals. + * + * @private + * @param {string} chr The matched character to escape. + * @returns {string} Returns the escaped character. + */ + function escapeStringChar(chr) { + return '\\' + stringEscapes[chr]; + } + + /** + * Gets the value at `key` of `object`. + * + * @private + * @param {Object} [object] The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ + function getValue(object, key) { + return object == null ? undefined : object[key]; + } + + /** + * Checks if `string` contains Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a symbol is found, else `false`. + */ + function hasUnicode(string) { + return reHasUnicode.test(string); + } + + /** + * Checks if `string` contains a word composed of Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a word is found, else `false`. + */ + function hasUnicodeWord(string) { + return reHasUnicodeWord.test(string); + } + + /** + * Converts `iterator` to an array. + * + * @private + * @param {Object} iterator The iterator to convert. + * @returns {Array} Returns the converted array. 
+ */ + function iteratorToArray(iterator) { + var data, + result = []; + + while (!(data = iterator.next()).done) { + result.push(data.value); + } + return result; + } + + /** + * Converts `map` to its key-value pairs. + * + * @private + * @param {Object} map The map to convert. + * @returns {Array} Returns the key-value pairs. + */ + function mapToArray(map) { + var index = -1, + result = Array(map.size); + + map.forEach(function(value, key) { + result[++index] = [key, value]; + }); + return result; + } + + /** + * Creates a unary function that invokes `func` with its argument transformed. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. + */ + function overArg(func, transform) { + return function(arg) { + return func(transform(arg)); + }; + } + + /** + * Replaces all `placeholder` elements in `array` with an internal placeholder + * and returns an array of their indexes. + * + * @private + * @param {Array} array The array to modify. + * @param {*} placeholder The placeholder to replace. + * @returns {Array} Returns the new array of placeholder indexes. + */ + function replaceHolders(array, placeholder) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (value === placeholder || value === PLACEHOLDER) { + array[index] = PLACEHOLDER; + result[resIndex++] = index; + } + } + return result; + } + + /** + * Converts `set` to an array of its values. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the values. + */ + function setToArray(set) { + var index = -1, + result = Array(set.size); + + set.forEach(function(value) { + result[++index] = value; + }); + return result; + } + + /** + * Converts `set` to its value-value pairs. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the value-value pairs. + */ + function setToPairs(set) { + var index = -1, + result = Array(set.size); + + set.forEach(function(value) { + result[++index] = [value, value]; + }); + return result; + } + + /** + * A specialized version of `_.indexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function strictIndexOf(array, value, fromIndex) { + var index = fromIndex - 1, + length = array.length; + + while (++index < length) { + if (array[index] === value) { + return index; + } + } + return -1; + } + + /** + * A specialized version of `_.lastIndexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function strictLastIndexOf(array, value, fromIndex) { + var index = fromIndex + 1; + while (index--) { + if (array[index] === value) { + return index; + } + } + return index; + } + + /** + * Gets the number of symbols in `string`. + * + * @private + * @param {string} string The string to inspect. + * @returns {number} Returns the string size. + */ + function stringSize(string) { + return hasUnicode(string) + ? 
unicodeSize(string) + : asciiSize(string); + } + + /** + * Converts `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ + function stringToArray(string) { + return hasUnicode(string) + ? unicodeToArray(string) + : asciiToArray(string); + } + + /** + * Used by `_.trim` and `_.trimEnd` to get the index of the last non-whitespace + * character of `string`. + * + * @private + * @param {string} string The string to inspect. + * @returns {number} Returns the index of the last non-whitespace character. + */ + function trimmedEndIndex(string) { + var index = string.length; + + while (index-- && reWhitespace.test(string.charAt(index))) {} + return index; + } + + /** + * Used by `_.unescape` to convert HTML entities to characters. + * + * @private + * @param {string} chr The matched character to unescape. + * @returns {string} Returns the unescaped character. + */ + var unescapeHtmlChar = basePropertyOf(htmlUnescapes); + + /** + * Gets the size of a Unicode `string`. + * + * @private + * @param {string} string The string inspect. + * @returns {number} Returns the string size. + */ + function unicodeSize(string) { + var result = reUnicode.lastIndex = 0; + while (reUnicode.test(string)) { + ++result; + } + return result; + } + + /** + * Converts a Unicode `string` to an array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the converted array. + */ + function unicodeToArray(string) { + return string.match(reUnicode) || []; + } + + /** + * Splits a Unicode `string` into an array of its words. + * + * @private + * @param {string} The string to inspect. + * @returns {Array} Returns the words of `string`. + */ + function unicodeWords(string) { + return string.match(reUnicodeWord) || []; + } + + /*--------------------------------------------------------------------------*/ + + /** + * Create a new pristine `lodash` function using the `context` object. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Util + * @param {Object} [context=root] The context object. + * @returns {Function} Returns a new `lodash` function. + * @example + * + * _.mixin({ 'foo': _.constant('foo') }); + * + * var lodash = _.runInContext(); + * lodash.mixin({ 'bar': lodash.constant('bar') }); + * + * _.isFunction(_.foo); + * // => true + * _.isFunction(_.bar); + * // => false + * + * lodash.isFunction(lodash.foo); + * // => false + * lodash.isFunction(lodash.bar); + * // => true + * + * // Create a suped-up `defer` in Node.js. + * var defer = _.runInContext({ 'setTimeout': setImmediate }).defer; + */ + var runInContext = (function runInContext(context) { + context = context == null ? root : _.defaults(root.Object(), context, _.pick(root, contextProps)); + + /** Built-in constructor references. */ + var Array = context.Array, + Date = context.Date, + Error = context.Error, + Function = context.Function, + Math = context.Math, + Object = context.Object, + RegExp = context.RegExp, + String = context.String, + TypeError = context.TypeError; + + /** Used for built-in method references. */ + var arrayProto = Array.prototype, + funcProto = Function.prototype, + objectProto = Object.prototype; + + /** Used to detect overreaching core-js shims. */ + var coreJsData = context['__core-js_shared__']; + + /** Used to resolve the decompiled source of functions. */ + var funcToString = funcProto.toString; + + /** Used to check objects for own properties. 
*/ + var hasOwnProperty = objectProto.hasOwnProperty; + + /** Used to generate unique IDs. */ + var idCounter = 0; + + /** Used to detect methods masquerading as native. */ + var maskSrcKey = (function() { + var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); + return uid ? ('Symbol(src)_1.' + uid) : ''; + }()); + + /** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ + var nativeObjectToString = objectProto.toString; + + /** Used to infer the `Object` constructor. */ + var objectCtorString = funcToString.call(Object); + + /** Used to restore the original `_` reference in `_.noConflict`. */ + var oldDash = root._; + + /** Used to detect if a method is native. */ + var reIsNative = RegExp('^' + + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') + .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' + ); + + /** Built-in value references. */ + var Buffer = moduleExports ? context.Buffer : undefined, + Symbol = context.Symbol, + Uint8Array = context.Uint8Array, + allocUnsafe = Buffer ? Buffer.allocUnsafe : undefined, + getPrototype = overArg(Object.getPrototypeOf, Object), + objectCreate = Object.create, + propertyIsEnumerable = objectProto.propertyIsEnumerable, + splice = arrayProto.splice, + spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined, + symIterator = Symbol ? Symbol.iterator : undefined, + symToStringTag = Symbol ? Symbol.toStringTag : undefined; + + var defineProperty = (function() { + try { + var func = getNative(Object, 'defineProperty'); + func({}, '', {}); + return func; + } catch (e) {} + }()); + + /** Mocked built-ins. */ + var ctxClearTimeout = context.clearTimeout !== root.clearTimeout && context.clearTimeout, + ctxNow = Date && Date.now !== root.Date.now && Date.now, + ctxSetTimeout = context.setTimeout !== root.setTimeout && context.setTimeout; + + /* Built-in method references for those with the same name as other `lodash` methods. */ + var nativeCeil = Math.ceil, + nativeFloor = Math.floor, + nativeGetSymbols = Object.getOwnPropertySymbols, + nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined, + nativeIsFinite = context.isFinite, + nativeJoin = arrayProto.join, + nativeKeys = overArg(Object.keys, Object), + nativeMax = Math.max, + nativeMin = Math.min, + nativeNow = Date.now, + nativeParseInt = context.parseInt, + nativeRandom = Math.random, + nativeReverse = arrayProto.reverse; + + /* Built-in method references that are verified to be native. */ + var DataView = getNative(context, 'DataView'), + Map = getNative(context, 'Map'), + Promise = getNative(context, 'Promise'), + Set = getNative(context, 'Set'), + WeakMap = getNative(context, 'WeakMap'), + nativeCreate = getNative(Object, 'create'); + + /** Used to store function metadata. */ + var metaMap = WeakMap && new WeakMap; + + /** Used to lookup unminified function names. */ + var realNames = {}; + + /** Used to detect maps, sets, and weakmaps. */ + var dataViewCtorString = toSource(DataView), + mapCtorString = toSource(Map), + promiseCtorString = toSource(Promise), + setCtorString = toSource(Set), + weakMapCtorString = toSource(WeakMap); + + /** Used to convert symbols to primitives and strings. */ + var symbolProto = Symbol ? Symbol.prototype : undefined, + symbolValueOf = symbolProto ? symbolProto.valueOf : undefined, + symbolToString = symbolProto ? 
symbolProto.toString : undefined; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a `lodash` object which wraps `value` to enable implicit method + * chain sequences. Methods that operate on and return arrays, collections, + * and functions can be chained together. Methods that retrieve a single value + * or may return a primitive value will automatically end the chain sequence + * and return the unwrapped value. Otherwise, the value must be unwrapped + * with `_#value`. + * + * Explicit chain sequences, which must be unwrapped with `_#value`, may be + * enabled using `_.chain`. + * + * The execution of chained methods is lazy, that is, it's deferred until + * `_#value` is implicitly or explicitly called. + * + * Lazy evaluation allows several methods to support shortcut fusion. + * Shortcut fusion is an optimization to merge iteratee calls; this avoids + * the creation of intermediate arrays and can greatly reduce the number of + * iteratee executions. Sections of a chain sequence qualify for shortcut + * fusion if the section is applied to an array and iteratees accept only + * one argument. The heuristic for whether a section qualifies for shortcut + * fusion is subject to change. + * + * Chaining is supported in custom builds as long as the `_#value` method is + * directly or indirectly included in the build. + * + * In addition to lodash methods, wrappers have `Array` and `String` methods. + * + * The wrapper `Array` methods are: + * `concat`, `join`, `pop`, `push`, `shift`, `sort`, `splice`, and `unshift` + * + * The wrapper `String` methods are: + * `replace` and `split` + * + * The wrapper methods that support shortcut fusion are: + * `at`, `compact`, `drop`, `dropRight`, `dropWhile`, `filter`, `find`, + * `findLast`, `head`, `initial`, `last`, `map`, `reject`, `reverse`, `slice`, + * `tail`, `take`, `takeRight`, `takeRightWhile`, `takeWhile`, and `toArray` + * + * The chainable wrapper methods are: + * `after`, `ary`, `assign`, `assignIn`, `assignInWith`, `assignWith`, `at`, + * `before`, `bind`, `bindAll`, `bindKey`, `castArray`, `chain`, `chunk`, + * `commit`, `compact`, `concat`, `conforms`, `constant`, `countBy`, `create`, + * `curry`, `debounce`, `defaults`, `defaultsDeep`, `defer`, `delay`, + * `difference`, `differenceBy`, `differenceWith`, `drop`, `dropRight`, + * `dropRightWhile`, `dropWhile`, `extend`, `extendWith`, `fill`, `filter`, + * `flatMap`, `flatMapDeep`, `flatMapDepth`, `flatten`, `flattenDeep`, + * `flattenDepth`, `flip`, `flow`, `flowRight`, `fromPairs`, `functions`, + * `functionsIn`, `groupBy`, `initial`, `intersection`, `intersectionBy`, + * `intersectionWith`, `invert`, `invertBy`, `invokeMap`, `iteratee`, `keyBy`, + * `keys`, `keysIn`, `map`, `mapKeys`, `mapValues`, `matches`, `matchesProperty`, + * `memoize`, `merge`, `mergeWith`, `method`, `methodOf`, `mixin`, `negate`, + * `nthArg`, `omit`, `omitBy`, `once`, `orderBy`, `over`, `overArgs`, + * `overEvery`, `overSome`, `partial`, `partialRight`, `partition`, `pick`, + * `pickBy`, `plant`, `property`, `propertyOf`, `pull`, `pullAll`, `pullAllBy`, + * `pullAllWith`, `pullAt`, `push`, `range`, `rangeRight`, `rearg`, `reject`, + * `remove`, `rest`, `reverse`, `sampleSize`, `set`, `setWith`, `shuffle`, + * `slice`, `sort`, `sortBy`, `splice`, `spread`, `tail`, `take`, `takeRight`, + * `takeRightWhile`, `takeWhile`, `tap`, `throttle`, `thru`, `toArray`, + * `toPairs`, `toPairsIn`, `toPath`, `toPlainObject`, `transform`, `unary`, + * `union`, `unionBy`, 
`unionWith`, `uniq`, `uniqBy`, `uniqWith`, `unset`, + * `unshift`, `unzip`, `unzipWith`, `update`, `updateWith`, `values`, + * `valuesIn`, `without`, `wrap`, `xor`, `xorBy`, `xorWith`, `zip`, + * `zipObject`, `zipObjectDeep`, and `zipWith` + * + * The wrapper methods that are **not** chainable by default are: + * `add`, `attempt`, `camelCase`, `capitalize`, `ceil`, `clamp`, `clone`, + * `cloneDeep`, `cloneDeepWith`, `cloneWith`, `conformsTo`, `deburr`, + * `defaultTo`, `divide`, `each`, `eachRight`, `endsWith`, `eq`, `escape`, + * `escapeRegExp`, `every`, `find`, `findIndex`, `findKey`, `findLast`, + * `findLastIndex`, `findLastKey`, `first`, `floor`, `forEach`, `forEachRight`, + * `forIn`, `forInRight`, `forOwn`, `forOwnRight`, `get`, `gt`, `gte`, `has`, + * `hasIn`, `head`, `identity`, `includes`, `indexOf`, `inRange`, `invoke`, + * `isArguments`, `isArray`, `isArrayBuffer`, `isArrayLike`, `isArrayLikeObject`, + * `isBoolean`, `isBuffer`, `isDate`, `isElement`, `isEmpty`, `isEqual`, + * `isEqualWith`, `isError`, `isFinite`, `isFunction`, `isInteger`, `isLength`, + * `isMap`, `isMatch`, `isMatchWith`, `isNaN`, `isNative`, `isNil`, `isNull`, + * `isNumber`, `isObject`, `isObjectLike`, `isPlainObject`, `isRegExp`, + * `isSafeInteger`, `isSet`, `isString`, `isUndefined`, `isTypedArray`, + * `isWeakMap`, `isWeakSet`, `join`, `kebabCase`, `last`, `lastIndexOf`, + * `lowerCase`, `lowerFirst`, `lt`, `lte`, `max`, `maxBy`, `mean`, `meanBy`, + * `min`, `minBy`, `multiply`, `noConflict`, `noop`, `now`, `nth`, `pad`, + * `padEnd`, `padStart`, `parseInt`, `pop`, `random`, `reduce`, `reduceRight`, + * `repeat`, `result`, `round`, `runInContext`, `sample`, `shift`, `size`, + * `snakeCase`, `some`, `sortedIndex`, `sortedIndexBy`, `sortedLastIndex`, + * `sortedLastIndexBy`, `startCase`, `startsWith`, `stubArray`, `stubFalse`, + * `stubObject`, `stubString`, `stubTrue`, `subtract`, `sum`, `sumBy`, + * `template`, `times`, `toFinite`, `toInteger`, `toJSON`, `toLength`, + * `toLower`, `toNumber`, `toSafeInteger`, `toString`, `toUpper`, `trim`, + * `trimEnd`, `trimStart`, `truncate`, `unescape`, `uniqueId`, `upperCase`, + * `upperFirst`, `value`, and `words` + * + * @name _ + * @constructor + * @category Seq + * @param {*} value The value to wrap in a `lodash` instance. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * function square(n) { + * return n * n; + * } + * + * var wrapped = _([1, 2, 3]); + * + * // Returns an unwrapped value. + * wrapped.reduce(_.add); + * // => 6 + * + * // Returns a wrapped value. + * var squares = wrapped.map(square); + * + * _.isArray(squares); + * // => false + * + * _.isArray(squares.value()); + * // => true + */ + function lodash(value) { + if (isObjectLike(value) && !isArray(value) && !(value instanceof LazyWrapper)) { + if (value instanceof LodashWrapper) { + return value; + } + if (hasOwnProperty.call(value, '__wrapped__')) { + return wrapperClone(value); + } + } + return new LodashWrapper(value); + } + + /** + * The base implementation of `_.create` without support for assigning + * properties to the created object. + * + * @private + * @param {Object} proto The object to inherit from. + * @returns {Object} Returns the new object. 
+ */ + var baseCreate = (function() { + function object() {} + return function(proto) { + if (!isObject(proto)) { + return {}; + } + if (objectCreate) { + return objectCreate(proto); + } + object.prototype = proto; + var result = new object; + object.prototype = undefined; + return result; + }; + }()); + + /** + * The function whose prototype chain sequence wrappers inherit from. + * + * @private + */ + function baseLodash() { + // No operation performed. + } + + /** + * The base constructor for creating `lodash` wrapper objects. + * + * @private + * @param {*} value The value to wrap. + * @param {boolean} [chainAll] Enable explicit method chain sequences. + */ + function LodashWrapper(value, chainAll) { + this.__wrapped__ = value; + this.__actions__ = []; + this.__chain__ = !!chainAll; + this.__index__ = 0; + this.__values__ = undefined; + } + + /** + * By default, the template delimiters used by lodash are like those in + * embedded Ruby (ERB) as well as ES2015 template strings. Change the + * following template settings to use alternative delimiters. + * + * @static + * @memberOf _ + * @type {Object} + */ + lodash.templateSettings = { + + /** + * Used to detect `data` property values to be HTML-escaped. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'escape': reEscape, + + /** + * Used to detect code to be evaluated. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'evaluate': reEvaluate, + + /** + * Used to detect `data` property values to inject. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'interpolate': reInterpolate, + + /** + * Used to reference the data object in the template text. + * + * @memberOf _.templateSettings + * @type {string} + */ + 'variable': '', + + /** + * Used to import variables into the compiled template. + * + * @memberOf _.templateSettings + * @type {Object} + */ + 'imports': { + + /** + * A reference to the `lodash` function. + * + * @memberOf _.templateSettings.imports + * @type {Function} + */ + '_': lodash + } + }; + + // Ensure wrappers are instances of `baseLodash`. + lodash.prototype = baseLodash.prototype; + lodash.prototype.constructor = lodash; + + LodashWrapper.prototype = baseCreate(baseLodash.prototype); + LodashWrapper.prototype.constructor = LodashWrapper; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a lazy wrapper object which wraps `value` to enable lazy evaluation. + * + * @private + * @constructor + * @param {*} value The value to wrap. + */ + function LazyWrapper(value) { + this.__wrapped__ = value; + this.__actions__ = []; + this.__dir__ = 1; + this.__filtered__ = false; + this.__iteratees__ = []; + this.__takeCount__ = MAX_ARRAY_LENGTH; + this.__views__ = []; + } + + /** + * Creates a clone of the lazy wrapper object. + * + * @private + * @name clone + * @memberOf LazyWrapper + * @returns {Object} Returns the cloned `LazyWrapper` object. + */ + function lazyClone() { + var result = new LazyWrapper(this.__wrapped__); + result.__actions__ = copyArray(this.__actions__); + result.__dir__ = this.__dir__; + result.__filtered__ = this.__filtered__; + result.__iteratees__ = copyArray(this.__iteratees__); + result.__takeCount__ = this.__takeCount__; + result.__views__ = copyArray(this.__views__); + return result; + } + + /** + * Reverses the direction of lazy iteration. + * + * @private + * @name reverse + * @memberOf LazyWrapper + * @returns {Object} Returns the new reversed `LazyWrapper` object. 
+ */ + function lazyReverse() { + if (this.__filtered__) { + var result = new LazyWrapper(this); + result.__dir__ = -1; + result.__filtered__ = true; + } else { + result = this.clone(); + result.__dir__ *= -1; + } + return result; + } + + /** + * Extracts the unwrapped value from its lazy wrapper. + * + * @private + * @name value + * @memberOf LazyWrapper + * @returns {*} Returns the unwrapped value. + */ + function lazyValue() { + var array = this.__wrapped__.value(), + dir = this.__dir__, + isArr = isArray(array), + isRight = dir < 0, + arrLength = isArr ? array.length : 0, + view = getView(0, arrLength, this.__views__), + start = view.start, + end = view.end, + length = end - start, + index = isRight ? end : (start - 1), + iteratees = this.__iteratees__, + iterLength = iteratees.length, + resIndex = 0, + takeCount = nativeMin(length, this.__takeCount__); + + if (!isArr || (!isRight && arrLength == length && takeCount == length)) { + return baseWrapperValue(array, this.__actions__); + } + var result = []; + + outer: + while (length-- && resIndex < takeCount) { + index += dir; + + var iterIndex = -1, + value = array[index]; + + while (++iterIndex < iterLength) { + var data = iteratees[iterIndex], + iteratee = data.iteratee, + type = data.type, + computed = iteratee(value); + + if (type == LAZY_MAP_FLAG) { + value = computed; + } else if (!computed) { + if (type == LAZY_FILTER_FLAG) { + continue outer; + } else { + break outer; + } + } + } + result[resIndex++] = value; + } + return result; + } + + // Ensure `LazyWrapper` is an instance of `baseLodash`. + LazyWrapper.prototype = baseCreate(baseLodash.prototype); + LazyWrapper.prototype.constructor = LazyWrapper; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a hash object. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function Hash(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; + + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } + } + + /** + * Removes all key-value entries from the hash. + * + * @private + * @name clear + * @memberOf Hash + */ + function hashClear() { + this.__data__ = nativeCreate ? nativeCreate(null) : {}; + this.size = 0; + } + + /** + * Removes `key` and its value from the hash. + * + * @private + * @name delete + * @memberOf Hash + * @param {Object} hash The hash to modify. + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function hashDelete(key) { + var result = this.has(key) && delete this.__data__[key]; + this.size -= result ? 1 : 0; + return result; + } + + /** + * Gets the hash value for `key`. + * + * @private + * @name get + * @memberOf Hash + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function hashGet(key) { + var data = this.__data__; + if (nativeCreate) { + var result = data[key]; + return result === HASH_UNDEFINED ? undefined : result; + } + return hasOwnProperty.call(data, key) ? data[key] : undefined; + } + + /** + * Checks if a hash value for `key` exists. + * + * @private + * @name has + * @memberOf Hash + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function hashHas(key) { + var data = this.__data__; + return nativeCreate ? 
(data[key] !== undefined) : hasOwnProperty.call(data, key); + } + + /** + * Sets the hash `key` to `value`. + * + * @private + * @name set + * @memberOf Hash + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the hash instance. + */ + function hashSet(key, value) { + var data = this.__data__; + this.size += this.has(key) ? 0 : 1; + data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; + return this; + } + + // Add methods to `Hash`. + Hash.prototype.clear = hashClear; + Hash.prototype['delete'] = hashDelete; + Hash.prototype.get = hashGet; + Hash.prototype.has = hashHas; + Hash.prototype.set = hashSet; + + /*------------------------------------------------------------------------*/ + + /** + * Creates an list cache object. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function ListCache(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; + + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } + } + + /** + * Removes all key-value entries from the list cache. + * + * @private + * @name clear + * @memberOf ListCache + */ + function listCacheClear() { + this.__data__ = []; + this.size = 0; + } + + /** + * Removes `key` and its value from the list cache. + * + * @private + * @name delete + * @memberOf ListCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function listCacheDelete(key) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + return false; + } + var lastIndex = data.length - 1; + if (index == lastIndex) { + data.pop(); + } else { + splice.call(data, index, 1); + } + --this.size; + return true; + } + + /** + * Gets the list cache value for `key`. + * + * @private + * @name get + * @memberOf ListCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function listCacheGet(key) { + var data = this.__data__, + index = assocIndexOf(data, key); + + return index < 0 ? undefined : data[index][1]; + } + + /** + * Checks if a list cache value for `key` exists. + * + * @private + * @name has + * @memberOf ListCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function listCacheHas(key) { + return assocIndexOf(this.__data__, key) > -1; + } + + /** + * Sets the list cache `key` to `value`. + * + * @private + * @name set + * @memberOf ListCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the list cache instance. + */ + function listCacheSet(key, value) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + ++this.size; + data.push([key, value]); + } else { + data[index][1] = value; + } + return this; + } + + // Add methods to `ListCache`. + ListCache.prototype.clear = listCacheClear; + ListCache.prototype['delete'] = listCacheDelete; + ListCache.prototype.get = listCacheGet; + ListCache.prototype.has = listCacheHas; + ListCache.prototype.set = listCacheSet; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a map cache object to store key-value pairs. 
+ * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function MapCache(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; + + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } + } + + /** + * Removes all key-value entries from the map. + * + * @private + * @name clear + * @memberOf MapCache + */ + function mapCacheClear() { + this.size = 0; + this.__data__ = { + 'hash': new Hash, + 'map': new (Map || ListCache), + 'string': new Hash + }; + } + + /** + * Removes `key` and its value from the map. + * + * @private + * @name delete + * @memberOf MapCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function mapCacheDelete(key) { + var result = getMapData(this, key)['delete'](key); + this.size -= result ? 1 : 0; + return result; + } + + /** + * Gets the map value for `key`. + * + * @private + * @name get + * @memberOf MapCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function mapCacheGet(key) { + return getMapData(this, key).get(key); + } + + /** + * Checks if a map value for `key` exists. + * + * @private + * @name has + * @memberOf MapCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function mapCacheHas(key) { + return getMapData(this, key).has(key); + } + + /** + * Sets the map `key` to `value`. + * + * @private + * @name set + * @memberOf MapCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the map cache instance. + */ + function mapCacheSet(key, value) { + var data = getMapData(this, key), + size = data.size; + + data.set(key, value); + this.size += data.size == size ? 0 : 1; + return this; + } + + // Add methods to `MapCache`. + MapCache.prototype.clear = mapCacheClear; + MapCache.prototype['delete'] = mapCacheDelete; + MapCache.prototype.get = mapCacheGet; + MapCache.prototype.has = mapCacheHas; + MapCache.prototype.set = mapCacheSet; + + /*------------------------------------------------------------------------*/ + + /** + * + * Creates an array cache object to store unique values. + * + * @private + * @constructor + * @param {Array} [values] The values to cache. + */ + function SetCache(values) { + var index = -1, + length = values == null ? 0 : values.length; + + this.__data__ = new MapCache; + while (++index < length) { + this.add(values[index]); + } + } + + /** + * Adds `value` to the array cache. + * + * @private + * @name add + * @memberOf SetCache + * @alias push + * @param {*} value The value to cache. + * @returns {Object} Returns the cache instance. + */ + function setCacheAdd(value) { + this.__data__.set(value, HASH_UNDEFINED); + return this; + } + + /** + * Checks if `value` is in the array cache. + * + * @private + * @name has + * @memberOf SetCache + * @param {*} value The value to search for. + * @returns {number} Returns `true` if `value` is found, else `false`. + */ + function setCacheHas(value) { + return this.__data__.has(value); + } + + // Add methods to `SetCache`. 
+ SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; + SetCache.prototype.has = setCacheHas; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a stack cache object to store key-value pairs. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function Stack(entries) { + var data = this.__data__ = new ListCache(entries); + this.size = data.size; + } + + /** + * Removes all key-value entries from the stack. + * + * @private + * @name clear + * @memberOf Stack + */ + function stackClear() { + this.__data__ = new ListCache; + this.size = 0; + } + + /** + * Removes `key` and its value from the stack. + * + * @private + * @name delete + * @memberOf Stack + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function stackDelete(key) { + var data = this.__data__, + result = data['delete'](key); + + this.size = data.size; + return result; + } + + /** + * Gets the stack value for `key`. + * + * @private + * @name get + * @memberOf Stack + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function stackGet(key) { + return this.__data__.get(key); + } + + /** + * Checks if a stack value for `key` exists. + * + * @private + * @name has + * @memberOf Stack + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function stackHas(key) { + return this.__data__.has(key); + } + + /** + * Sets the stack `key` to `value`. + * + * @private + * @name set + * @memberOf Stack + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the stack cache instance. + */ + function stackSet(key, value) { + var data = this.__data__; + if (data instanceof ListCache) { + var pairs = data.__data__; + if (!Map || (pairs.length < LARGE_ARRAY_SIZE - 1)) { + pairs.push([key, value]); + this.size = ++data.size; + return this; + } + data = this.__data__ = new MapCache(pairs); + } + data.set(key, value); + this.size = data.size; + return this; + } + + // Add methods to `Stack`. + Stack.prototype.clear = stackClear; + Stack.prototype['delete'] = stackDelete; + Stack.prototype.get = stackGet; + Stack.prototype.has = stackHas; + Stack.prototype.set = stackSet; + + /*------------------------------------------------------------------------*/ + + /** + * Creates an array of the enumerable property names of the array-like `value`. + * + * @private + * @param {*} value The value to query. + * @param {boolean} inherited Specify returning inherited property names. + * @returns {Array} Returns the array of property names. + */ + function arrayLikeKeys(value, inherited) { + var isArr = isArray(value), + isArg = !isArr && isArguments(value), + isBuff = !isArr && !isArg && isBuffer(value), + isType = !isArr && !isArg && !isBuff && isTypedArray(value), + skipIndexes = isArr || isArg || isBuff || isType, + result = skipIndexes ? baseTimes(value.length, String) : [], + length = result.length; + + for (var key in value) { + if ((inherited || hasOwnProperty.call(value, key)) && + !(skipIndexes && ( + // Safari 9 has enumerable `arguments.length` in strict mode. + key == 'length' || + // Node.js 0.10 has enumerable non-index properties on buffers. 
+ (isBuff && (key == 'offset' || key == 'parent')) || + // PhantomJS 2 has enumerable non-index properties on typed arrays. + (isType && (key == 'buffer' || key == 'byteLength' || key == 'byteOffset')) || + // Skip index properties. + isIndex(key, length) + ))) { + result.push(key); + } + } + return result; + } + + /** + * A specialized version of `_.sample` for arrays. + * + * @private + * @param {Array} array The array to sample. + * @returns {*} Returns the random element. + */ + function arraySample(array) { + var length = array.length; + return length ? array[baseRandom(0, length - 1)] : undefined; + } + + /** + * A specialized version of `_.sampleSize` for arrays. + * + * @private + * @param {Array} array The array to sample. + * @param {number} n The number of elements to sample. + * @returns {Array} Returns the random elements. + */ + function arraySampleSize(array, n) { + return shuffleSelf(copyArray(array), baseClamp(n, 0, array.length)); + } + + /** + * A specialized version of `_.shuffle` for arrays. + * + * @private + * @param {Array} array The array to shuffle. + * @returns {Array} Returns the new shuffled array. + */ + function arrayShuffle(array) { + return shuffleSelf(copyArray(array)); + } + + /** + * This function is like `assignValue` except that it doesn't assign + * `undefined` values. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function assignMergeValue(object, key, value) { + if ((value !== undefined && !eq(object[key], value)) || + (value === undefined && !(key in object))) { + baseAssignValue(object, key, value); + } + } + + /** + * Assigns `value` to `key` of `object` if the existing value is not equivalent + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function assignValue(object, key, value) { + var objValue = object[key]; + if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) || + (value === undefined && !(key in object))) { + baseAssignValue(object, key, value); + } + } + + /** + * Gets the index at which the `key` is found in `array` of key-value pairs. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} key The key to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function assocIndexOf(array, key) { + var length = array.length; + while (length--) { + if (eq(array[length][0], key)) { + return length; + } + } + return -1; + } + + /** + * Aggregates elements of `collection` on `accumulator` with keys transformed + * by `iteratee` and values set by `setter`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform keys. + * @param {Object} accumulator The initial aggregated object. + * @returns {Function} Returns `accumulator`. 
+ */ + function baseAggregator(collection, setter, iteratee, accumulator) { + baseEach(collection, function(value, key, collection) { + setter(accumulator, value, iteratee(value), collection); + }); + return accumulator; + } + + /** + * The base implementation of `_.assign` without support for multiple sources + * or `customizer` functions. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @returns {Object} Returns `object`. + */ + function baseAssign(object, source) { + return object && copyObject(source, keys(source), object); + } + + /** + * The base implementation of `_.assignIn` without support for multiple sources + * or `customizer` functions. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @returns {Object} Returns `object`. + */ + function baseAssignIn(object, source) { + return object && copyObject(source, keysIn(source), object); + } + + /** + * The base implementation of `assignValue` and `assignMergeValue` without + * value checks. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function baseAssignValue(object, key, value) { + if (key == '__proto__' && defineProperty) { + defineProperty(object, key, { + 'configurable': true, + 'enumerable': true, + 'value': value, + 'writable': true + }); + } else { + object[key] = value; + } + } + + /** + * The base implementation of `_.at` without support for individual paths. + * + * @private + * @param {Object} object The object to iterate over. + * @param {string[]} paths The property paths to pick. + * @returns {Array} Returns the picked elements. + */ + function baseAt(object, paths) { + var index = -1, + length = paths.length, + result = Array(length), + skip = object == null; + + while (++index < length) { + result[index] = skip ? undefined : get(object, paths[index]); + } + return result; + } + + /** + * The base implementation of `_.clamp` which doesn't coerce arguments. + * + * @private + * @param {number} number The number to clamp. + * @param {number} [lower] The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the clamped number. + */ + function baseClamp(number, lower, upper) { + if (number === number) { + if (upper !== undefined) { + number = number <= upper ? number : upper; + } + if (lower !== undefined) { + number = number >= lower ? number : lower; + } + } + return number; + } + + /** + * The base implementation of `_.clone` and `_.cloneDeep` which tracks + * traversed objects. + * + * @private + * @param {*} value The value to clone. + * @param {boolean} bitmask The bitmask flags. + * 1 - Deep clone + * 2 - Flatten inherited properties + * 4 - Clone symbols + * @param {Function} [customizer] The function to customize cloning. + * @param {string} [key] The key of `value`. + * @param {Object} [object] The parent object of `value`. + * @param {Object} [stack] Tracks traversed objects and their clone counterparts. + * @returns {*} Returns the cloned value. + */ + function baseClone(value, bitmask, customizer, key, object, stack) { + var result, + isDeep = bitmask & CLONE_DEEP_FLAG, + isFlat = bitmask & CLONE_FLAT_FLAG, + isFull = bitmask & CLONE_SYMBOLS_FLAG; + + if (customizer) { + result = object ? 
customizer(value, key, object, stack) : customizer(value); + } + if (result !== undefined) { + return result; + } + if (!isObject(value)) { + return value; + } + var isArr = isArray(value); + if (isArr) { + result = initCloneArray(value); + if (!isDeep) { + return copyArray(value, result); + } + } else { + var tag = getTag(value), + isFunc = tag == funcTag || tag == genTag; + + if (isBuffer(value)) { + return cloneBuffer(value, isDeep); + } + if (tag == objectTag || tag == argsTag || (isFunc && !object)) { + result = (isFlat || isFunc) ? {} : initCloneObject(value); + if (!isDeep) { + return isFlat + ? copySymbolsIn(value, baseAssignIn(result, value)) + : copySymbols(value, baseAssign(result, value)); + } + } else { + if (!cloneableTags[tag]) { + return object ? value : {}; + } + result = initCloneByTag(value, tag, isDeep); + } + } + // Check for circular references and return its corresponding clone. + stack || (stack = new Stack); + var stacked = stack.get(value); + if (stacked) { + return stacked; + } + stack.set(value, result); + + if (isSet(value)) { + value.forEach(function(subValue) { + result.add(baseClone(subValue, bitmask, customizer, subValue, value, stack)); + }); + } else if (isMap(value)) { + value.forEach(function(subValue, key) { + result.set(key, baseClone(subValue, bitmask, customizer, key, value, stack)); + }); + } + + var keysFunc = isFull + ? (isFlat ? getAllKeysIn : getAllKeys) + : (isFlat ? keysIn : keys); + + var props = isArr ? undefined : keysFunc(value); + arrayEach(props || value, function(subValue, key) { + if (props) { + key = subValue; + subValue = value[key]; + } + // Recursively populate clone (susceptible to call stack limits). + assignValue(result, key, baseClone(subValue, bitmask, customizer, key, value, stack)); + }); + return result; + } + + /** + * The base implementation of `_.conforms` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property predicates to conform to. + * @returns {Function} Returns the new spec function. + */ + function baseConforms(source) { + var props = keys(source); + return function(object) { + return baseConformsTo(object, source, props); + }; + } + + /** + * The base implementation of `_.conformsTo` which accepts `props` to check. + * + * @private + * @param {Object} object The object to inspect. + * @param {Object} source The object of property predicates to conform to. + * @returns {boolean} Returns `true` if `object` conforms, else `false`. + */ + function baseConformsTo(object, source, props) { + var length = props.length; + if (object == null) { + return !length; + } + object = Object(object); + while (length--) { + var key = props[length], + predicate = source[key], + value = object[key]; + + if ((value === undefined && !(key in object)) || !predicate(value)) { + return false; + } + } + return true; + } + + /** + * The base implementation of `_.delay` and `_.defer` which accepts `args` + * to provide to `func`. + * + * @private + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @param {Array} args The arguments to provide to `func`. + * @returns {number|Object} Returns the timer id or timeout object. 
+ */ + function baseDelay(func, wait, args) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return setTimeout(function() { func.apply(undefined, args); }, wait); + } + + /** + * The base implementation of methods like `_.difference` without support + * for excluding multiple arrays or iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Array} values The values to exclude. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + */ + function baseDifference(array, values, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + isCommon = true, + length = array.length, + result = [], + valuesLength = values.length; + + if (!length) { + return result; + } + if (iteratee) { + values = arrayMap(values, baseUnary(iteratee)); + } + if (comparator) { + includes = arrayIncludesWith; + isCommon = false; + } + else if (values.length >= LARGE_ARRAY_SIZE) { + includes = cacheHas; + isCommon = false; + values = new SetCache(values); + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee == null ? value : iteratee(value); + + value = (comparator || value !== 0) ? value : 0; + if (isCommon && computed === computed) { + var valuesIndex = valuesLength; + while (valuesIndex--) { + if (values[valuesIndex] === computed) { + continue outer; + } + } + result.push(value); + } + else if (!includes(values, computed, comparator)) { + result.push(value); + } + } + return result; + } + + /** + * The base implementation of `_.forEach` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + */ + var baseEach = createBaseEach(baseForOwn); + + /** + * The base implementation of `_.forEachRight` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + */ + var baseEachRight = createBaseEach(baseForOwnRight, true); + + /** + * The base implementation of `_.every` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false` + */ + function baseEvery(collection, predicate) { + var result = true; + baseEach(collection, function(value, index, collection) { + result = !!predicate(value, index, collection); + return result; + }); + return result; + } + + /** + * The base implementation of methods like `_.max` and `_.min` which accepts a + * `comparator` to determine the extremum value. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The iteratee invoked per iteration. + * @param {Function} comparator The comparator used to compare values. + * @returns {*} Returns the extremum value. 
+ */ + function baseExtremum(array, iteratee, comparator) { + var index = -1, + length = array.length; + + while (++index < length) { + var value = array[index], + current = iteratee(value); + + if (current != null && (computed === undefined + ? (current === current && !isSymbol(current)) + : comparator(current, computed) + )) { + var computed = current, + result = value; + } + } + return result; + } + + /** + * The base implementation of `_.fill` without an iteratee call guard. + * + * @private + * @param {Array} array The array to fill. + * @param {*} value The value to fill `array` with. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns `array`. + */ + function baseFill(array, value, start, end) { + var length = array.length; + + start = toInteger(start); + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = (end === undefined || end > length) ? length : toInteger(end); + if (end < 0) { + end += length; + } + end = start > end ? 0 : toLength(end); + while (start < end) { + array[start++] = value; + } + return array; + } + + /** + * The base implementation of `_.filter` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + */ + function baseFilter(collection, predicate) { + var result = []; + baseEach(collection, function(value, index, collection) { + if (predicate(value, index, collection)) { + result.push(value); + } + }); + return result; + } + + /** + * The base implementation of `_.flatten` with support for restricting flattening. + * + * @private + * @param {Array} array The array to flatten. + * @param {number} depth The maximum recursion depth. + * @param {boolean} [predicate=isFlattenable] The function invoked per iteration. + * @param {boolean} [isStrict] Restrict to values that pass `predicate` checks. + * @param {Array} [result=[]] The initial result value. + * @returns {Array} Returns the new flattened array. + */ + function baseFlatten(array, depth, predicate, isStrict, result) { + var index = -1, + length = array.length; + + predicate || (predicate = isFlattenable); + result || (result = []); + + while (++index < length) { + var value = array[index]; + if (depth > 0 && predicate(value)) { + if (depth > 1) { + // Recursively flatten arrays (susceptible to call stack limits). + baseFlatten(value, depth - 1, predicate, isStrict, result); + } else { + arrayPush(result, value); + } + } else if (!isStrict) { + result[result.length] = value; + } + } + return result; + } + + /** + * The base implementation of `baseForOwn` which iterates over `object` + * properties returned by `keysFunc` and invokes `iteratee` for each property. + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {Function} keysFunc The function to get the keys of `object`. + * @returns {Object} Returns `object`. + */ + var baseFor = createBaseFor(); + + /** + * This function is like `baseFor` except that it iterates over properties + * in the opposite order. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. 
+ * @param {Function} keysFunc The function to get the keys of `object`. + * @returns {Object} Returns `object`. + */ + var baseForRight = createBaseFor(true); + + /** + * The base implementation of `_.forOwn` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Object} Returns `object`. + */ + function baseForOwn(object, iteratee) { + return object && baseFor(object, iteratee, keys); + } + + /** + * The base implementation of `_.forOwnRight` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Object} Returns `object`. + */ + function baseForOwnRight(object, iteratee) { + return object && baseForRight(object, iteratee, keys); + } + + /** + * The base implementation of `_.functions` which creates an array of + * `object` function property names filtered from `props`. + * + * @private + * @param {Object} object The object to inspect. + * @param {Array} props The property names to filter. + * @returns {Array} Returns the function names. + */ + function baseFunctions(object, props) { + return arrayFilter(props, function(key) { + return isFunction(object[key]); + }); + } + + /** + * The base implementation of `_.get` without support for default values. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to get. + * @returns {*} Returns the resolved value. + */ + function baseGet(object, path) { + path = castPath(path, object); + + var index = 0, + length = path.length; + + while (object != null && index < length) { + object = object[toKey(path[index++])]; + } + return (index && index == length) ? object : undefined; + } + + /** + * The base implementation of `getAllKeys` and `getAllKeysIn` which uses + * `keysFunc` and `symbolsFunc` to get the enumerable property names and + * symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Function} keysFunc The function to get the keys of `object`. + * @param {Function} symbolsFunc The function to get the symbols of `object`. + * @returns {Array} Returns the array of property names and symbols. + */ + function baseGetAllKeys(object, keysFunc, symbolsFunc) { + var result = keysFunc(object); + return isArray(object) ? result : arrayPush(result, symbolsFunc(object)); + } + + /** + * The base implementation of `getTag` without fallbacks for buggy environments. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ + function baseGetTag(value) { + if (value == null) { + return value === undefined ? undefinedTag : nullTag; + } + return (symToStringTag && symToStringTag in Object(value)) + ? getRawTag(value) + : objectToString(value); + } + + /** + * The base implementation of `_.gt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. + */ + function baseGt(value, other) { + return value > other; + } + + /** + * The base implementation of `_.has` without support for deep paths. + * + * @private + * @param {Object} [object] The object to query. + * @param {Array|string} key The key to check. 
+ * @returns {boolean} Returns `true` if `key` exists, else `false`. + */ + function baseHas(object, key) { + return object != null && hasOwnProperty.call(object, key); + } + + /** + * The base implementation of `_.hasIn` without support for deep paths. + * + * @private + * @param {Object} [object] The object to query. + * @param {Array|string} key The key to check. + * @returns {boolean} Returns `true` if `key` exists, else `false`. + */ + function baseHasIn(object, key) { + return object != null && key in Object(object); + } + + /** + * The base implementation of `_.inRange` which doesn't coerce arguments. + * + * @private + * @param {number} number The number to check. + * @param {number} start The start of the range. + * @param {number} end The end of the range. + * @returns {boolean} Returns `true` if `number` is in the range, else `false`. + */ + function baseInRange(number, start, end) { + return number >= nativeMin(start, end) && number < nativeMax(start, end); + } + + /** + * The base implementation of methods like `_.intersection`, without support + * for iteratee shorthands, that accepts an array of arrays to inspect. + * + * @private + * @param {Array} arrays The arrays to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of shared values. + */ + function baseIntersection(arrays, iteratee, comparator) { + var includes = comparator ? arrayIncludesWith : arrayIncludes, + length = arrays[0].length, + othLength = arrays.length, + othIndex = othLength, + caches = Array(othLength), + maxLength = Infinity, + result = []; + + while (othIndex--) { + var array = arrays[othIndex]; + if (othIndex && iteratee) { + array = arrayMap(array, baseUnary(iteratee)); + } + maxLength = nativeMin(array.length, maxLength); + caches[othIndex] = !comparator && (iteratee || (length >= 120 && array.length >= 120)) + ? new SetCache(othIndex && array) + : undefined; + } + array = arrays[0]; + + var index = -1, + seen = caches[0]; + + outer: + while (++index < length && result.length < maxLength) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? value : 0; + if (!(seen + ? cacheHas(seen, computed) + : includes(result, computed, comparator) + )) { + othIndex = othLength; + while (--othIndex) { + var cache = caches[othIndex]; + if (!(cache + ? cacheHas(cache, computed) + : includes(arrays[othIndex], computed, comparator)) + ) { + continue outer; + } + } + if (seen) { + seen.push(computed); + } + result.push(value); + } + } + return result; + } + + /** + * The base implementation of `_.invert` and `_.invertBy` which inverts + * `object` with values transformed by `iteratee` and set by `setter`. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform values. + * @param {Object} accumulator The initial inverted object. + * @returns {Function} Returns `accumulator`. + */ + function baseInverter(object, setter, iteratee, accumulator) { + baseForOwn(object, function(value, key, object) { + setter(accumulator, iteratee(value), key, object); + }); + return accumulator; + } + + /** + * The base implementation of `_.invoke` without support for individual + * method arguments. + * + * @private + * @param {Object} object The object to query. 
+ * @param {Array|string} path The path of the method to invoke. + * @param {Array} args The arguments to invoke the method with. + * @returns {*} Returns the result of the invoked method. + */ + function baseInvoke(object, path, args) { + path = castPath(path, object); + object = parent(object, path); + var func = object == null ? object : object[toKey(last(path))]; + return func == null ? undefined : apply(func, object, args); + } + + /** + * The base implementation of `_.isArguments`. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + */ + function baseIsArguments(value) { + return isObjectLike(value) && baseGetTag(value) == argsTag; + } + + /** + * The base implementation of `_.isArrayBuffer` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array buffer, else `false`. + */ + function baseIsArrayBuffer(value) { + return isObjectLike(value) && baseGetTag(value) == arrayBufferTag; + } + + /** + * The base implementation of `_.isDate` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. + */ + function baseIsDate(value) { + return isObjectLike(value) && baseGetTag(value) == dateTag; + } + + /** + * The base implementation of `_.isEqual` which supports partial comparisons + * and tracks traversed objects. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @param {boolean} bitmask The bitmask flags. + * 1 - Unordered comparison + * 2 - Partial comparison + * @param {Function} [customizer] The function to customize comparisons. + * @param {Object} [stack] Tracks traversed `value` and `other` objects. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + */ + function baseIsEqual(value, other, bitmask, customizer, stack) { + if (value === other) { + return true; + } + if (value == null || other == null || (!isObjectLike(value) && !isObjectLike(other))) { + return value !== value && other !== other; + } + return baseIsEqualDeep(value, other, bitmask, customizer, baseIsEqual, stack); + } + + /** + * A specialized version of `baseIsEqual` for arrays and objects which performs + * deep comparisons and tracks traversed objects enabling objects with circular + * references to be compared. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} [stack] Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function baseIsEqualDeep(object, other, bitmask, customizer, equalFunc, stack) { + var objIsArr = isArray(object), + othIsArr = isArray(other), + objTag = objIsArr ? arrayTag : getTag(object), + othTag = othIsArr ? arrayTag : getTag(other); + + objTag = objTag == argsTag ? objectTag : objTag; + othTag = othTag == argsTag ? 
objectTag : othTag; + + var objIsObj = objTag == objectTag, + othIsObj = othTag == objectTag, + isSameTag = objTag == othTag; + + if (isSameTag && isBuffer(object)) { + if (!isBuffer(other)) { + return false; + } + objIsArr = true; + objIsObj = false; + } + if (isSameTag && !objIsObj) { + stack || (stack = new Stack); + return (objIsArr || isTypedArray(object)) + ? equalArrays(object, other, bitmask, customizer, equalFunc, stack) + : equalByTag(object, other, objTag, bitmask, customizer, equalFunc, stack); + } + if (!(bitmask & COMPARE_PARTIAL_FLAG)) { + var objIsWrapped = objIsObj && hasOwnProperty.call(object, '__wrapped__'), + othIsWrapped = othIsObj && hasOwnProperty.call(other, '__wrapped__'); + + if (objIsWrapped || othIsWrapped) { + var objUnwrapped = objIsWrapped ? object.value() : object, + othUnwrapped = othIsWrapped ? other.value() : other; + + stack || (stack = new Stack); + return equalFunc(objUnwrapped, othUnwrapped, bitmask, customizer, stack); + } + } + if (!isSameTag) { + return false; + } + stack || (stack = new Stack); + return equalObjects(object, other, bitmask, customizer, equalFunc, stack); + } + + /** + * The base implementation of `_.isMap` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. + */ + function baseIsMap(value) { + return isObjectLike(value) && getTag(value) == mapTag; + } + + /** + * The base implementation of `_.isMatch` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @param {Array} matchData The property names, values, and compare flags to match. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + */ + function baseIsMatch(object, source, matchData, customizer) { + var index = matchData.length, + length = index, + noCustomizer = !customizer; + + if (object == null) { + return !length; + } + object = Object(object); + while (index--) { + var data = matchData[index]; + if ((noCustomizer && data[2]) + ? data[1] !== object[data[0]] + : !(data[0] in object) + ) { + return false; + } + } + while (++index < length) { + data = matchData[index]; + var key = data[0], + objValue = object[key], + srcValue = data[1]; + + if (noCustomizer && data[2]) { + if (objValue === undefined && !(key in object)) { + return false; + } + } else { + var stack = new Stack; + if (customizer) { + var result = customizer(objValue, srcValue, key, object, source, stack); + } + if (!(result === undefined + ? baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG, customizer, stack) + : result + )) { + return false; + } + } + } + return true; + } + + /** + * The base implementation of `_.isNative` without bad shim checks. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. + */ + function baseIsNative(value) { + if (!isObject(value) || isMasked(value)) { + return false; + } + var pattern = isFunction(value) ? reIsNative : reIsHostCtor; + return pattern.test(toSource(value)); + } + + /** + * The base implementation of `_.isRegExp` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. 
+ */ + function baseIsRegExp(value) { + return isObjectLike(value) && baseGetTag(value) == regexpTag; + } + + /** + * The base implementation of `_.isSet` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. + */ + function baseIsSet(value) { + return isObjectLike(value) && getTag(value) == setTag; + } + + /** + * The base implementation of `_.isTypedArray` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + */ + function baseIsTypedArray(value) { + return isObjectLike(value) && + isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; + } + + /** + * The base implementation of `_.iteratee`. + * + * @private + * @param {*} [value=_.identity] The value to convert to an iteratee. + * @returns {Function} Returns the iteratee. + */ + function baseIteratee(value) { + // Don't store the `typeof` result in a variable to avoid a JIT bug in Safari 9. + // See https://bugs.webkit.org/show_bug.cgi?id=156034 for more details. + if (typeof value == 'function') { + return value; + } + if (value == null) { + return identity; + } + if (typeof value == 'object') { + return isArray(value) + ? baseMatchesProperty(value[0], value[1]) + : baseMatches(value); + } + return property(value); + } + + /** + * The base implementation of `_.keys` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function baseKeys(object) { + if (!isPrototype(object)) { + return nativeKeys(object); + } + var result = []; + for (var key in Object(object)) { + if (hasOwnProperty.call(object, key) && key != 'constructor') { + result.push(key); + } + } + return result; + } + + /** + * The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function baseKeysIn(object) { + if (!isObject(object)) { + return nativeKeysIn(object); + } + var isProto = isPrototype(object), + result = []; + + for (var key in object) { + if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) { + result.push(key); + } + } + return result; + } + + /** + * The base implementation of `_.lt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. + */ + function baseLt(value, other) { + return value < other; + } + + /** + * The base implementation of `_.map` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ + function baseMap(collection, iteratee) { + var index = -1, + result = isArrayLike(collection) ? Array(collection.length) : []; + + baseEach(collection, function(value, key, collection) { + result[++index] = iteratee(value, key, collection); + }); + return result; + } + + /** + * The base implementation of `_.matches` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property values to match. 
+ * @returns {Function} Returns the new spec function. + */ + function baseMatches(source) { + var matchData = getMatchData(source); + if (matchData.length == 1 && matchData[0][2]) { + return matchesStrictComparable(matchData[0][0], matchData[0][1]); + } + return function(object) { + return object === source || baseIsMatch(object, source, matchData); + }; + } + + /** + * The base implementation of `_.matchesProperty` which doesn't clone `srcValue`. + * + * @private + * @param {string} path The path of the property to get. + * @param {*} srcValue The value to match. + * @returns {Function} Returns the new spec function. + */ + function baseMatchesProperty(path, srcValue) { + if (isKey(path) && isStrictComparable(srcValue)) { + return matchesStrictComparable(toKey(path), srcValue); + } + return function(object) { + var objValue = get(object, path); + return (objValue === undefined && objValue === srcValue) + ? hasIn(object, path) + : baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG); + }; + } + + /** + * The base implementation of `_.merge` without support for multiple sources. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {number} srcIndex The index of `source`. + * @param {Function} [customizer] The function to customize merged values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + */ + function baseMerge(object, source, srcIndex, customizer, stack) { + if (object === source) { + return; + } + baseFor(source, function(srcValue, key) { + stack || (stack = new Stack); + if (isObject(srcValue)) { + baseMergeDeep(object, source, key, srcIndex, baseMerge, customizer, stack); + } + else { + var newValue = customizer + ? customizer(safeGet(object, key), srcValue, (key + ''), object, source, stack) + : undefined; + + if (newValue === undefined) { + newValue = srcValue; + } + assignMergeValue(object, key, newValue); + } + }, keysIn); + } + + /** + * A specialized version of `baseMerge` for arrays and objects which performs + * deep merges and tracks traversed objects enabling objects with circular + * references to be merged. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {string} key The key of the value to merge. + * @param {number} srcIndex The index of `source`. + * @param {Function} mergeFunc The function to merge values. + * @param {Function} [customizer] The function to customize assigned values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + */ + function baseMergeDeep(object, source, key, srcIndex, mergeFunc, customizer, stack) { + var objValue = safeGet(object, key), + srcValue = safeGet(source, key), + stacked = stack.get(srcValue); + + if (stacked) { + assignMergeValue(object, key, stacked); + return; + } + var newValue = customizer + ? 
customizer(objValue, srcValue, (key + ''), object, source, stack) + : undefined; + + var isCommon = newValue === undefined; + + if (isCommon) { + var isArr = isArray(srcValue), + isBuff = !isArr && isBuffer(srcValue), + isTyped = !isArr && !isBuff && isTypedArray(srcValue); + + newValue = srcValue; + if (isArr || isBuff || isTyped) { + if (isArray(objValue)) { + newValue = objValue; + } + else if (isArrayLikeObject(objValue)) { + newValue = copyArray(objValue); + } + else if (isBuff) { + isCommon = false; + newValue = cloneBuffer(srcValue, true); + } + else if (isTyped) { + isCommon = false; + newValue = cloneTypedArray(srcValue, true); + } + else { + newValue = []; + } + } + else if (isPlainObject(srcValue) || isArguments(srcValue)) { + newValue = objValue; + if (isArguments(objValue)) { + newValue = toPlainObject(objValue); + } + else if (!isObject(objValue) || isFunction(objValue)) { + newValue = initCloneObject(srcValue); + } + } + else { + isCommon = false; + } + } + if (isCommon) { + // Recursively merge objects and arrays (susceptible to call stack limits). + stack.set(srcValue, newValue); + mergeFunc(newValue, srcValue, srcIndex, customizer, stack); + stack['delete'](srcValue); + } + assignMergeValue(object, key, newValue); + } + + /** + * The base implementation of `_.nth` which doesn't coerce arguments. + * + * @private + * @param {Array} array The array to query. + * @param {number} n The index of the element to return. + * @returns {*} Returns the nth element of `array`. + */ + function baseNth(array, n) { + var length = array.length; + if (!length) { + return; + } + n += n < 0 ? length : 0; + return isIndex(n, length) ? array[n] : undefined; + } + + /** + * The base implementation of `_.orderBy` without param guards. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function[]|Object[]|string[]} iteratees The iteratees to sort by. + * @param {string[]} orders The sort orders of `iteratees`. + * @returns {Array} Returns the new sorted array. + */ + function baseOrderBy(collection, iteratees, orders) { + if (iteratees.length) { + iteratees = arrayMap(iteratees, function(iteratee) { + if (isArray(iteratee)) { + return function(value) { + return baseGet(value, iteratee.length === 1 ? iteratee[0] : iteratee); + }; + } + return iteratee; + }); + } else { + iteratees = [identity]; + } + + var index = -1; + iteratees = arrayMap(iteratees, baseUnary(getIteratee())); + + var result = baseMap(collection, function(value, key, collection) { + var criteria = arrayMap(iteratees, function(iteratee) { + return iteratee(value); + }); + return { 'criteria': criteria, 'index': ++index, 'value': value }; + }); + + return baseSortBy(result, function(object, other) { + return compareMultiple(object, other, orders); + }); + } + + /** + * The base implementation of `_.pick` without support for individual + * property identifiers. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @returns {Object} Returns the new object. + */ + function basePick(object, paths) { + return basePickBy(object, paths, function(value, path) { + return hasIn(object, path); + }); + } + + /** + * The base implementation of `_.pickBy` without support for iteratee shorthands. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @param {Function} predicate The function invoked per property. + * @returns {Object} Returns the new object. 
+ */ + function basePickBy(object, paths, predicate) { + var index = -1, + length = paths.length, + result = {}; + + while (++index < length) { + var path = paths[index], + value = baseGet(object, path); + + if (predicate(value, path)) { + baseSet(result, castPath(path, object), value); + } + } + return result; + } + + /** + * A specialized version of `baseProperty` which supports deep paths. + * + * @private + * @param {Array|string} path The path of the property to get. + * @returns {Function} Returns the new accessor function. + */ + function basePropertyDeep(path) { + return function(object) { + return baseGet(object, path); + }; + } + + /** + * The base implementation of `_.pullAllBy` without support for iteratee + * shorthands. + * + * @private + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns `array`. + */ + function basePullAll(array, values, iteratee, comparator) { + var indexOf = comparator ? baseIndexOfWith : baseIndexOf, + index = -1, + length = values.length, + seen = array; + + if (array === values) { + values = copyArray(values); + } + if (iteratee) { + seen = arrayMap(array, baseUnary(iteratee)); + } + while (++index < length) { + var fromIndex = 0, + value = values[index], + computed = iteratee ? iteratee(value) : value; + + while ((fromIndex = indexOf(seen, computed, fromIndex, comparator)) > -1) { + if (seen !== array) { + splice.call(seen, fromIndex, 1); + } + splice.call(array, fromIndex, 1); + } + } + return array; + } + + /** + * The base implementation of `_.pullAt` without support for individual + * indexes or capturing the removed elements. + * + * @private + * @param {Array} array The array to modify. + * @param {number[]} indexes The indexes of elements to remove. + * @returns {Array} Returns `array`. + */ + function basePullAt(array, indexes) { + var length = array ? indexes.length : 0, + lastIndex = length - 1; + + while (length--) { + var index = indexes[length]; + if (length == lastIndex || index !== previous) { + var previous = index; + if (isIndex(index)) { + splice.call(array, index, 1); + } else { + baseUnset(array, index); + } + } + } + return array; + } + + /** + * The base implementation of `_.random` without support for returning + * floating-point numbers. + * + * @private + * @param {number} lower The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the random number. + */ + function baseRandom(lower, upper) { + return lower + nativeFloor(nativeRandom() * (upper - lower + 1)); + } + + /** + * The base implementation of `_.range` and `_.rangeRight` which doesn't + * coerce arguments. + * + * @private + * @param {number} start The start of the range. + * @param {number} end The end of the range. + * @param {number} step The value to increment or decrement by. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the range of numbers. + */ + function baseRange(start, end, step, fromRight) { + var index = -1, + length = nativeMax(nativeCeil((end - start) / (step || 1)), 0), + result = Array(length); + + while (length--) { + result[fromRight ? length : ++index] = start; + start += step; + } + return result; + } + + /** + * The base implementation of `_.repeat` which doesn't coerce arguments. + * + * @private + * @param {string} string The string to repeat. 
+ * @param {number} n The number of times to repeat the string. + * @returns {string} Returns the repeated string. + */ + function baseRepeat(string, n) { + var result = ''; + if (!string || n < 1 || n > MAX_SAFE_INTEGER) { + return result; + } + // Leverage the exponentiation by squaring algorithm for a faster repeat. + // See https://en.wikipedia.org/wiki/Exponentiation_by_squaring for more details. + do { + if (n % 2) { + result += string; + } + n = nativeFloor(n / 2); + if (n) { + string += string; + } + } while (n); + + return result; + } + + /** + * The base implementation of `_.rest` which doesn't validate or coerce arguments. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + */ + function baseRest(func, start) { + return setToString(overRest(func, start, identity), func + ''); + } + + /** + * The base implementation of `_.sample`. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @returns {*} Returns the random element. + */ + function baseSample(collection) { + return arraySample(values(collection)); + } + + /** + * The base implementation of `_.sampleSize` without param guards. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @param {number} n The number of elements to sample. + * @returns {Array} Returns the random elements. + */ + function baseSampleSize(collection, n) { + var array = values(collection); + return shuffleSelf(array, baseClamp(n, 0, array.length)); + } + + /** + * The base implementation of `_.set`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. + */ + function baseSet(object, path, value, customizer) { + if (!isObject(object)) { + return object; + } + path = castPath(path, object); + + var index = -1, + length = path.length, + lastIndex = length - 1, + nested = object; + + while (nested != null && ++index < length) { + var key = toKey(path[index]), + newValue = value; + + if (key === '__proto__' || key === 'constructor' || key === 'prototype') { + return object; + } + + if (index != lastIndex) { + var objValue = nested[key]; + newValue = customizer ? customizer(objValue, key, nested) : undefined; + if (newValue === undefined) { + newValue = isObject(objValue) + ? objValue + : (isIndex(path[index + 1]) ? [] : {}); + } + } + assignValue(nested, key, newValue); + nested = nested[key]; + } + return object; + } + + /** + * The base implementation of `setData` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ + var baseSetData = !metaMap ? identity : function(func, data) { + metaMap.set(func, data); + return func; + }; + + /** + * The base implementation of `setToString` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. + */ + var baseSetToString = !defineProperty ? 
identity : function(func, string) { + return defineProperty(func, 'toString', { + 'configurable': true, + 'enumerable': false, + 'value': constant(string), + 'writable': true + }); + }; + + /** + * The base implementation of `_.shuffle`. + * + * @private + * @param {Array|Object} collection The collection to shuffle. + * @returns {Array} Returns the new shuffled array. + */ + function baseShuffle(collection) { + return shuffleSelf(values(collection)); + } + + /** + * The base implementation of `_.slice` without an iteratee call guard. + * + * @private + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function baseSlice(array, start, end) { + var index = -1, + length = array.length; + + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = end > length ? length : end; + if (end < 0) { + end += length; + } + length = start > end ? 0 : ((end - start) >>> 0); + start >>>= 0; + + var result = Array(length); + while (++index < length) { + result[index] = array[index + start]; + } + return result; + } + + /** + * The base implementation of `_.some` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + */ + function baseSome(collection, predicate) { + var result; + + baseEach(collection, function(value, index, collection) { + result = predicate(value, index, collection); + return !result; + }); + return !!result; + } + + /** + * The base implementation of `_.sortedIndex` and `_.sortedLastIndex` which + * performs a binary search of `array` to determine the index at which `value` + * should be inserted into `array` in order to maintain its sort order. + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + */ + function baseSortedIndex(array, value, retHighest) { + var low = 0, + high = array == null ? low : array.length; + + if (typeof value == 'number' && value === value && high <= HALF_MAX_ARRAY_LENGTH) { + while (low < high) { + var mid = (low + high) >>> 1, + computed = array[mid]; + + if (computed !== null && !isSymbol(computed) && + (retHighest ? (computed <= value) : (computed < value))) { + low = mid + 1; + } else { + high = mid; + } + } + return high; + } + return baseSortedIndexBy(array, value, identity, retHighest); + } + + /** + * The base implementation of `_.sortedIndexBy` and `_.sortedLastIndexBy` + * which invokes `iteratee` for `value` and each element of `array` to compute + * their sort ranking. The iteratee is invoked with one argument; (value). + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} iteratee The iteratee invoked per element. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + */ + function baseSortedIndexBy(array, value, iteratee, retHighest) { + var low = 0, + high = array == null ? 
0 : array.length; + if (high === 0) { + return 0; + } + + value = iteratee(value); + var valIsNaN = value !== value, + valIsNull = value === null, + valIsSymbol = isSymbol(value), + valIsUndefined = value === undefined; + + while (low < high) { + var mid = nativeFloor((low + high) / 2), + computed = iteratee(array[mid]), + othIsDefined = computed !== undefined, + othIsNull = computed === null, + othIsReflexive = computed === computed, + othIsSymbol = isSymbol(computed); + + if (valIsNaN) { + var setLow = retHighest || othIsReflexive; + } else if (valIsUndefined) { + setLow = othIsReflexive && (retHighest || othIsDefined); + } else if (valIsNull) { + setLow = othIsReflexive && othIsDefined && (retHighest || !othIsNull); + } else if (valIsSymbol) { + setLow = othIsReflexive && othIsDefined && !othIsNull && (retHighest || !othIsSymbol); + } else if (othIsNull || othIsSymbol) { + setLow = false; + } else { + setLow = retHighest ? (computed <= value) : (computed < value); + } + if (setLow) { + low = mid + 1; + } else { + high = mid; + } + } + return nativeMin(high, MAX_ARRAY_INDEX); + } + + /** + * The base implementation of `_.sortedUniq` and `_.sortedUniqBy` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + */ + function baseSortedUniq(array, iteratee) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + if (!index || !eq(computed, seen)) { + var seen = computed; + result[resIndex++] = value === 0 ? 0 : value; + } + } + return result; + } + + /** + * The base implementation of `_.toNumber` which doesn't ensure correct + * conversions of binary, hexadecimal, or octal string values. + * + * @private + * @param {*} value The value to process. + * @returns {number} Returns the number. + */ + function baseToNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + return +value; + } + + /** + * The base implementation of `_.toString` which doesn't convert nullish + * values to empty strings. + * + * @private + * @param {*} value The value to process. + * @returns {string} Returns the string. + */ + function baseToString(value) { + // Exit early for strings to avoid a performance hit in some environments. + if (typeof value == 'string') { + return value; + } + if (isArray(value)) { + // Recursively convert values (susceptible to call stack limits). + return arrayMap(value, baseToString) + ''; + } + if (isSymbol(value)) { + return symbolToString ? symbolToString.call(value) : ''; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; + } + + /** + * The base implementation of `_.uniqBy` without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. 
+ */ + function baseUniq(array, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + length = array.length, + isCommon = true, + result = [], + seen = result; + + if (comparator) { + isCommon = false; + includes = arrayIncludesWith; + } + else if (length >= LARGE_ARRAY_SIZE) { + var set = iteratee ? null : createSet(array); + if (set) { + return setToArray(set); + } + isCommon = false; + includes = cacheHas; + seen = new SetCache; + } + else { + seen = iteratee ? [] : result; + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? value : 0; + if (isCommon && computed === computed) { + var seenIndex = seen.length; + while (seenIndex--) { + if (seen[seenIndex] === computed) { + continue outer; + } + } + if (iteratee) { + seen.push(computed); + } + result.push(value); + } + else if (!includes(seen, computed, comparator)) { + if (seen !== result) { + seen.push(computed); + } + result.push(value); + } + } + return result; + } + + /** + * The base implementation of `_.unset`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The property path to unset. + * @returns {boolean} Returns `true` if the property is deleted, else `false`. + */ + function baseUnset(object, path) { + path = castPath(path, object); + + // Prevent prototype pollution, see: https://github.com/lodash/lodash/security/advisories/GHSA-xxjr-mmjv-4gpg + var index = -1, + length = path.length; + + if (!length) { + return true; + } + + var isRootPrimitive = object == null || (typeof object !== 'object' && typeof object !== 'function'); + + while (++index < length) { + var key = path[index]; + + // skip non-string keys (e.g., Symbols, numbers) + if (typeof key !== 'string') { + continue; + } + + // Always block "__proto__" anywhere in the path if it's not expected + if (key === '__proto__' && !hasOwnProperty.call(object, '__proto__')) { + return false; + } + + // Block "constructor.prototype" chains + if (key === 'constructor' && + (index + 1) < length && + typeof path[index + 1] === 'string' && + path[index + 1] === 'prototype') { + + // Allow ONLY when the path starts at a primitive root, e.g., _.unset(0, 'constructor.prototype.a') + if (isRootPrimitive && index === 0) { + continue; + } + + return false; + } + } + + var obj = parent(object, path); + return obj == null || delete obj[toKey(last(path))]; + } + + /** + * The base implementation of `_.update`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to update. + * @param {Function} updater The function to produce the updated value. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. + */ + function baseUpdate(object, path, updater, customizer) { + return baseSet(object, path, updater(baseGet(object, path)), customizer); + } + + /** + * The base implementation of methods like `_.dropWhile` and `_.takeWhile` + * without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to query. + * @param {Function} predicate The function invoked per iteration. + * @param {boolean} [isDrop] Specify dropping elements instead of taking them. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the slice of `array`. 
+ */ + function baseWhile(array, predicate, isDrop, fromRight) { + var length = array.length, + index = fromRight ? length : -1; + + while ((fromRight ? index-- : ++index < length) && + predicate(array[index], index, array)) {} + + return isDrop + ? baseSlice(array, (fromRight ? 0 : index), (fromRight ? index + 1 : length)) + : baseSlice(array, (fromRight ? index + 1 : 0), (fromRight ? length : index)); + } + + /** + * The base implementation of `wrapperValue` which returns the result of + * performing a sequence of actions on the unwrapped `value`, where each + * successive action is supplied the return value of the previous. + * + * @private + * @param {*} value The unwrapped value. + * @param {Array} actions Actions to perform to resolve the unwrapped value. + * @returns {*} Returns the resolved value. + */ + function baseWrapperValue(value, actions) { + var result = value; + if (result instanceof LazyWrapper) { + result = result.value(); + } + return arrayReduce(actions, function(result, action) { + return action.func.apply(action.thisArg, arrayPush([result], action.args)); + }, result); + } + + /** + * The base implementation of methods like `_.xor`, without support for + * iteratee shorthands, that accepts an array of arrays to inspect. + * + * @private + * @param {Array} arrays The arrays to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of values. + */ + function baseXor(arrays, iteratee, comparator) { + var length = arrays.length; + if (length < 2) { + return length ? baseUniq(arrays[0]) : []; + } + var index = -1, + result = Array(length); + + while (++index < length) { + var array = arrays[index], + othIndex = -1; + + while (++othIndex < length) { + if (othIndex != index) { + result[index] = baseDifference(result[index] || array, arrays[othIndex], iteratee, comparator); + } + } + } + return baseUniq(baseFlatten(result, 1), iteratee, comparator); + } + + /** + * This base implementation of `_.zipObject` which assigns values using `assignFunc`. + * + * @private + * @param {Array} props The property identifiers. + * @param {Array} values The property values. + * @param {Function} assignFunc The function to assign values. + * @returns {Object} Returns the new object. + */ + function baseZipObject(props, values, assignFunc) { + var index = -1, + length = props.length, + valsLength = values.length, + result = {}; + + while (++index < length) { + var value = index < valsLength ? values[index] : undefined; + assignFunc(result, props[index], value); + } + return result; + } + + /** + * Casts `value` to an empty array if it's not an array like object. + * + * @private + * @param {*} value The value to inspect. + * @returns {Array|Object} Returns the cast array-like object. + */ + function castArrayLikeObject(value) { + return isArrayLikeObject(value) ? value : []; + } + + /** + * Casts `value` to `identity` if it's not a function. + * + * @private + * @param {*} value The value to inspect. + * @returns {Function} Returns cast function. + */ + function castFunction(value) { + return typeof value == 'function' ? value : identity; + } + + /** + * Casts `value` to a path array if it's not one. + * + * @private + * @param {*} value The value to inspect. + * @param {Object} [object] The object to query keys on. + * @returns {Array} Returns the cast property path array. 
+ */ + function castPath(value, object) { + if (isArray(value)) { + return value; + } + return isKey(value, object) ? [value] : stringToPath(toString(value)); + } + + /** + * A `baseRest` alias which can be replaced with `identity` by module + * replacement plugins. + * + * @private + * @type {Function} + * @param {Function} func The function to apply a rest parameter to. + * @returns {Function} Returns the new function. + */ + var castRest = baseRest; + + /** + * Casts `array` to a slice if it's needed. + * + * @private + * @param {Array} array The array to inspect. + * @param {number} start The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the cast slice. + */ + function castSlice(array, start, end) { + var length = array.length; + end = end === undefined ? length : end; + return (!start && end >= length) ? array : baseSlice(array, start, end); + } + + /** + * A simple wrapper around the global [`clearTimeout`](https://mdn.io/clearTimeout). + * + * @private + * @param {number|Object} id The timer id or timeout object of the timer to clear. + */ + var clearTimeout = ctxClearTimeout || function(id) { + return root.clearTimeout(id); + }; + + /** + * Creates a clone of `buffer`. + * + * @private + * @param {Buffer} buffer The buffer to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Buffer} Returns the cloned buffer. + */ + function cloneBuffer(buffer, isDeep) { + if (isDeep) { + return buffer.slice(); + } + var length = buffer.length, + result = allocUnsafe ? allocUnsafe(length) : new buffer.constructor(length); + + buffer.copy(result); + return result; + } + + /** + * Creates a clone of `arrayBuffer`. + * + * @private + * @param {ArrayBuffer} arrayBuffer The array buffer to clone. + * @returns {ArrayBuffer} Returns the cloned array buffer. + */ + function cloneArrayBuffer(arrayBuffer) { + var result = new arrayBuffer.constructor(arrayBuffer.byteLength); + new Uint8Array(result).set(new Uint8Array(arrayBuffer)); + return result; + } + + /** + * Creates a clone of `dataView`. + * + * @private + * @param {Object} dataView The data view to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned data view. + */ + function cloneDataView(dataView, isDeep) { + var buffer = isDeep ? cloneArrayBuffer(dataView.buffer) : dataView.buffer; + return new dataView.constructor(buffer, dataView.byteOffset, dataView.byteLength); + } + + /** + * Creates a clone of `regexp`. + * + * @private + * @param {Object} regexp The regexp to clone. + * @returns {Object} Returns the cloned regexp. + */ + function cloneRegExp(regexp) { + var result = new regexp.constructor(regexp.source, reFlags.exec(regexp)); + result.lastIndex = regexp.lastIndex; + return result; + } + + /** + * Creates a clone of the `symbol` object. + * + * @private + * @param {Object} symbol The symbol object to clone. + * @returns {Object} Returns the cloned symbol object. + */ + function cloneSymbol(symbol) { + return symbolValueOf ? Object(symbolValueOf.call(symbol)) : {}; + } + + /** + * Creates a clone of `typedArray`. + * + * @private + * @param {Object} typedArray The typed array to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned typed array. + */ + function cloneTypedArray(typedArray, isDeep) { + var buffer = isDeep ? 
cloneArrayBuffer(typedArray.buffer) : typedArray.buffer; + return new typedArray.constructor(buffer, typedArray.byteOffset, typedArray.length); + } + + /** + * Compares values to sort them in ascending order. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {number} Returns the sort order indicator for `value`. + */ + function compareAscending(value, other) { + if (value !== other) { + var valIsDefined = value !== undefined, + valIsNull = value === null, + valIsReflexive = value === value, + valIsSymbol = isSymbol(value); + + var othIsDefined = other !== undefined, + othIsNull = other === null, + othIsReflexive = other === other, + othIsSymbol = isSymbol(other); + + if ((!othIsNull && !othIsSymbol && !valIsSymbol && value > other) || + (valIsSymbol && othIsDefined && othIsReflexive && !othIsNull && !othIsSymbol) || + (valIsNull && othIsDefined && othIsReflexive) || + (!valIsDefined && othIsReflexive) || + !valIsReflexive) { + return 1; + } + if ((!valIsNull && !valIsSymbol && !othIsSymbol && value < other) || + (othIsSymbol && valIsDefined && valIsReflexive && !valIsNull && !valIsSymbol) || + (othIsNull && valIsDefined && valIsReflexive) || + (!othIsDefined && valIsReflexive) || + !othIsReflexive) { + return -1; + } + } + return 0; + } + + /** + * Used by `_.orderBy` to compare multiple properties of a value to another + * and stable sort them. + * + * If `orders` is unspecified, all values are sorted in ascending order. Otherwise, + * specify an order of "desc" for descending or "asc" for ascending sort order + * of corresponding values. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {boolean[]|string[]} orders The order to sort by for each property. + * @returns {number} Returns the sort order indicator for `object`. + */ + function compareMultiple(object, other, orders) { + var index = -1, + objCriteria = object.criteria, + othCriteria = other.criteria, + length = objCriteria.length, + ordersLength = orders.length; + + while (++index < length) { + var result = compareAscending(objCriteria[index], othCriteria[index]); + if (result) { + if (index >= ordersLength) { + return result; + } + var order = orders[index]; + return result * (order == 'desc' ? -1 : 1); + } + } + // Fixes an `Array#sort` bug in the JS engine embedded in Adobe applications + // that causes it, under certain circumstances, to provide the same value for + // `object` and `other`. See https://github.com/jashkenas/underscore/pull/1247 + // for more details. + // + // This also ensures a stable sort in V8 and other engines. + // See https://bugs.chromium.org/p/v8/issues/detail?id=90 for more details. + return object.index - other.index; + } + + /** + * Creates an array that is the composition of partially applied arguments, + * placeholders, and provided arguments into a single array of arguments. + * + * @private + * @param {Array} args The provided arguments. + * @param {Array} partials The arguments to prepend to those provided. + * @param {Array} holders The `partials` placeholder indexes. + * @params {boolean} [isCurried] Specify composing for a curried function. + * @returns {Array} Returns the new array of composed arguments. 
+ */ + function composeArgs(args, partials, holders, isCurried) { + var argsIndex = -1, + argsLength = args.length, + holdersLength = holders.length, + leftIndex = -1, + leftLength = partials.length, + rangeLength = nativeMax(argsLength - holdersLength, 0), + result = Array(leftLength + rangeLength), + isUncurried = !isCurried; + + while (++leftIndex < leftLength) { + result[leftIndex] = partials[leftIndex]; + } + while (++argsIndex < holdersLength) { + if (isUncurried || argsIndex < argsLength) { + result[holders[argsIndex]] = args[argsIndex]; + } + } + while (rangeLength--) { + result[leftIndex++] = args[argsIndex++]; + } + return result; + } + + /** + * This function is like `composeArgs` except that the arguments composition + * is tailored for `_.partialRight`. + * + * @private + * @param {Array} args The provided arguments. + * @param {Array} partials The arguments to append to those provided. + * @param {Array} holders The `partials` placeholder indexes. + * @params {boolean} [isCurried] Specify composing for a curried function. + * @returns {Array} Returns the new array of composed arguments. + */ + function composeArgsRight(args, partials, holders, isCurried) { + var argsIndex = -1, + argsLength = args.length, + holdersIndex = -1, + holdersLength = holders.length, + rightIndex = -1, + rightLength = partials.length, + rangeLength = nativeMax(argsLength - holdersLength, 0), + result = Array(rangeLength + rightLength), + isUncurried = !isCurried; + + while (++argsIndex < rangeLength) { + result[argsIndex] = args[argsIndex]; + } + var offset = argsIndex; + while (++rightIndex < rightLength) { + result[offset + rightIndex] = partials[rightIndex]; + } + while (++holdersIndex < holdersLength) { + if (isUncurried || argsIndex < argsLength) { + result[offset + holders[holdersIndex]] = args[argsIndex++]; + } + } + return result; + } + + /** + * Copies the values of `source` to `array`. + * + * @private + * @param {Array} source The array to copy values from. + * @param {Array} [array=[]] The array to copy values to. + * @returns {Array} Returns `array`. + */ + function copyArray(source, array) { + var index = -1, + length = source.length; + + array || (array = Array(length)); + while (++index < length) { + array[index] = source[index]; + } + return array; + } + + /** + * Copies properties of `source` to `object`. + * + * @private + * @param {Object} source The object to copy properties from. + * @param {Array} props The property identifiers to copy. + * @param {Object} [object={}] The object to copy properties to. + * @param {Function} [customizer] The function to customize copied values. + * @returns {Object} Returns `object`. + */ + function copyObject(source, props, object, customizer) { + var isNew = !object; + object || (object = {}); + + var index = -1, + length = props.length; + + while (++index < length) { + var key = props[index]; + + var newValue = customizer + ? customizer(object[key], source[key], key, object, source) + : undefined; + + if (newValue === undefined) { + newValue = source[key]; + } + if (isNew) { + baseAssignValue(object, key, newValue); + } else { + assignValue(object, key, newValue); + } + } + return object; + } + + /** + * Copies own symbols of `source` to `object`. + * + * @private + * @param {Object} source The object to copy symbols from. + * @param {Object} [object={}] The object to copy symbols to. + * @returns {Object} Returns `object`. 
+ */ + function copySymbols(source, object) { + return copyObject(source, getSymbols(source), object); + } + + /** + * Copies own and inherited symbols of `source` to `object`. + * + * @private + * @param {Object} source The object to copy symbols from. + * @param {Object} [object={}] The object to copy symbols to. + * @returns {Object} Returns `object`. + */ + function copySymbolsIn(source, object) { + return copyObject(source, getSymbolsIn(source), object); + } + + /** + * Creates a function like `_.groupBy`. + * + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} [initializer] The accumulator object initializer. + * @returns {Function} Returns the new aggregator function. + */ + function createAggregator(setter, initializer) { + return function(collection, iteratee) { + var func = isArray(collection) ? arrayAggregator : baseAggregator, + accumulator = initializer ? initializer() : {}; + + return func(collection, setter, getIteratee(iteratee, 2), accumulator); + }; + } + + /** + * Creates a function like `_.assign`. + * + * @private + * @param {Function} assigner The function to assign values. + * @returns {Function} Returns the new assigner function. + */ + function createAssigner(assigner) { + return baseRest(function(object, sources) { + var index = -1, + length = sources.length, + customizer = length > 1 ? sources[length - 1] : undefined, + guard = length > 2 ? sources[2] : undefined; + + customizer = (assigner.length > 3 && typeof customizer == 'function') + ? (length--, customizer) + : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + customizer = length < 3 ? undefined : customizer; + length = 1; + } + object = Object(object); + while (++index < length) { + var source = sources[index]; + if (source) { + assigner(object, source, index, customizer); + } + } + return object; + }); + } + + /** + * Creates a `baseEach` or `baseEachRight` function. + * + * @private + * @param {Function} eachFunc The function to iterate over a collection. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ + function createBaseEach(eachFunc, fromRight) { + return function(collection, iteratee) { + if (collection == null) { + return collection; + } + if (!isArrayLike(collection)) { + return eachFunc(collection, iteratee); + } + var length = collection.length, + index = fromRight ? length : -1, + iterable = Object(collection); + + while ((fromRight ? index-- : ++index < length)) { + if (iteratee(iterable[index], index, iterable) === false) { + break; + } + } + return collection; + }; + } + + /** + * Creates a base function for methods like `_.forIn` and `_.forOwn`. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ + function createBaseFor(fromRight) { + return function(object, iteratee, keysFunc) { + var index = -1, + iterable = Object(object), + props = keysFunc(object), + length = props.length; + + while (length--) { + var key = props[fromRight ? length : ++index]; + if (iteratee(iterable[key], key, iterable) === false) { + break; + } + } + return object; + }; + } + + /** + * Creates a function that wraps `func` to invoke it with the optional `this` + * binding of `thisArg`. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. 
+ * @param {*} [thisArg] The `this` binding of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createBind(func, bitmask, thisArg) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); + + function wrapper() { + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return fn.apply(isBind ? thisArg : this, arguments); + } + return wrapper; + } + + /** + * Creates a function like `_.lowerFirst`. + * + * @private + * @param {string} methodName The name of the `String` case method to use. + * @returns {Function} Returns the new case function. + */ + function createCaseFirst(methodName) { + return function(string) { + string = toString(string); + + var strSymbols = hasUnicode(string) + ? stringToArray(string) + : undefined; + + var chr = strSymbols + ? strSymbols[0] + : string.charAt(0); + + var trailing = strSymbols + ? castSlice(strSymbols, 1).join('') + : string.slice(1); + + return chr[methodName]() + trailing; + }; + } + + /** + * Creates a function like `_.camelCase`. + * + * @private + * @param {Function} callback The function to combine each word. + * @returns {Function} Returns the new compounder function. + */ + function createCompounder(callback) { + return function(string) { + return arrayReduce(words(deburr(string).replace(reApos, '')), callback, ''); + }; + } + + /** + * Creates a function that produces an instance of `Ctor` regardless of + * whether it was invoked as part of a `new` expression or by `call` or `apply`. + * + * @private + * @param {Function} Ctor The constructor to wrap. + * @returns {Function} Returns the new wrapped function. + */ + function createCtor(Ctor) { + return function() { + // Use a `switch` statement to work with class constructors. See + // http://ecma-international.org/ecma-262/7.0/#sec-ecmascript-function-objects-call-thisargument-argumentslist + // for more details. + var args = arguments; + switch (args.length) { + case 0: return new Ctor; + case 1: return new Ctor(args[0]); + case 2: return new Ctor(args[0], args[1]); + case 3: return new Ctor(args[0], args[1], args[2]); + case 4: return new Ctor(args[0], args[1], args[2], args[3]); + case 5: return new Ctor(args[0], args[1], args[2], args[3], args[4]); + case 6: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5]); + case 7: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5], args[6]); + } + var thisBinding = baseCreate(Ctor.prototype), + result = Ctor.apply(thisBinding, args); + + // Mimic the constructor's `return` behavior. + // See https://es5.github.io/#x13.2.2 for more details. + return isObject(result) ? result : thisBinding; + }; + } + + /** + * Creates a function that wraps `func` to enable currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {number} arity The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createCurry(func, bitmask, arity) { + var Ctor = createCtor(func); + + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length, + placeholder = getHolder(wrapper); + + while (index--) { + args[index] = arguments[index]; + } + var holders = (length < 3 && args[0] !== placeholder && args[length - 1] !== placeholder) + ? 
[] + : replaceHolders(args, placeholder); + + length -= holders.length; + if (length < arity) { + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, undefined, + args, holders, undefined, undefined, arity - length); + } + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return apply(fn, this, args); + } + return wrapper; + } + + /** + * Creates a `_.find` or `_.findLast` function. + * + * @private + * @param {Function} findIndexFunc The function to find the collection index. + * @returns {Function} Returns the new find function. + */ + function createFind(findIndexFunc) { + return function(collection, predicate, fromIndex) { + var iterable = Object(collection); + if (!isArrayLike(collection)) { + var iteratee = getIteratee(predicate, 3); + collection = keys(collection); + predicate = function(key) { return iteratee(iterable[key], key, iterable); }; + } + var index = findIndexFunc(collection, predicate, fromIndex); + return index > -1 ? iterable[iteratee ? collection[index] : index] : undefined; + }; + } + + /** + * Creates a `_.flow` or `_.flowRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new flow function. + */ + function createFlow(fromRight) { + return flatRest(function(funcs) { + var length = funcs.length, + index = length, + prereq = LodashWrapper.prototype.thru; + + if (fromRight) { + funcs.reverse(); + } + while (index--) { + var func = funcs[index]; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + if (prereq && !wrapper && getFuncName(func) == 'wrapper') { + var wrapper = new LodashWrapper([], true); + } + } + index = wrapper ? index : length; + while (++index < length) { + func = funcs[index]; + + var funcName = getFuncName(func), + data = funcName == 'wrapper' ? getData(func) : undefined; + + if (data && isLaziable(data[0]) && + data[1] == (WRAP_ARY_FLAG | WRAP_CURRY_FLAG | WRAP_PARTIAL_FLAG | WRAP_REARG_FLAG) && + !data[4].length && data[9] == 1 + ) { + wrapper = wrapper[getFuncName(data[0])].apply(wrapper, data[3]); + } else { + wrapper = (func.length == 1 && isLaziable(func)) + ? wrapper[funcName]() + : wrapper.thru(func); + } + } + return function() { + var args = arguments, + value = args[0]; + + if (wrapper && args.length == 1 && isArray(value)) { + return wrapper.plant(value).value(); + } + var index = 0, + result = length ? funcs[index].apply(this, args) : value; + + while (++index < length) { + result = funcs[index].call(this, result); + } + return result; + }; + }); + } + + /** + * Creates a function that wraps `func` to invoke it with optional `this` + * binding of `thisArg`, partial application, and currying. + * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [partialsRight] The arguments to append to those provided + * to the new function. + * @param {Array} [holdersRight] The `partialsRight` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. 
+ */ + function createHybrid(func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, argPos, ary, arity) { + var isAry = bitmask & WRAP_ARY_FLAG, + isBind = bitmask & WRAP_BIND_FLAG, + isBindKey = bitmask & WRAP_BIND_KEY_FLAG, + isCurried = bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG), + isFlip = bitmask & WRAP_FLIP_FLAG, + Ctor = isBindKey ? undefined : createCtor(func); + + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length; + + while (index--) { + args[index] = arguments[index]; + } + if (isCurried) { + var placeholder = getHolder(wrapper), + holdersCount = countHolders(args, placeholder); + } + if (partials) { + args = composeArgs(args, partials, holders, isCurried); + } + if (partialsRight) { + args = composeArgsRight(args, partialsRight, holdersRight, isCurried); + } + length -= holdersCount; + if (isCurried && length < arity) { + var newHolders = replaceHolders(args, placeholder); + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, thisArg, + args, newHolders, argPos, ary, arity - length + ); + } + var thisBinding = isBind ? thisArg : this, + fn = isBindKey ? thisBinding[func] : func; + + length = args.length; + if (argPos) { + args = reorder(args, argPos); + } else if (isFlip && length > 1) { + args.reverse(); + } + if (isAry && ary < length) { + args.length = ary; + } + if (this && this !== root && this instanceof wrapper) { + fn = Ctor || createCtor(fn); + } + return fn.apply(thisBinding, args); + } + return wrapper; + } + + /** + * Creates a function like `_.invertBy`. + * + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} toIteratee The function to resolve iteratees. + * @returns {Function} Returns the new inverter function. + */ + function createInverter(setter, toIteratee) { + return function(object, iteratee) { + return baseInverter(object, setter, toIteratee(iteratee), {}); + }; + } + + /** + * Creates a function that performs a mathematical operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @param {number} [defaultValue] The value used for `undefined` arguments. + * @returns {Function} Returns the new mathematical operation function. + */ + function createMathOperation(operator, defaultValue) { + return function(value, other) { + var result; + if (value === undefined && other === undefined) { + return defaultValue; + } + if (value !== undefined) { + result = value; + } + if (other !== undefined) { + if (result === undefined) { + return other; + } + if (typeof value == 'string' || typeof other == 'string') { + value = baseToString(value); + other = baseToString(other); + } else { + value = baseToNumber(value); + other = baseToNumber(other); + } + result = operator(value, other); + } + return result; + }; + } + + /** + * Creates a function like `_.over`. + * + * @private + * @param {Function} arrayFunc The function to iterate over iteratees. + * @returns {Function} Returns the new over function. + */ + function createOver(arrayFunc) { + return flatRest(function(iteratees) { + iteratees = arrayMap(iteratees, baseUnary(getIteratee())); + return baseRest(function(args) { + var thisArg = this; + return arrayFunc(iteratees, function(iteratee) { + return apply(iteratee, thisArg, args); + }); + }); + }); + } + + /** + * Creates the padding for `string` based on `length`. The `chars` string + * is truncated if the number of characters exceeds `length`. 
+ * + * @private + * @param {number} length The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padding for `string`. + */ + function createPadding(length, chars) { + chars = chars === undefined ? ' ' : baseToString(chars); + + var charsLength = chars.length; + if (charsLength < 2) { + return charsLength ? baseRepeat(chars, length) : chars; + } + var result = baseRepeat(chars, nativeCeil(length / stringSize(chars))); + return hasUnicode(chars) + ? castSlice(stringToArray(result), 0, length).join('') + : result.slice(0, length); + } + + /** + * Creates a function that wraps `func` to invoke it with the `this` binding + * of `thisArg` and `partials` prepended to the arguments it receives. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} partials The arguments to prepend to those provided to + * the new function. + * @returns {Function} Returns the new wrapped function. + */ + function createPartial(func, bitmask, thisArg, partials) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); + + function wrapper() { + var argsIndex = -1, + argsLength = arguments.length, + leftIndex = -1, + leftLength = partials.length, + args = Array(leftLength + argsLength), + fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + + while (++leftIndex < leftLength) { + args[leftIndex] = partials[leftIndex]; + } + while (argsLength--) { + args[leftIndex++] = arguments[++argsIndex]; + } + return apply(fn, isBind ? thisArg : this, args); + } + return wrapper; + } + + /** + * Creates a `_.range` or `_.rangeRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new range function. + */ + function createRange(fromRight) { + return function(start, end, step) { + if (step && typeof step != 'number' && isIterateeCall(start, end, step)) { + end = step = undefined; + } + // Ensure the sign of `-0` is preserved. + start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); + } + step = step === undefined ? (start < end ? 1 : -1) : toFinite(step); + return baseRange(start, end, step, fromRight); + }; + } + + /** + * Creates a function that performs a relational operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @returns {Function} Returns the new relational operation function. + */ + function createRelationalOperation(operator) { + return function(value, other) { + if (!(typeof value == 'string' && typeof other == 'string')) { + value = toNumber(value); + other = toNumber(other); + } + return operator(value, other); + }; + } + + /** + * Creates a function that wraps `func` to continue currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {Function} wrapFunc The function to create the `func` wrapper. + * @param {*} placeholder The placeholder value. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. 
+ * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createRecurry(func, bitmask, wrapFunc, placeholder, thisArg, partials, holders, argPos, ary, arity) { + var isCurry = bitmask & WRAP_CURRY_FLAG, + newHolders = isCurry ? holders : undefined, + newHoldersRight = isCurry ? undefined : holders, + newPartials = isCurry ? partials : undefined, + newPartialsRight = isCurry ? undefined : partials; + + bitmask |= (isCurry ? WRAP_PARTIAL_FLAG : WRAP_PARTIAL_RIGHT_FLAG); + bitmask &= ~(isCurry ? WRAP_PARTIAL_RIGHT_FLAG : WRAP_PARTIAL_FLAG); + + if (!(bitmask & WRAP_CURRY_BOUND_FLAG)) { + bitmask &= ~(WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG); + } + var newData = [ + func, bitmask, thisArg, newPartials, newHolders, newPartialsRight, + newHoldersRight, argPos, ary, arity + ]; + + var result = wrapFunc.apply(undefined, newData); + if (isLaziable(func)) { + setData(result, newData); + } + result.placeholder = placeholder; + return setWrapToString(result, func, bitmask); + } + + /** + * Creates a function like `_.round`. + * + * @private + * @param {string} methodName The name of the `Math` method to use when rounding. + * @returns {Function} Returns the new round function. + */ + function createRound(methodName) { + var func = Math[methodName]; + return function(number, precision) { + number = toNumber(number); + precision = precision == null ? 0 : nativeMin(toInteger(precision), 292); + if (precision && nativeIsFinite(number)) { + // Shift with exponential notation to avoid floating-point issues. + // See [MDN](https://mdn.io/round#Examples) for more details. + var pair = (toString(number) + 'e').split('e'), + value = func(pair[0] + 'e' + (+pair[1] + precision)); + + pair = (toString(value) + 'e').split('e'); + return +(pair[0] + 'e' + (+pair[1] - precision)); + } + return func(number); + }; + } + + /** + * Creates a set object of `values`. + * + * @private + * @param {Array} values The values to add to the set. + * @returns {Object} Returns the new set. + */ + var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { + return new Set(values); + }; + + /** + * Creates a `_.toPairs` or `_.toPairsIn` function. + * + * @private + * @param {Function} keysFunc The function to get the keys of a given object. + * @returns {Function} Returns the new pairs function. + */ + function createToPairs(keysFunc) { + return function(object) { + var tag = getTag(object); + if (tag == mapTag) { + return mapToArray(object); + } + if (tag == setTag) { + return setToPairs(object); + } + return baseToPairs(object, keysFunc(object)); + }; + } + + /** + * Creates a function that either curries or invokes `func` with optional + * `this` binding and partially applied arguments. + * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. + * 1 - `_.bind` + * 2 - `_.bindKey` + * 4 - `_.curry` or `_.curryRight` of a bound function + * 8 - `_.curry` + * 16 - `_.curryRight` + * 32 - `_.partial` + * 64 - `_.partialRight` + * 128 - `_.rearg` + * 256 - `_.ary` + * 512 - `_.flip` + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to be partially applied. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. 
+ * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createWrap(func, bitmask, thisArg, partials, holders, argPos, ary, arity) { + var isBindKey = bitmask & WRAP_BIND_KEY_FLAG; + if (!isBindKey && typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + var length = partials ? partials.length : 0; + if (!length) { + bitmask &= ~(WRAP_PARTIAL_FLAG | WRAP_PARTIAL_RIGHT_FLAG); + partials = holders = undefined; + } + ary = ary === undefined ? ary : nativeMax(toInteger(ary), 0); + arity = arity === undefined ? arity : toInteger(arity); + length -= holders ? holders.length : 0; + + if (bitmask & WRAP_PARTIAL_RIGHT_FLAG) { + var partialsRight = partials, + holdersRight = holders; + + partials = holders = undefined; + } + var data = isBindKey ? undefined : getData(func); + + var newData = [ + func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, + argPos, ary, arity + ]; + + if (data) { + mergeData(newData, data); + } + func = newData[0]; + bitmask = newData[1]; + thisArg = newData[2]; + partials = newData[3]; + holders = newData[4]; + arity = newData[9] = newData[9] === undefined + ? (isBindKey ? 0 : func.length) + : nativeMax(newData[9] - length, 0); + + if (!arity && bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG)) { + bitmask &= ~(WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG); + } + if (!bitmask || bitmask == WRAP_BIND_FLAG) { + var result = createBind(func, bitmask, thisArg); + } else if (bitmask == WRAP_CURRY_FLAG || bitmask == WRAP_CURRY_RIGHT_FLAG) { + result = createCurry(func, bitmask, arity); + } else if ((bitmask == WRAP_PARTIAL_FLAG || bitmask == (WRAP_BIND_FLAG | WRAP_PARTIAL_FLAG)) && !holders.length) { + result = createPartial(func, bitmask, thisArg, partials); + } else { + result = createHybrid.apply(undefined, newData); + } + var setter = data ? baseSetData : setData; + return setWrapToString(setter(result, newData), func, bitmask); + } + + /** + * Used by `_.defaults` to customize its `_.assignIn` use to assign properties + * of source objects to the destination object for all destination properties + * that resolve to `undefined`. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to assign. + * @param {Object} object The parent object of `objValue`. + * @returns {*} Returns the value to assign. + */ + function customDefaultsAssignIn(objValue, srcValue, key, object) { + if (objValue === undefined || + (eq(objValue, objectProto[key]) && !hasOwnProperty.call(object, key))) { + return srcValue; + } + return objValue; + } + + /** + * Used by `_.defaultsDeep` to customize its `_.merge` use to merge source + * objects into destination objects that are passed thru. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to merge. + * @param {Object} object The parent object of `objValue`. + * @param {Object} source The parent object of `srcValue`. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + * @returns {*} Returns the value to assign. + */ + function customDefaultsMerge(objValue, srcValue, key, object, source, stack) { + if (isObject(objValue) && isObject(srcValue)) { + // Recursively merge objects and arrays (susceptible to call stack limits). 
+ stack.set(srcValue, objValue); + baseMerge(objValue, srcValue, undefined, customDefaultsMerge, stack); + stack['delete'](srcValue); + } + return objValue; + } + + /** + * Used by `_.omit` to customize its `_.cloneDeep` use to only clone plain + * objects. + * + * @private + * @param {*} value The value to inspect. + * @param {string} key The key of the property to inspect. + * @returns {*} Returns the uncloned value or `undefined` to defer cloning to `_.cloneDeep`. + */ + function customOmitClone(value) { + return isPlainObject(value) ? undefined : value; + } + + /** + * A specialized version of `baseIsEqualDeep` for arrays with support for + * partial deep comparisons. + * + * @private + * @param {Array} array The array to compare. + * @param {Array} other The other array to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `array` and `other` objects. + * @returns {boolean} Returns `true` if the arrays are equivalent, else `false`. + */ + function equalArrays(array, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + arrLength = array.length, + othLength = other.length; + + if (arrLength != othLength && !(isPartial && othLength > arrLength)) { + return false; + } + // Check that cyclic values are equal. + var arrStacked = stack.get(array); + var othStacked = stack.get(other); + if (arrStacked && othStacked) { + return arrStacked == other && othStacked == array; + } + var index = -1, + result = true, + seen = (bitmask & COMPARE_UNORDERED_FLAG) ? new SetCache : undefined; + + stack.set(array, other); + stack.set(other, array); + + // Ignore non-index properties. + while (++index < arrLength) { + var arrValue = array[index], + othValue = other[index]; + + if (customizer) { + var compared = isPartial + ? customizer(othValue, arrValue, index, other, array, stack) + : customizer(arrValue, othValue, index, array, other, stack); + } + if (compared !== undefined) { + if (compared) { + continue; + } + result = false; + break; + } + // Recursively compare arrays (susceptible to call stack limits). + if (seen) { + if (!arraySome(other, function(othValue, othIndex) { + if (!cacheHas(seen, othIndex) && + (arrValue === othValue || equalFunc(arrValue, othValue, bitmask, customizer, stack))) { + return seen.push(othIndex); + } + })) { + result = false; + break; + } + } else if (!( + arrValue === othValue || + equalFunc(arrValue, othValue, bitmask, customizer, stack) + )) { + result = false; + break; + } + } + stack['delete'](array); + stack['delete'](other); + return result; + } + + /** + * A specialized version of `baseIsEqualDeep` for comparing objects of + * the same `toStringTag`. + * + * **Note:** This function only supports comparing values with tags of + * `Boolean`, `Date`, `Error`, `Number`, `RegExp`, or `String`. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {string} tag The `toStringTag` of the objects to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. 
+ * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalByTag(object, other, tag, bitmask, customizer, equalFunc, stack) { + switch (tag) { + case dataViewTag: + if ((object.byteLength != other.byteLength) || + (object.byteOffset != other.byteOffset)) { + return false; + } + object = object.buffer; + other = other.buffer; + + case arrayBufferTag: + if ((object.byteLength != other.byteLength) || + !equalFunc(new Uint8Array(object), new Uint8Array(other))) { + return false; + } + return true; + + case boolTag: + case dateTag: + case numberTag: + // Coerce booleans to `1` or `0` and dates to milliseconds. + // Invalid dates are coerced to `NaN`. + return eq(+object, +other); + + case errorTag: + return object.name == other.name && object.message == other.message; + + case regexpTag: + case stringTag: + // Coerce regexes to strings and treat strings, primitives and objects, + // as equal. See http://www.ecma-international.org/ecma-262/7.0/#sec-regexp.prototype.tostring + // for more details. + return object == (other + ''); + + case mapTag: + var convert = mapToArray; + + case setTag: + var isPartial = bitmask & COMPARE_PARTIAL_FLAG; + convert || (convert = setToArray); + + if (object.size != other.size && !isPartial) { + return false; + } + // Assume cyclic values are equal. + var stacked = stack.get(object); + if (stacked) { + return stacked == other; + } + bitmask |= COMPARE_UNORDERED_FLAG; + + // Recursively compare objects (susceptible to call stack limits). + stack.set(object, other); + var result = equalArrays(convert(object), convert(other), bitmask, customizer, equalFunc, stack); + stack['delete'](object); + return result; + + case symbolTag: + if (symbolValueOf) { + return symbolValueOf.call(object) == symbolValueOf.call(other); + } + } + return false; + } + + /** + * A specialized version of `baseIsEqualDeep` for objects with support for + * partial deep comparisons. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalObjects(object, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + objProps = getAllKeys(object), + objLength = objProps.length, + othProps = getAllKeys(other), + othLength = othProps.length; + + if (objLength != othLength && !isPartial) { + return false; + } + var index = objLength; + while (index--) { + var key = objProps[index]; + if (!(isPartial ? key in other : hasOwnProperty.call(other, key))) { + return false; + } + } + // Check that cyclic values are equal. + var objStacked = stack.get(object); + var othStacked = stack.get(other); + if (objStacked && othStacked) { + return objStacked == other && othStacked == object; + } + var result = true; + stack.set(object, other); + stack.set(other, object); + + var skipCtor = isPartial; + while (++index < objLength) { + key = objProps[index]; + var objValue = object[key], + othValue = other[key]; + + if (customizer) { + var compared = isPartial + ? 
customizer(othValue, objValue, key, other, object, stack) + : customizer(objValue, othValue, key, object, other, stack); + } + // Recursively compare objects (susceptible to call stack limits). + if (!(compared === undefined + ? (objValue === othValue || equalFunc(objValue, othValue, bitmask, customizer, stack)) + : compared + )) { + result = false; + break; + } + skipCtor || (skipCtor = key == 'constructor'); + } + if (result && !skipCtor) { + var objCtor = object.constructor, + othCtor = other.constructor; + + // Non `Object` object instances with different constructors are not equal. + if (objCtor != othCtor && + ('constructor' in object && 'constructor' in other) && + !(typeof objCtor == 'function' && objCtor instanceof objCtor && + typeof othCtor == 'function' && othCtor instanceof othCtor)) { + result = false; + } + } + stack['delete'](object); + stack['delete'](other); + return result; + } + + /** + * A specialized version of `baseRest` which flattens the rest array. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @returns {Function} Returns the new function. + */ + function flatRest(func) { + return setToString(overRest(func, undefined, flatten), func + ''); + } + + /** + * Creates an array of own enumerable property names and symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ + function getAllKeys(object) { + return baseGetAllKeys(object, keys, getSymbols); + } + + /** + * Creates an array of own and inherited enumerable property names and + * symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ + function getAllKeysIn(object) { + return baseGetAllKeys(object, keysIn, getSymbolsIn); + } + + /** + * Gets metadata for `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {*} Returns the metadata for `func`. + */ + var getData = !metaMap ? noop : function(func) { + return metaMap.get(func); + }; + + /** + * Gets the name of `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {string} Returns the function name. + */ + function getFuncName(func) { + var result = (func.name + ''), + array = realNames[result], + length = hasOwnProperty.call(realNames, result) ? array.length : 0; + + while (length--) { + var data = array[length], + otherFunc = data.func; + if (otherFunc == null || otherFunc == func) { + return data.name; + } + } + return result; + } + + /** + * Gets the argument placeholder value for `func`. + * + * @private + * @param {Function} func The function to inspect. + * @returns {*} Returns the placeholder value. + */ + function getHolder(func) { + var object = hasOwnProperty.call(lodash, 'placeholder') ? lodash : func; + return object.placeholder; + } + + /** + * Gets the appropriate "iteratee" function. If `_.iteratee` is customized, + * this function returns the custom method, otherwise it returns `baseIteratee`. + * If arguments are provided, the chosen function is invoked with them and + * its result is returned. + * + * @private + * @param {*} [value] The value to convert to an iteratee. + * @param {number} [arity] The arity of the created iteratee. + * @returns {Function} Returns the chosen function or its result. + */ + function getIteratee() { + var result = lodash.iteratee || iteratee; + result = result === iteratee ? 
baseIteratee : result; + return arguments.length ? result(arguments[0], arguments[1]) : result; + } + + /** + * Gets the data for `map`. + * + * @private + * @param {Object} map The map to query. + * @param {string} key The reference key. + * @returns {*} Returns the map data. + */ + function getMapData(map, key) { + var data = map.__data__; + return isKeyable(key) + ? data[typeof key == 'string' ? 'string' : 'hash'] + : data.map; + } + + /** + * Gets the property names, values, and compare flags of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the match data of `object`. + */ + function getMatchData(object) { + var result = keys(object), + length = result.length; + + while (length--) { + var key = result[length], + value = object[key]; + + result[length] = [key, value, isStrictComparable(value)]; + } + return result; + } + + /** + * Gets the native function at `key` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the method to get. + * @returns {*} Returns the function if it's native, else `undefined`. + */ + function getNative(object, key) { + var value = getValue(object, key); + return baseIsNative(value) ? value : undefined; + } + + /** + * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the raw `toStringTag`. + */ + function getRawTag(value) { + var isOwn = hasOwnProperty.call(value, symToStringTag), + tag = value[symToStringTag]; + + try { + value[symToStringTag] = undefined; + var unmasked = true; + } catch (e) {} + + var result = nativeObjectToString.call(value); + if (unmasked) { + if (isOwn) { + value[symToStringTag] = tag; + } else { + delete value[symToStringTag]; + } + } + return result; + } + + /** + * Creates an array of the own enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ + var getSymbols = !nativeGetSymbols ? stubArray : function(object) { + if (object == null) { + return []; + } + object = Object(object); + return arrayFilter(nativeGetSymbols(object), function(symbol) { + return propertyIsEnumerable.call(object, symbol); + }); + }; + + /** + * Creates an array of the own and inherited enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ + var getSymbolsIn = !nativeGetSymbols ? stubArray : function(object) { + var result = []; + while (object) { + arrayPush(result, getSymbols(object)); + object = getPrototype(object); + } + return result; + }; + + /** + * Gets the `toStringTag` of `value`. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ + var getTag = baseGetTag; + + // Fallback for data views, maps, sets, and weak maps in IE 11 and promises in Node.js < 6. + if ((DataView && getTag(new DataView(new ArrayBuffer(1))) != dataViewTag) || + (Map && getTag(new Map) != mapTag) || + (Promise && getTag(Promise.resolve()) != promiseTag) || + (Set && getTag(new Set) != setTag) || + (WeakMap && getTag(new WeakMap) != weakMapTag)) { + getTag = function(value) { + var result = baseGetTag(value), + Ctor = result == objectTag ? value.constructor : undefined, + ctorString = Ctor ? 
toSource(Ctor) : ''; + + if (ctorString) { + switch (ctorString) { + case dataViewCtorString: return dataViewTag; + case mapCtorString: return mapTag; + case promiseCtorString: return promiseTag; + case setCtorString: return setTag; + case weakMapCtorString: return weakMapTag; + } + } + return result; + }; + } + + /** + * Gets the view, applying any `transforms` to the `start` and `end` positions. + * + * @private + * @param {number} start The start of the view. + * @param {number} end The end of the view. + * @param {Array} transforms The transformations to apply to the view. + * @returns {Object} Returns an object containing the `start` and `end` + * positions of the view. + */ + function getView(start, end, transforms) { + var index = -1, + length = transforms.length; + + while (++index < length) { + var data = transforms[index], + size = data.size; + + switch (data.type) { + case 'drop': start += size; break; + case 'dropRight': end -= size; break; + case 'take': end = nativeMin(end, start + size); break; + case 'takeRight': start = nativeMax(start, end - size); break; + } + } + return { 'start': start, 'end': end }; + } + + /** + * Extracts wrapper details from the `source` body comment. + * + * @private + * @param {string} source The source to inspect. + * @returns {Array} Returns the wrapper details. + */ + function getWrapDetails(source) { + var match = source.match(reWrapDetails); + return match ? match[1].split(reSplitDetails) : []; + } + + /** + * Checks if `path` exists on `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @param {Function} hasFunc The function to check properties. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + */ + function hasPath(object, path, hasFunc) { + path = castPath(path, object); + + var index = -1, + length = path.length, + result = false; + + while (++index < length) { + var key = toKey(path[index]); + if (!(result = object != null && hasFunc(object, key))) { + break; + } + object = object[key]; + } + if (result || ++index != length) { + return result; + } + length = object == null ? 0 : object.length; + return !!length && isLength(length) && isIndex(key, length) && + (isArray(object) || isArguments(object)); + } + + /** + * Initializes an array clone. + * + * @private + * @param {Array} array The array to clone. + * @returns {Array} Returns the initialized clone. + */ + function initCloneArray(array) { + var length = array.length, + result = new array.constructor(length); + + // Add properties assigned by `RegExp#exec`. + if (length && typeof array[0] == 'string' && hasOwnProperty.call(array, 'index')) { + result.index = array.index; + result.input = array.input; + } + return result; + } + + /** + * Initializes an object clone. + * + * @private + * @param {Object} object The object to clone. + * @returns {Object} Returns the initialized clone. + */ + function initCloneObject(object) { + return (typeof object.constructor == 'function' && !isPrototype(object)) + ? baseCreate(getPrototype(object)) + : {}; + } + + /** + * Initializes an object clone based on its `toStringTag`. + * + * **Note:** This function only supports cloning values with tags of + * `Boolean`, `Date`, `Error`, `Map`, `Number`, `RegExp`, `Set`, or `String`. + * + * @private + * @param {Object} object The object to clone. + * @param {string} tag The `toStringTag` of the object to clone. + * @param {boolean} [isDeep] Specify a deep clone. 
+ * @returns {Object} Returns the initialized clone. + */ + function initCloneByTag(object, tag, isDeep) { + var Ctor = object.constructor; + switch (tag) { + case arrayBufferTag: + return cloneArrayBuffer(object); + + case boolTag: + case dateTag: + return new Ctor(+object); + + case dataViewTag: + return cloneDataView(object, isDeep); + + case float32Tag: case float64Tag: + case int8Tag: case int16Tag: case int32Tag: + case uint8Tag: case uint8ClampedTag: case uint16Tag: case uint32Tag: + return cloneTypedArray(object, isDeep); + + case mapTag: + return new Ctor; + + case numberTag: + case stringTag: + return new Ctor(object); + + case regexpTag: + return cloneRegExp(object); + + case setTag: + return new Ctor; + + case symbolTag: + return cloneSymbol(object); + } + } + + /** + * Inserts wrapper `details` in a comment at the top of the `source` body. + * + * @private + * @param {string} source The source to modify. + * @returns {Array} details The details to insert. + * @returns {string} Returns the modified source. + */ + function insertWrapDetails(source, details) { + var length = details.length; + if (!length) { + return source; + } + var lastIndex = length - 1; + details[lastIndex] = (length > 1 ? '& ' : '') + details[lastIndex]; + details = details.join(length > 2 ? ', ' : ' '); + return source.replace(reWrapComment, '{\n/* [wrapped with ' + details + '] */\n'); + } + + /** + * Checks if `value` is a flattenable `arguments` object or array. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is flattenable, else `false`. + */ + function isFlattenable(value) { + return isArray(value) || isArguments(value) || + !!(spreadableSymbol && value && value[spreadableSymbol]); + } + + /** + * Checks if `value` is a valid array-like index. + * + * @private + * @param {*} value The value to check. + * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. + * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. + */ + function isIndex(value, length) { + var type = typeof value; + length = length == null ? MAX_SAFE_INTEGER : length; + + return !!length && + (type == 'number' || + (type != 'symbol' && reIsUint.test(value))) && + (value > -1 && value % 1 == 0 && value < length); + } + + /** + * Checks if the given arguments are from an iteratee call. + * + * @private + * @param {*} value The potential iteratee value argument. + * @param {*} index The potential iteratee index or key argument. + * @param {*} object The potential iteratee object argument. + * @returns {boolean} Returns `true` if the arguments are from an iteratee call, + * else `false`. + */ + function isIterateeCall(value, index, object) { + if (!isObject(object)) { + return false; + } + var type = typeof index; + if (type == 'number' + ? (isArrayLike(object) && isIndex(index, object.length)) + : (type == 'string' && index in object) + ) { + return eq(object[index], value); + } + return false; + } + + /** + * Checks if `value` is a property name and not a property path. + * + * @private + * @param {*} value The value to check. + * @param {Object} [object] The object to query keys on. + * @returns {boolean} Returns `true` if `value` is a property name, else `false`. 
+ */ + function isKey(value, object) { + if (isArray(value)) { + return false; + } + var type = typeof value; + if (type == 'number' || type == 'symbol' || type == 'boolean' || + value == null || isSymbol(value)) { + return true; + } + return reIsPlainProp.test(value) || !reIsDeepProp.test(value) || + (object != null && value in Object(object)); + } + + /** + * Checks if `value` is suitable for use as unique object key. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is suitable, else `false`. + */ + function isKeyable(value) { + var type = typeof value; + return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') + ? (value !== '__proto__') + : (value === null); + } + + /** + * Checks if `func` has a lazy counterpart. + * + * @private + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` has a lazy counterpart, + * else `false`. + */ + function isLaziable(func) { + var funcName = getFuncName(func), + other = lodash[funcName]; + + if (typeof other != 'function' || !(funcName in LazyWrapper.prototype)) { + return false; + } + if (func === other) { + return true; + } + var data = getData(other); + return !!data && func === data[0]; + } + + /** + * Checks if `func` has its source masked. + * + * @private + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` is masked, else `false`. + */ + function isMasked(func) { + return !!maskSrcKey && (maskSrcKey in func); + } + + /** + * Checks if `func` is capable of being masked. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `func` is maskable, else `false`. + */ + var isMaskable = coreJsData ? isFunction : stubFalse; + + /** + * Checks if `value` is likely a prototype object. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. + */ + function isPrototype(value) { + var Ctor = value && value.constructor, + proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto; + + return value === proto; + } + + /** + * Checks if `value` is suitable for strict equality comparisons, i.e. `===`. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` if suitable for strict + * equality comparisons, else `false`. + */ + function isStrictComparable(value) { + return value === value && !isObject(value); + } + + /** + * A specialized version of `matchesProperty` for source values suitable + * for strict equality comparisons, i.e. `===`. + * + * @private + * @param {string} key The key of the property to get. + * @param {*} srcValue The value to match. + * @returns {Function} Returns the new spec function. + */ + function matchesStrictComparable(key, srcValue) { + return function(object) { + if (object == null) { + return false; + } + return object[key] === srcValue && + (srcValue !== undefined || (key in Object(object))); + }; + } + + /** + * A specialized version of `_.memoize` which clears the memoized function's + * cache when it exceeds `MAX_MEMOIZE_SIZE`. + * + * @private + * @param {Function} func The function to have its output memoized. + * @returns {Function} Returns the new memoized function. 
+ */ + function memoizeCapped(func) { + var result = memoize(func, function(key) { + if (cache.size === MAX_MEMOIZE_SIZE) { + cache.clear(); + } + return key; + }); + + var cache = result.cache; + return result; + } + + /** + * Merges the function metadata of `source` into `data`. + * + * Merging metadata reduces the number of wrappers used to invoke a function. + * This is possible because methods like `_.bind`, `_.curry`, and `_.partial` + * may be applied regardless of execution order. Methods like `_.ary` and + * `_.rearg` modify function arguments, making the order in which they are + * executed important, preventing the merging of metadata. However, we make + * an exception for a safe combined case where curried functions have `_.ary` + * and or `_.rearg` applied. + * + * @private + * @param {Array} data The destination metadata. + * @param {Array} source The source metadata. + * @returns {Array} Returns `data`. + */ + function mergeData(data, source) { + var bitmask = data[1], + srcBitmask = source[1], + newBitmask = bitmask | srcBitmask, + isCommon = newBitmask < (WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG | WRAP_ARY_FLAG); + + var isCombo = + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_CURRY_FLAG)) || + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_REARG_FLAG) && (data[7].length <= source[8])) || + ((srcBitmask == (WRAP_ARY_FLAG | WRAP_REARG_FLAG)) && (source[7].length <= source[8]) && (bitmask == WRAP_CURRY_FLAG)); + + // Exit early if metadata can't be merged. + if (!(isCommon || isCombo)) { + return data; + } + // Use source `thisArg` if available. + if (srcBitmask & WRAP_BIND_FLAG) { + data[2] = source[2]; + // Set when currying a bound function. + newBitmask |= bitmask & WRAP_BIND_FLAG ? 0 : WRAP_CURRY_BOUND_FLAG; + } + // Compose partial arguments. + var value = source[3]; + if (value) { + var partials = data[3]; + data[3] = partials ? composeArgs(partials, value, source[4]) : value; + data[4] = partials ? replaceHolders(data[3], PLACEHOLDER) : source[4]; + } + // Compose partial right arguments. + value = source[5]; + if (value) { + partials = data[5]; + data[5] = partials ? composeArgsRight(partials, value, source[6]) : value; + data[6] = partials ? replaceHolders(data[5], PLACEHOLDER) : source[6]; + } + // Use source `argPos` if available. + value = source[7]; + if (value) { + data[7] = value; + } + // Use source `ary` if it's smaller. + if (srcBitmask & WRAP_ARY_FLAG) { + data[8] = data[8] == null ? source[8] : nativeMin(data[8], source[8]); + } + // Use source `arity` if one is not provided. + if (data[9] == null) { + data[9] = source[9]; + } + // Use source `func` and merge bitmasks. + data[0] = source[0]; + data[1] = newBitmask; + + return data; + } + + /** + * This function is like + * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * except that it includes inherited enumerable properties. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function nativeKeysIn(object) { + var result = []; + if (object != null) { + for (var key in Object(object)) { + result.push(key); + } + } + return result; + } + + /** + * Converts `value` to a string using `Object.prototype.toString`. + * + * @private + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + */ + function objectToString(value) { + return nativeObjectToString.call(value); + } + + /** + * A specialized version of `baseRest` which transforms the rest array. 
+ * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @param {Function} transform The rest array transform. + * @returns {Function} Returns the new function. + */ + function overRest(func, start, transform) { + start = nativeMax(start === undefined ? (func.length - 1) : start, 0); + return function() { + var args = arguments, + index = -1, + length = nativeMax(args.length - start, 0), + array = Array(length); + + while (++index < length) { + array[index] = args[start + index]; + } + index = -1; + var otherArgs = Array(start + 1); + while (++index < start) { + otherArgs[index] = args[index]; + } + otherArgs[start] = transform(array); + return apply(func, this, otherArgs); + }; + } + + /** + * Gets the parent value at `path` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} path The path to get the parent value of. + * @returns {*} Returns the parent value. + */ + function parent(object, path) { + return path.length < 2 ? object : baseGet(object, baseSlice(path, 0, -1)); + } + + /** + * Reorder `array` according to the specified indexes where the element at + * the first index is assigned as the first element, the element at + * the second index is assigned as the second element, and so on. + * + * @private + * @param {Array} array The array to reorder. + * @param {Array} indexes The arranged array indexes. + * @returns {Array} Returns `array`. + */ + function reorder(array, indexes) { + var arrLength = array.length, + length = nativeMin(indexes.length, arrLength), + oldArray = copyArray(array); + + while (length--) { + var index = indexes[length]; + array[length] = isIndex(index, arrLength) ? oldArray[index] : undefined; + } + return array; + } + + /** + * Gets the value at `key`, unless `key` is "__proto__" or "constructor". + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ + function safeGet(object, key) { + if (key === 'constructor' && typeof object[key] === 'function') { + return; + } + + if (key == '__proto__') { + return; + } + + return object[key]; + } + + /** + * Sets metadata for `func`. + * + * **Note:** If this function becomes hot, i.e. is invoked a lot in a short + * period of time, it will trip its breaker and transition to an identity + * function to avoid garbage collection pauses in V8. See + * [V8 issue 2070](https://bugs.chromium.org/p/v8/issues/detail?id=2070) + * for more details. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ + var setData = shortOut(baseSetData); + + /** + * A simple wrapper around the global [`setTimeout`](https://mdn.io/setTimeout). + * + * @private + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @returns {number|Object} Returns the timer id or timeout object. + */ + var setTimeout = ctxSetTimeout || function(func, wait) { + return root.setTimeout(func, wait); + }; + + /** + * Sets the `toString` method of `func` to return `string`. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. 
+ */ + var setToString = shortOut(baseSetToString); + + /** + * Sets the `toString` method of `wrapper` to mimic the source of `reference` + * with wrapper details in a comment at the top of the source body. + * + * @private + * @param {Function} wrapper The function to modify. + * @param {Function} reference The reference function. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @returns {Function} Returns `wrapper`. + */ + function setWrapToString(wrapper, reference, bitmask) { + var source = (reference + ''); + return setToString(wrapper, insertWrapDetails(source, updateWrapDetails(getWrapDetails(source), bitmask))); + } + + /** + * Creates a function that'll short out and invoke `identity` instead + * of `func` when it's called `HOT_COUNT` or more times in `HOT_SPAN` + * milliseconds. + * + * @private + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new shortable function. + */ + function shortOut(func) { + var count = 0, + lastCalled = 0; + + return function() { + var stamp = nativeNow(), + remaining = HOT_SPAN - (stamp - lastCalled); + + lastCalled = stamp; + if (remaining > 0) { + if (++count >= HOT_COUNT) { + return arguments[0]; + } + } else { + count = 0; + } + return func.apply(undefined, arguments); + }; + } + + /** + * A specialized version of `_.shuffle` which mutates and sets the size of `array`. + * + * @private + * @param {Array} array The array to shuffle. + * @param {number} [size=array.length] The size of `array`. + * @returns {Array} Returns `array`. + */ + function shuffleSelf(array, size) { + var index = -1, + length = array.length, + lastIndex = length - 1; + + size = size === undefined ? length : size; + while (++index < size) { + var rand = baseRandom(index, lastIndex), + value = array[rand]; + + array[rand] = array[index]; + array[index] = value; + } + array.length = size; + return array; + } + + /** + * Converts `string` to a property path array. + * + * @private + * @param {string} string The string to convert. + * @returns {Array} Returns the property path array. + */ + var stringToPath = memoizeCapped(function(string) { + var result = []; + if (string.charCodeAt(0) === 46 /* . */) { + result.push(''); + } + string.replace(rePropName, function(match, number, quote, subString) { + result.push(quote ? subString.replace(reEscapeChar, '$1') : (number || match)); + }); + return result; + }); + + /** + * Converts `value` to a string key if it's not a string or symbol. + * + * @private + * @param {*} value The value to inspect. + * @returns {string|symbol} Returns the key. + */ + function toKey(value) { + if (typeof value == 'string' || isSymbol(value)) { + return value; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; + } + + /** + * Converts `func` to its source code. + * + * @private + * @param {Function} func The function to convert. + * @returns {string} Returns the source code. + */ + function toSource(func) { + if (func != null) { + try { + return funcToString.call(func); + } catch (e) {} + try { + return (func + ''); + } catch (e) {} + } + return ''; + } + + /** + * Updates wrapper `details` based on `bitmask` flags. + * + * @private + * @returns {Array} details The details to modify. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @returns {Array} Returns `details`. + */ + function updateWrapDetails(details, bitmask) { + arrayEach(wrapFlags, function(pair) { + var value = '_.' 
+ pair[0]; + if ((bitmask & pair[1]) && !arrayIncludes(details, value)) { + details.push(value); + } + }); + return details.sort(); + } + + /** + * Creates a clone of `wrapper`. + * + * @private + * @param {Object} wrapper The wrapper to clone. + * @returns {Object} Returns the cloned wrapper. + */ + function wrapperClone(wrapper) { + if (wrapper instanceof LazyWrapper) { + return wrapper.clone(); + } + var result = new LodashWrapper(wrapper.__wrapped__, wrapper.__chain__); + result.__actions__ = copyArray(wrapper.__actions__); + result.__index__ = wrapper.__index__; + result.__values__ = wrapper.__values__; + return result; + } + + /*------------------------------------------------------------------------*/ + + /** + * Creates an array of elements split into groups the length of `size`. + * If `array` can't be split evenly, the final chunk will be the remaining + * elements. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to process. + * @param {number} [size=1] The length of each chunk + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the new array of chunks. + * @example + * + * _.chunk(['a', 'b', 'c', 'd'], 2); + * // => [['a', 'b'], ['c', 'd']] + * + * _.chunk(['a', 'b', 'c', 'd'], 3); + * // => [['a', 'b', 'c'], ['d']] + */ + function chunk(array, size, guard) { + if ((guard ? isIterateeCall(array, size, guard) : size === undefined)) { + size = 1; + } else { + size = nativeMax(toInteger(size), 0); + } + var length = array == null ? 0 : array.length; + if (!length || size < 1) { + return []; + } + var index = 0, + resIndex = 0, + result = Array(nativeCeil(length / size)); + + while (index < length) { + result[resIndex++] = baseSlice(array, index, (index += size)); + } + return result; + } + + /** + * Creates an array with all falsey values removed. The values `false`, `null`, + * `0`, `""`, `undefined`, and `NaN` are falsey. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to compact. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.compact([0, 1, false, 2, '', 3]); + * // => [1, 2, 3] + */ + function compact(array) { + var index = -1, + length = array == null ? 0 : array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (value) { + result[resIndex++] = value; + } + } + return result; + } + + /** + * Creates a new array concatenating `array` with any additional arrays + * and/or values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to concatenate. + * @param {...*} [values] The values to concatenate. + * @returns {Array} Returns the new concatenated array. + * @example + * + * var array = [1]; + * var other = _.concat(array, 2, [3], [[4]]); + * + * console.log(other); + * // => [1, 2, 3, [4]] + * + * console.log(array); + * // => [1] + */ + function concat() { + var length = arguments.length; + if (!length) { + return []; + } + var args = Array(length - 1), + array = arguments[0], + index = length; + + while (index--) { + args[index - 1] = arguments[index]; + } + return arrayPush(isArray(array) ? copyArray(array) : [array], baseFlatten(args, 1)); + } + + /** + * Creates an array of `array` values not included in the other given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. 
The order and references of result values are + * determined by the first array. + * + * **Note:** Unlike `_.pullAll`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @returns {Array} Returns the new array of filtered values. + * @see _.without, _.xor + * @example + * + * _.difference([2, 1], [2, 3]); + * // => [1] + */ + var difference = baseRest(function(array, values) { + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true)) + : []; + }); + + /** + * This method is like `_.difference` except that it accepts `iteratee` which + * is invoked for each element of `array` and `values` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * **Note:** Unlike `_.pullAllBy`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.differenceBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [1.2] + * + * // The `_.property` iteratee shorthand. + * _.differenceBy([{ 'x': 2 }, { 'x': 1 }], [{ 'x': 1 }], 'x'); + * // => [{ 'x': 2 }] + */ + var differenceBy = baseRest(function(array, values) { + var iteratee = last(values); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), getIteratee(iteratee, 2)) + : []; + }); + + /** + * This method is like `_.difference` except that it accepts `comparator` + * which is invoked to compare elements of `array` to `values`. The order and + * references of result values are determined by the first array. The comparator + * is invoked with two arguments: (arrVal, othVal). + * + * **Note:** Unlike `_.pullAllWith`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * + * _.differenceWith(objects, [{ 'x': 1, 'y': 2 }], _.isEqual); + * // => [{ 'x': 2, 'y': 1 }] + */ + var differenceWith = baseRest(function(array, values) { + var comparator = last(values); + if (isArrayLikeObject(comparator)) { + comparator = undefined; + } + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), undefined, comparator) + : []; + }); + + /** + * Creates a slice of `array` with `n` elements dropped from the beginning. + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. 
+ * @example + * + * _.drop([1, 2, 3]); + * // => [2, 3] + * + * _.drop([1, 2, 3], 2); + * // => [3] + * + * _.drop([1, 2, 3], 5); + * // => [] + * + * _.drop([1, 2, 3], 0); + * // => [1, 2, 3] + */ + function drop(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + return baseSlice(array, n < 0 ? 0 : n, length); + } + + /** + * Creates a slice of `array` with `n` elements dropped from the end. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.dropRight([1, 2, 3]); + * // => [1, 2] + * + * _.dropRight([1, 2, 3], 2); + * // => [1] + * + * _.dropRight([1, 2, 3], 5); + * // => [] + * + * _.dropRight([1, 2, 3], 0); + * // => [1, 2, 3] + */ + function dropRight(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + n = length - n; + return baseSlice(array, 0, n < 0 ? 0 : n); + } + + /** + * Creates a slice of `array` excluding elements dropped from the end. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.dropRightWhile(users, function(o) { return !o.active; }); + * // => objects for ['barney'] + * + * // The `_.matches` iteratee shorthand. + * _.dropRightWhile(users, { 'user': 'pebbles', 'active': false }); + * // => objects for ['barney', 'fred'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.dropRightWhile(users, ['active', false]); + * // => objects for ['barney'] + * + * // The `_.property` iteratee shorthand. + * _.dropRightWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ + function dropRightWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), true, true) + : []; + } + + /** + * Creates a slice of `array` excluding elements dropped from the beginning. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.dropWhile(users, function(o) { return !o.active; }); + * // => objects for ['pebbles'] + * + * // The `_.matches` iteratee shorthand. + * _.dropWhile(users, { 'user': 'barney', 'active': false }); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.matchesProperty` iteratee shorthand. 
+ * _.dropWhile(users, ['active', false]); + * // => objects for ['pebbles'] + * + * // The `_.property` iteratee shorthand. + * _.dropWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ + function dropWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), true) + : []; + } + + /** + * Fills elements of `array` with `value` from `start` up to, but not + * including, `end`. + * + * **Note:** This method mutates `array`. + * + * @static + * @memberOf _ + * @since 3.2.0 + * @category Array + * @param {Array} array The array to fill. + * @param {*} value The value to fill `array` with. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3]; + * + * _.fill(array, 'a'); + * console.log(array); + * // => ['a', 'a', 'a'] + * + * _.fill(Array(3), 2); + * // => [2, 2, 2] + * + * _.fill([4, 6, 8, 10], '*', 1, 3); + * // => [4, '*', '*', 10] + */ + function fill(array, value, start, end) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + if (start && typeof start != 'number' && isIterateeCall(array, value, start)) { + start = 0; + end = length; + } + return baseFill(array, value, start, end); + } + + /** + * This method is like `_.find` except that it returns the index of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.findIndex(users, function(o) { return o.user == 'barney'; }); + * // => 0 + * + * // The `_.matches` iteratee shorthand. + * _.findIndex(users, { 'user': 'fred', 'active': false }); + * // => 1 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findIndex(users, ['active', false]); + * // => 0 + * + * // The `_.property` iteratee shorthand. + * _.findIndex(users, 'active'); + * // => 2 + */ + function findIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseFindIndex(array, getIteratee(predicate, 3), index); + } + + /** + * This method is like `_.findIndex` except that it iterates over elements + * of `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.findLastIndex(users, function(o) { return o.user == 'pebbles'; }); + * // => 2 + * + * // The `_.matches` iteratee shorthand. 
+ * _.findLastIndex(users, { 'user': 'barney', 'active': true }); + * // => 0 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastIndex(users, ['active', false]); + * // => 2 + * + * // The `_.property` iteratee shorthand. + * _.findLastIndex(users, 'active'); + * // => 0 + */ + function findLastIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length - 1; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = fromIndex < 0 + ? nativeMax(length + index, 0) + : nativeMin(index, length - 1); + } + return baseFindIndex(array, getIteratee(predicate, 3), index, true); + } + + /** + * Flattens `array` a single level deep. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flatten([1, [2, [3, [4]], 5]]); + * // => [1, 2, [3, [4]], 5] + */ + function flatten(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, 1) : []; + } + + /** + * Recursively flattens `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flattenDeep([1, [2, [3, [4]], 5]]); + * // => [1, 2, 3, 4, 5] + */ + function flattenDeep(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, INFINITY) : []; + } + + /** + * Recursively flatten `array` up to `depth` times. + * + * @static + * @memberOf _ + * @since 4.4.0 + * @category Array + * @param {Array} array The array to flatten. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. + * @example + * + * var array = [1, [2, [3, [4]], 5]]; + * + * _.flattenDepth(array, 1); + * // => [1, 2, [3, [4]], 5] + * + * _.flattenDepth(array, 2); + * // => [1, 2, 3, [4], 5] + */ + function flattenDepth(array, depth) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(array, depth); + } + + /** + * The inverse of `_.toPairs`; this method returns an object composed + * from key-value `pairs`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} pairs The key-value pairs. + * @returns {Object} Returns the new object. + * @example + * + * _.fromPairs([['a', 1], ['b', 2]]); + * // => { 'a': 1, 'b': 2 } + */ + function fromPairs(pairs) { + var index = -1, + length = pairs == null ? 0 : pairs.length, + result = {}; + + while (++index < length) { + var pair = pairs[index]; + result[pair[0]] = pair[1]; + } + return result; + } + + /** + * Gets the first element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias first + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the first element of `array`. + * @example + * + * _.head([1, 2, 3]); + * // => 1 + * + * _.head([]); + * // => undefined + */ + function head(array) { + return (array && array.length) ? array[0] : undefined; + } + + /** + * Gets the index at which the first occurrence of `value` is found in `array` + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. If `fromIndex` is negative, it's used as the + * offset from the end of `array`. 
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.indexOf([1, 2, 1, 2], 2); + * // => 1 + * + * // Search from the `fromIndex`. + * _.indexOf([1, 2, 1, 2], 2, 2); + * // => 3 + */ + function indexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseIndexOf(array, value, index); + } + + /** + * Gets all but the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.initial([1, 2, 3]); + * // => [1, 2] + */ + function initial(array) { + var length = array == null ? 0 : array.length; + return length ? baseSlice(array, 0, -1) : []; + } + + /** + * Creates an array of unique values that are included in all given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. The order and references of result values are + * determined by the first array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersection([2, 1], [2, 3]); + * // => [2] + */ + var intersection = baseRest(function(arrays) { + var mapped = arrayMap(arrays, castArrayLikeObject); + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped) + : []; + }); + + /** + * This method is like `_.intersection` except that it accepts `iteratee` + * which is invoked for each element of each `arrays` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersectionBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [2.1] + * + * // The `_.property` iteratee shorthand. + * _.intersectionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }] + */ + var intersectionBy = baseRest(function(arrays) { + var iteratee = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); + + if (iteratee === last(mapped)) { + iteratee = undefined; + } else { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, getIteratee(iteratee, 2)) + : []; + }); + + /** + * This method is like `_.intersection` except that it accepts `comparator` + * which is invoked to compare elements of `arrays`. The order and references + * of result values are determined by the first array. The comparator is + * invoked with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. 
+ * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.intersectionWith(objects, others, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }] + */ + var intersectionWith = baseRest(function(arrays) { + var comparator = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); + + comparator = typeof comparator == 'function' ? comparator : undefined; + if (comparator) { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, undefined, comparator) + : []; + }); + + /** + * Converts all elements in `array` into a string separated by `separator`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to convert. + * @param {string} [separator=','] The element separator. + * @returns {string} Returns the joined string. + * @example + * + * _.join(['a', 'b', 'c'], '~'); + * // => 'a~b~c' + */ + function join(array, separator) { + return array == null ? '' : nativeJoin.call(array, separator); + } + + /** + * Gets the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the last element of `array`. + * @example + * + * _.last([1, 2, 3]); + * // => 3 + */ + function last(array) { + var length = array == null ? 0 : array.length; + return length ? array[length - 1] : undefined; + } + + /** + * This method is like `_.indexOf` except that it iterates over elements of + * `array` from right to left. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.lastIndexOf([1, 2, 1, 2], 2); + * // => 3 + * + * // Search from the `fromIndex`. + * _.lastIndexOf([1, 2, 1, 2], 2, 2); + * // => 1 + */ + function lastIndexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = index < 0 ? nativeMax(length + index, 0) : nativeMin(index, length - 1); + } + return value === value + ? strictLastIndexOf(array, value, index) + : baseFindIndex(array, baseIsNaN, index, true); + } + + /** + * Gets the element at index `n` of `array`. If `n` is negative, the nth + * element from the end is returned. + * + * @static + * @memberOf _ + * @since 4.11.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=0] The index of the element to return. + * @returns {*} Returns the nth element of `array`. + * @example + * + * var array = ['a', 'b', 'c', 'd']; + * + * _.nth(array, 1); + * // => 'b' + * + * _.nth(array, -2); + * // => 'c'; + */ + function nth(array, n) { + return (array && array.length) ? baseNth(array, toInteger(n)) : undefined; + } + + /** + * Removes all given values from `array` using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * **Note:** Unlike `_.without`, this method mutates `array`. Use `_.remove` + * to remove elements from an array by predicate. 
+ * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {...*} [values] The values to remove. + * @returns {Array} Returns `array`. + * @example + * + * var array = ['a', 'b', 'c', 'a', 'b', 'c']; + * + * _.pull(array, 'a', 'c'); + * console.log(array); + * // => ['b', 'b'] + */ + var pull = baseRest(pullAll); + + /** + * This method is like `_.pull` except that it accepts an array of values to remove. + * + * **Note:** Unlike `_.difference`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @returns {Array} Returns `array`. + * @example + * + * var array = ['a', 'b', 'c', 'a', 'b', 'c']; + * + * _.pullAll(array, ['a', 'c']); + * console.log(array); + * // => ['b', 'b'] + */ + function pullAll(array, values) { + return (array && array.length && values && values.length) + ? basePullAll(array, values) + : array; + } + + /** + * This method is like `_.pullAll` except that it accepts `iteratee` which is + * invoked for each element of `array` and `values` to generate the criterion + * by which they're compared. The iteratee is invoked with one argument: (value). + * + * **Note:** Unlike `_.differenceBy`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns `array`. + * @example + * + * var array = [{ 'x': 1 }, { 'x': 2 }, { 'x': 3 }, { 'x': 1 }]; + * + * _.pullAllBy(array, [{ 'x': 1 }, { 'x': 3 }], 'x'); + * console.log(array); + * // => [{ 'x': 2 }] + */ + function pullAllBy(array, values, iteratee) { + return (array && array.length && values && values.length) + ? basePullAll(array, values, getIteratee(iteratee, 2)) + : array; + } + + /** + * This method is like `_.pullAll` except that it accepts `comparator` which + * is invoked to compare elements of `array` to `values`. The comparator is + * invoked with two arguments: (arrVal, othVal). + * + * **Note:** Unlike `_.differenceWith`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns `array`. + * @example + * + * var array = [{ 'x': 1, 'y': 2 }, { 'x': 3, 'y': 4 }, { 'x': 5, 'y': 6 }]; + * + * _.pullAllWith(array, [{ 'x': 3, 'y': 4 }], _.isEqual); + * console.log(array); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 5, 'y': 6 }] + */ + function pullAllWith(array, values, comparator) { + return (array && array.length && values && values.length) + ? basePullAll(array, values, undefined, comparator) + : array; + } + + /** + * Removes elements from `array` corresponding to `indexes` and returns an + * array of removed elements. + * + * **Note:** Unlike `_.at`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {...(number|number[])} [indexes] The indexes of elements to remove. + * @returns {Array} Returns the new array of removed elements. 
+ * @example + * + * var array = ['a', 'b', 'c', 'd']; + * var pulled = _.pullAt(array, [1, 3]); + * + * console.log(array); + * // => ['a', 'c'] + * + * console.log(pulled); + * // => ['b', 'd'] + */ + var pullAt = flatRest(function(array, indexes) { + var length = array == null ? 0 : array.length, + result = baseAt(array, indexes); + + basePullAt(array, arrayMap(indexes, function(index) { + return isIndex(index, length) ? +index : index; + }).sort(compareAscending)); + + return result; + }); + + /** + * Removes all elements from `array` that `predicate` returns truthy for + * and returns an array of the removed elements. The predicate is invoked + * with three arguments: (value, index, array). + * + * **Note:** Unlike `_.filter`, this method mutates `array`. Use `_.pull` + * to pull elements from an array by value. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new array of removed elements. + * @example + * + * var array = [1, 2, 3, 4]; + * var evens = _.remove(array, function(n) { + * return n % 2 == 0; + * }); + * + * console.log(array); + * // => [1, 3] + * + * console.log(evens); + * // => [2, 4] + */ + function remove(array, predicate) { + var result = []; + if (!(array && array.length)) { + return result; + } + var index = -1, + indexes = [], + length = array.length; + + predicate = getIteratee(predicate, 3); + while (++index < length) { + var value = array[index]; + if (predicate(value, index, array)) { + result.push(value); + indexes.push(index); + } + } + basePullAt(array, indexes); + return result; + } + + /** + * Reverses `array` so that the first element becomes the last, the second + * element becomes the second to last, and so on. + * + * **Note:** This method mutates `array` and is based on + * [`Array#reverse`](https://mdn.io/Array/reverse). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3]; + * + * _.reverse(array); + * // => [3, 2, 1] + * + * console.log(array); + * // => [3, 2, 1] + */ + function reverse(array) { + return array == null ? array : nativeReverse.call(array); + } + + /** + * Creates a slice of `array` from `start` up to, but not including, `end`. + * + * **Note:** This method is used instead of + * [`Array#slice`](https://mdn.io/Array/slice) to ensure dense arrays are + * returned. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function slice(array, start, end) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + if (end && typeof end != 'number' && isIterateeCall(array, start, end)) { + start = 0; + end = length; + } + else { + start = start == null ? 0 : toInteger(start); + end = end === undefined ? length : toInteger(end); + } + return baseSlice(array, start, end); + } + + /** + * Uses a binary search to determine the lowest index at which `value` + * should be inserted into `array` in order to maintain its sort order. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The sorted array to inspect. 
+ * @param {*} value The value to evaluate. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * _.sortedIndex([30, 50], 40); + * // => 1 + */ + function sortedIndex(array, value) { + return baseSortedIndex(array, value); + } + + /** + * This method is like `_.sortedIndex` except that it accepts `iteratee` + * which is invoked for `value` and each element of `array` to compute their + * sort ranking. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * var objects = [{ 'x': 4 }, { 'x': 5 }]; + * + * _.sortedIndexBy(objects, { 'x': 4 }, function(o) { return o.x; }); + * // => 0 + * + * // The `_.property` iteratee shorthand. + * _.sortedIndexBy(objects, { 'x': 4 }, 'x'); + * // => 0 + */ + function sortedIndexBy(array, value, iteratee) { + return baseSortedIndexBy(array, value, getIteratee(iteratee, 2)); + } + + /** + * This method is like `_.indexOf` except that it performs a binary + * search on a sorted `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.sortedIndexOf([4, 5, 5, 5, 6], 5); + * // => 1 + */ + function sortedIndexOf(array, value) { + var length = array == null ? 0 : array.length; + if (length) { + var index = baseSortedIndex(array, value); + if (index < length && eq(array[index], value)) { + return index; + } + } + return -1; + } + + /** + * This method is like `_.sortedIndex` except that it returns the highest + * index at which `value` should be inserted into `array` in order to + * maintain its sort order. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * _.sortedLastIndex([4, 5, 5, 5, 6], 5); + * // => 4 + */ + function sortedLastIndex(array, value) { + return baseSortedIndex(array, value, true); + } + + /** + * This method is like `_.sortedLastIndex` except that it accepts `iteratee` + * which is invoked for `value` and each element of `array` to compute their + * sort ranking. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * var objects = [{ 'x': 4 }, { 'x': 5 }]; + * + * _.sortedLastIndexBy(objects, { 'x': 4 }, function(o) { return o.x; }); + * // => 1 + * + * // The `_.property` iteratee shorthand. 
+ * _.sortedLastIndexBy(objects, { 'x': 4 }, 'x'); + * // => 1 + */ + function sortedLastIndexBy(array, value, iteratee) { + return baseSortedIndexBy(array, value, getIteratee(iteratee, 2), true); + } + + /** + * This method is like `_.lastIndexOf` except that it performs a binary + * search on a sorted `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.sortedLastIndexOf([4, 5, 5, 5, 6], 5); + * // => 3 + */ + function sortedLastIndexOf(array, value) { + var length = array == null ? 0 : array.length; + if (length) { + var index = baseSortedIndex(array, value, true) - 1; + if (eq(array[index], value)) { + return index; + } + } + return -1; + } + + /** + * This method is like `_.uniq` except that it's designed and optimized + * for sorted arrays. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.sortedUniq([1, 1, 2]); + * // => [1, 2] + */ + function sortedUniq(array) { + return (array && array.length) + ? baseSortedUniq(array) + : []; + } + + /** + * This method is like `_.uniqBy` except that it's designed and optimized + * for sorted arrays. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.sortedUniqBy([1.1, 1.2, 2.3, 2.4], Math.floor); + * // => [1.1, 2.3] + */ + function sortedUniqBy(array, iteratee) { + return (array && array.length) + ? baseSortedUniq(array, getIteratee(iteratee, 2)) + : []; + } + + /** + * Gets all but the first element of `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to query. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.tail([1, 2, 3]); + * // => [2, 3] + */ + function tail(array) { + var length = array == null ? 0 : array.length; + return length ? baseSlice(array, 1, length) : []; + } + + /** + * Creates a slice of `array` with `n` elements taken from the beginning. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to take. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.take([1, 2, 3]); + * // => [1] + * + * _.take([1, 2, 3], 2); + * // => [1, 2] + * + * _.take([1, 2, 3], 5); + * // => [1, 2, 3] + * + * _.take([1, 2, 3], 0); + * // => [] + */ + function take(array, n, guard) { + if (!(array && array.length)) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + return baseSlice(array, 0, n < 0 ? 0 : n); + } + + /** + * Creates a slice of `array` with `n` elements taken from the end. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to take. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. 
+ * @example + * + * _.takeRight([1, 2, 3]); + * // => [3] + * + * _.takeRight([1, 2, 3], 2); + * // => [2, 3] + * + * _.takeRight([1, 2, 3], 5); + * // => [1, 2, 3] + * + * _.takeRight([1, 2, 3], 0); + * // => [] + */ + function takeRight(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + n = length - n; + return baseSlice(array, n < 0 ? 0 : n, length); + } + + /** + * Creates a slice of `array` with elements taken from the end. Elements are + * taken until `predicate` returns falsey. The predicate is invoked with + * three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.takeRightWhile(users, function(o) { return !o.active; }); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.matches` iteratee shorthand. + * _.takeRightWhile(users, { 'user': 'pebbles', 'active': false }); + * // => objects for ['pebbles'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.takeRightWhile(users, ['active', false]); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.property` iteratee shorthand. + * _.takeRightWhile(users, 'active'); + * // => [] + */ + function takeRightWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), false, true) + : []; + } + + /** + * Creates a slice of `array` with elements taken from the beginning. Elements + * are taken until `predicate` returns falsey. The predicate is invoked with + * three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.takeWhile(users, function(o) { return !o.active; }); + * // => objects for ['barney', 'fred'] + * + * // The `_.matches` iteratee shorthand. + * _.takeWhile(users, { 'user': 'barney', 'active': false }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.takeWhile(users, ['active', false]); + * // => objects for ['barney', 'fred'] + * + * // The `_.property` iteratee shorthand. + * _.takeWhile(users, 'active'); + * // => [] + */ + function takeWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3)) + : []; + } + + /** + * Creates an array of unique values, in order, from all given arrays using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of combined values. 
+ * @example + * + * _.union([2], [1, 2]); + * // => [2, 1] + */ + var union = baseRest(function(arrays) { + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true)); + }); + + /** + * This method is like `_.union` except that it accepts `iteratee` which is + * invoked for each element of each `arrays` to generate the criterion by + * which uniqueness is computed. Result values are chosen from the first + * array in which the value occurs. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of combined values. + * @example + * + * _.unionBy([2.1], [1.2, 2.3], Math.floor); + * // => [2.1, 1.2] + * + * // The `_.property` iteratee shorthand. + * _.unionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }, { 'x': 2 }] + */ + var unionBy = baseRest(function(arrays) { + var iteratee = last(arrays); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true), getIteratee(iteratee, 2)); + }); + + /** + * This method is like `_.union` except that it accepts `comparator` which + * is invoked to compare elements of `arrays`. Result values are chosen from + * the first array in which the value occurs. The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of combined values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.unionWith(objects, others, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }, { 'x': 1, 'y': 1 }] + */ + var unionWith = baseRest(function(arrays) { + var comparator = last(arrays); + comparator = typeof comparator == 'function' ? comparator : undefined; + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true), undefined, comparator); + }); + + /** + * Creates a duplicate-free version of an array, using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons, in which only the first occurrence of each element + * is kept. The order of result values is determined by the order they occur + * in the array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.uniq([2, 1, 2]); + * // => [2, 1] + */ + function uniq(array) { + return (array && array.length) ? baseUniq(array) : []; + } + + /** + * This method is like `_.uniq` except that it accepts `iteratee` which is + * invoked for each element in `array` to generate the criterion by which + * uniqueness is computed. The order of result values is determined by the + * order they occur in the array. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. 
+ * @example + * + * _.uniqBy([2.1, 1.2, 2.3], Math.floor); + * // => [2.1, 1.2] + * + * // The `_.property` iteratee shorthand. + * _.uniqBy([{ 'x': 1 }, { 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }, { 'x': 2 }] + */ + function uniqBy(array, iteratee) { + return (array && array.length) ? baseUniq(array, getIteratee(iteratee, 2)) : []; + } + + /** + * This method is like `_.uniq` except that it accepts `comparator` which + * is invoked to compare elements of `array`. The order of result values is + * determined by the order they occur in the array.The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.uniqWith(objects, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }] + */ + function uniqWith(array, comparator) { + comparator = typeof comparator == 'function' ? comparator : undefined; + return (array && array.length) ? baseUniq(array, undefined, comparator) : []; + } + + /** + * This method is like `_.zip` except that it accepts an array of grouped + * elements and creates an array regrouping the elements to their pre-zip + * configuration. + * + * @static + * @memberOf _ + * @since 1.2.0 + * @category Array + * @param {Array} array The array of grouped elements to process. + * @returns {Array} Returns the new array of regrouped elements. + * @example + * + * var zipped = _.zip(['a', 'b'], [1, 2], [true, false]); + * // => [['a', 1, true], ['b', 2, false]] + * + * _.unzip(zipped); + * // => [['a', 'b'], [1, 2], [true, false]] + */ + function unzip(array) { + if (!(array && array.length)) { + return []; + } + var length = 0; + array = arrayFilter(array, function(group) { + if (isArrayLikeObject(group)) { + length = nativeMax(group.length, length); + return true; + } + }); + return baseTimes(length, function(index) { + return arrayMap(array, baseProperty(index)); + }); + } + + /** + * This method is like `_.unzip` except that it accepts `iteratee` to specify + * how regrouped values should be combined. The iteratee is invoked with the + * elements of each group: (...group). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Array + * @param {Array} array The array of grouped elements to process. + * @param {Function} [iteratee=_.identity] The function to combine + * regrouped values. + * @returns {Array} Returns the new array of regrouped elements. + * @example + * + * var zipped = _.zip([1, 2], [10, 20], [100, 200]); + * // => [[1, 10, 100], [2, 20, 200]] + * + * _.unzipWith(zipped, _.add); + * // => [3, 30, 300] + */ + function unzipWith(array, iteratee) { + if (!(array && array.length)) { + return []; + } + var result = unzip(array); + if (iteratee == null) { + return result; + } + return arrayMap(result, function(group) { + return apply(iteratee, undefined, group); + }); + } + + /** + * Creates an array excluding all given values using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * **Note:** Unlike `_.pull`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...*} [values] The values to exclude. 
+ * @returns {Array} Returns the new array of filtered values. + * @see _.difference, _.xor + * @example + * + * _.without([2, 1, 2, 3], 1, 2); + * // => [3] + */ + var without = baseRest(function(array, values) { + return isArrayLikeObject(array) + ? baseDifference(array, values) + : []; + }); + + /** + * Creates an array of unique values that is the + * [symmetric difference](https://en.wikipedia.org/wiki/Symmetric_difference) + * of the given arrays. The order of result values is determined by the order + * they occur in the arrays. + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of filtered values. + * @see _.difference, _.without + * @example + * + * _.xor([2, 1], [2, 3]); + * // => [1, 3] + */ + var xor = baseRest(function(arrays) { + return baseXor(arrayFilter(arrays, isArrayLikeObject)); + }); + + /** + * This method is like `_.xor` except that it accepts `iteratee` which is + * invoked for each element of each `arrays` to generate the criterion by + * which by which they're compared. The order of result values is determined + * by the order they occur in the arrays. The iteratee is invoked with one + * argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.xorBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [1.2, 3.4] + * + * // The `_.property` iteratee shorthand. + * _.xorBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 2 }] + */ + var xorBy = baseRest(function(arrays) { + var iteratee = last(arrays); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return baseXor(arrayFilter(arrays, isArrayLikeObject), getIteratee(iteratee, 2)); + }); + + /** + * This method is like `_.xor` except that it accepts `comparator` which is + * invoked to compare elements of `arrays`. The order of result values is + * determined by the order they occur in the arrays. The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.xorWith(objects, others, _.isEqual); + * // => [{ 'x': 2, 'y': 1 }, { 'x': 1, 'y': 1 }] + */ + var xorWith = baseRest(function(arrays) { + var comparator = last(arrays); + comparator = typeof comparator == 'function' ? comparator : undefined; + return baseXor(arrayFilter(arrays, isArrayLikeObject), undefined, comparator); + }); + + /** + * Creates an array of grouped elements, the first of which contains the + * first elements of the given arrays, the second of which contains the + * second elements of the given arrays, and so on. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to process. + * @returns {Array} Returns the new array of grouped elements. 
+ * @example + * + * _.zip(['a', 'b'], [1, 2], [true, false]); + * // => [['a', 1, true], ['b', 2, false]] + */ + var zip = baseRest(unzip); + + /** + * This method is like `_.fromPairs` except that it accepts two arrays, + * one of property identifiers and one of corresponding values. + * + * @static + * @memberOf _ + * @since 0.4.0 + * @category Array + * @param {Array} [props=[]] The property identifiers. + * @param {Array} [values=[]] The property values. + * @returns {Object} Returns the new object. + * @example + * + * _.zipObject(['a', 'b'], [1, 2]); + * // => { 'a': 1, 'b': 2 } + */ + function zipObject(props, values) { + return baseZipObject(props || [], values || [], assignValue); + } + + /** + * This method is like `_.zipObject` except that it supports property paths. + * + * @static + * @memberOf _ + * @since 4.1.0 + * @category Array + * @param {Array} [props=[]] The property identifiers. + * @param {Array} [values=[]] The property values. + * @returns {Object} Returns the new object. + * @example + * + * _.zipObjectDeep(['a.b[0].c', 'a.b[1].d'], [1, 2]); + * // => { 'a': { 'b': [{ 'c': 1 }, { 'd': 2 }] } } + */ + function zipObjectDeep(props, values) { + return baseZipObject(props || [], values || [], baseSet); + } + + /** + * This method is like `_.zip` except that it accepts `iteratee` to specify + * how grouped values should be combined. The iteratee is invoked with the + * elements of each group: (...group). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Array + * @param {...Array} [arrays] The arrays to process. + * @param {Function} [iteratee=_.identity] The function to combine + * grouped values. + * @returns {Array} Returns the new array of grouped elements. + * @example + * + * _.zipWith([1, 2], [10, 20], [100, 200], function(a, b, c) { + * return a + b + c; + * }); + * // => [111, 222] + */ + var zipWith = baseRest(function(arrays) { + var length = arrays.length, + iteratee = length > 1 ? arrays[length - 1] : undefined; + + iteratee = typeof iteratee == 'function' ? (arrays.pop(), iteratee) : undefined; + return unzipWith(arrays, iteratee); + }); + + /*------------------------------------------------------------------------*/ + + /** + * Creates a `lodash` wrapper instance that wraps `value` with explicit method + * chain sequences enabled. The result of such sequences must be unwrapped + * with `_#value`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Seq + * @param {*} value The value to wrap. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'pebbles', 'age': 1 } + * ]; + * + * var youngest = _ + * .chain(users) + * .sortBy('age') + * .map(function(o) { + * return o.user + ' is ' + o.age; + * }) + * .head() + * .value(); + * // => 'pebbles is 1' + */ + function chain(value) { + var result = lodash(value); + result.__chain__ = true; + return result; + } + + /** + * This method invokes `interceptor` and returns `value`. The interceptor + * is invoked with one argument; (value). The purpose of this method is to + * "tap into" a method chain sequence in order to modify intermediate results. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns `value`. + * @example + * + * _([1, 2, 3]) + * .tap(function(array) { + * // Mutate input array. 
+ * array.pop(); + * }) + * .reverse() + * .value(); + * // => [2, 1] + */ + function tap(value, interceptor) { + interceptor(value); + return value; + } + + /** + * This method is like `_.tap` except that it returns the result of `interceptor`. + * The purpose of this method is to "pass thru" values replacing intermediate + * results in a method chain sequence. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns the result of `interceptor`. + * @example + * + * _(' abc ') + * .chain() + * .trim() + * .thru(function(value) { + * return [value]; + * }) + * .value(); + * // => ['abc'] + */ + function thru(value, interceptor) { + return interceptor(value); + } + + /** + * This method is the wrapper version of `_.at`. + * + * @name at + * @memberOf _ + * @since 1.0.0 + * @category Seq + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }, 4] }; + * + * _(object).at(['a[0].b.c', 'a[1]']).value(); + * // => [3, 4] + */ + var wrapperAt = flatRest(function(paths) { + var length = paths.length, + start = length ? paths[0] : 0, + value = this.__wrapped__, + interceptor = function(object) { return baseAt(object, paths); }; + + if (length > 1 || this.__actions__.length || + !(value instanceof LazyWrapper) || !isIndex(start)) { + return this.thru(interceptor); + } + value = value.slice(start, +start + (length ? 1 : 0)); + value.__actions__.push({ + 'func': thru, + 'args': [interceptor], + 'thisArg': undefined + }); + return new LodashWrapper(value, this.__chain__).thru(function(array) { + if (length && !array.length) { + array.push(undefined); + } + return array; + }); + }); + + /** + * Creates a `lodash` wrapper instance with explicit method chain sequences enabled. + * + * @name chain + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 } + * ]; + * + * // A sequence without explicit chaining. + * _(users).head(); + * // => { 'user': 'barney', 'age': 36 } + * + * // A sequence with explicit chaining. + * _(users) + * .chain() + * .head() + * .pick('user') + * .value(); + * // => { 'user': 'barney' } + */ + function wrapperChain() { + return chain(this); + } + + /** + * Executes the chain sequence and returns the wrapped result. + * + * @name commit + * @memberOf _ + * @since 3.2.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var array = [1, 2]; + * var wrapped = _(array).push(3); + * + * console.log(array); + * // => [1, 2] + * + * wrapped = wrapped.commit(); + * console.log(array); + * // => [1, 2, 3] + * + * wrapped.last(); + * // => 3 + * + * console.log(array); + * // => [1, 2, 3] + */ + function wrapperCommit() { + return new LodashWrapper(this.value(), this.__chain__); + } + + /** + * Gets the next value on a wrapped object following the + * [iterator protocol](https://mdn.io/iteration_protocols#iterator). + * + * @name next + * @memberOf _ + * @since 4.0.0 + * @category Seq + * @returns {Object} Returns the next iterator value. 
+ * @example + * + * var wrapped = _([1, 2]); + * + * wrapped.next(); + * // => { 'done': false, 'value': 1 } + * + * wrapped.next(); + * // => { 'done': false, 'value': 2 } + * + * wrapped.next(); + * // => { 'done': true, 'value': undefined } + */ + function wrapperNext() { + if (this.__values__ === undefined) { + this.__values__ = toArray(this.value()); + } + var done = this.__index__ >= this.__values__.length, + value = done ? undefined : this.__values__[this.__index__++]; + + return { 'done': done, 'value': value }; + } + + /** + * Enables the wrapper to be iterable. + * + * @name Symbol.iterator + * @memberOf _ + * @since 4.0.0 + * @category Seq + * @returns {Object} Returns the wrapper object. + * @example + * + * var wrapped = _([1, 2]); + * + * wrapped[Symbol.iterator]() === wrapped; + * // => true + * + * Array.from(wrapped); + * // => [1, 2] + */ + function wrapperToIterator() { + return this; + } + + /** + * Creates a clone of the chain sequence planting `value` as the wrapped value. + * + * @name plant + * @memberOf _ + * @since 3.2.0 + * @category Seq + * @param {*} value The value to plant. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * function square(n) { + * return n * n; + * } + * + * var wrapped = _([1, 2]).map(square); + * var other = wrapped.plant([3, 4]); + * + * other.value(); + * // => [9, 16] + * + * wrapped.value(); + * // => [1, 4] + */ + function wrapperPlant(value) { + var result, + parent = this; + + while (parent instanceof baseLodash) { + var clone = wrapperClone(parent); + clone.__index__ = 0; + clone.__values__ = undefined; + if (result) { + previous.__wrapped__ = clone; + } else { + result = clone; + } + var previous = clone; + parent = parent.__wrapped__; + } + previous.__wrapped__ = value; + return result; + } + + /** + * This method is the wrapper version of `_.reverse`. + * + * **Note:** This method mutates the wrapped array. + * + * @name reverse + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var array = [1, 2, 3]; + * + * _(array).reverse().value() + * // => [3, 2, 1] + * + * console.log(array); + * // => [3, 2, 1] + */ + function wrapperReverse() { + var value = this.__wrapped__; + if (value instanceof LazyWrapper) { + var wrapped = value; + if (this.__actions__.length) { + wrapped = new LazyWrapper(this); + } + wrapped = wrapped.reverse(); + wrapped.__actions__.push({ + 'func': thru, + 'args': [reverse], + 'thisArg': undefined + }); + return new LodashWrapper(wrapped, this.__chain__); + } + return this.thru(reverse); + } + + /** + * Executes the chain sequence to resolve the unwrapped value. + * + * @name value + * @memberOf _ + * @since 0.1.0 + * @alias toJSON, valueOf + * @category Seq + * @returns {*} Returns the resolved unwrapped value. + * @example + * + * _([1, 2, 3]).value(); + * // => [1, 2, 3] + */ + function wrapperValue() { + return baseWrapperValue(this.__wrapped__, this.__actions__); + } + + /*------------------------------------------------------------------------*/ + + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The corresponding value of + * each key is the number of times the key was returned by `iteratee`. The + * iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.countBy([6.1, 4.2, 6.3], Math.floor); + * // => { '4': 1, '6': 2 } + * + * // The `_.property` iteratee shorthand. + * _.countBy(['one', 'two', 'three'], 'length'); + * // => { '3': 2, '5': 1 } + */ + var countBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + ++result[key]; + } else { + baseAssignValue(result, key, 1); + } + }); + + /** + * Checks if `predicate` returns truthy for **all** elements of `collection`. + * Iteration is stopped once `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * **Note:** This method returns `true` for + * [empty collections](https://en.wikipedia.org/wiki/Empty_set) because + * [everything is true](https://en.wikipedia.org/wiki/Vacuous_truth) of + * elements of empty collections. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. + * @example + * + * _.every([true, 1, null, 'yes'], Boolean); + * // => false + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.every(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.every(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.every(users, 'active'); + * // => false + */ + function every(collection, predicate, guard) { + var func = isArray(collection) ? arrayEvery : baseEvery; + if (guard && isIterateeCall(collection, predicate, guard)) { + predicate = undefined; + } + return func(collection, getIteratee(predicate, 3)); + } + + /** + * Iterates over elements of `collection`, returning an array of all elements + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * **Note:** Unlike `_.remove`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.reject + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * _.filter(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.filter(users, { 'age': 36, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.filter(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. + * _.filter(users, 'active'); + * // => objects for ['barney'] + * + * // Combining several predicates using `_.overEvery` or `_.overSome`. 
+ * _.filter(users, _.overSome([{ 'age': 36 }, ['age', 40]])); + * // => objects for ['fred', 'barney'] + */ + function filter(collection, predicate) { + var func = isArray(collection) ? arrayFilter : baseFilter; + return func(collection, getIteratee(predicate, 3)); + } + + /** + * Iterates over elements of `collection`, returning the first element + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false }, + * { 'user': 'pebbles', 'age': 1, 'active': true } + * ]; + * + * _.find(users, function(o) { return o.age < 40; }); + * // => object for 'barney' + * + * // The `_.matches` iteratee shorthand. + * _.find(users, { 'age': 1, 'active': true }); + * // => object for 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.find(users, ['active', false]); + * // => object for 'fred' + * + * // The `_.property` iteratee shorthand. + * _.find(users, 'active'); + * // => object for 'barney' + */ + var find = createFind(findIndex); + + /** + * This method is like `_.find` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=collection.length-1] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * _.findLast([1, 2, 3, 4], function(n) { + * return n % 2 == 1; + * }); + * // => 3 + */ + var findLast = createFind(findLastIndex); + + /** + * Creates a flattened array of values by running each element in `collection` + * thru `iteratee` and flattening the mapped results. The iteratee is invoked + * with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [n, n]; + * } + * + * _.flatMap([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ + function flatMap(collection, iteratee) { + return baseFlatten(map(collection, iteratee), 1); + } + + /** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. 
+ * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDeep([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ + function flatMapDeep(collection, iteratee) { + return baseFlatten(map(collection, iteratee), INFINITY); + } + + /** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results up to `depth` times. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDepth([1, 2], duplicate, 2); + * // => [[1, 1], [2, 2]] + */ + function flatMapDepth(collection, iteratee, depth) { + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(map(collection, iteratee), depth); + } + + /** + * Iterates over elements of `collection` and invokes `iteratee` for each element. + * The iteratee is invoked with three arguments: (value, index|key, collection). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * **Note:** As with other "Collections" methods, objects with a "length" + * property are iterated like arrays. To avoid this behavior use `_.forIn` + * or `_.forOwn` for object iteration. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias each + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEachRight + * @example + * + * _.forEach([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `1` then `2`. + * + * _.forEach({ 'a': 1, 'b': 2 }, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ + function forEach(collection, iteratee) { + var func = isArray(collection) ? arrayEach : baseEach; + return func(collection, getIteratee(iteratee, 3)); + } + + /** + * This method is like `_.forEach` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @alias eachRight + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEach + * @example + * + * _.forEachRight([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `2` then `1`. + */ + function forEachRight(collection, iteratee) { + var func = isArray(collection) ? arrayEachRight : baseEachRight; + return func(collection, getIteratee(iteratee, 3)); + } + + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The order of grouped values + * is determined by the order they occur in `collection`. The corresponding + * value of each key is an array of elements responsible for generating the + * key. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.groupBy([6.1, 4.2, 6.3], Math.floor); + * // => { '4': [4.2], '6': [6.1, 6.3] } + * + * // The `_.property` iteratee shorthand. + * _.groupBy(['one', 'two', 'three'], 'length'); + * // => { '3': ['one', 'two'], '5': ['three'] } + */ + var groupBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + result[key].push(value); + } else { + baseAssignValue(result, key, [value]); + } + }); + + /** + * Checks if `value` is in `collection`. If `collection` is a string, it's + * checked for a substring of `value`, otherwise + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * is used for equality comparisons. If `fromIndex` is negative, it's used as + * the offset from the end of `collection`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {boolean} Returns `true` if `value` is found, else `false`. + * @example + * + * _.includes([1, 2, 3], 1); + * // => true + * + * _.includes([1, 2, 3], 1, 2); + * // => false + * + * _.includes({ 'a': 1, 'b': 2 }, 1); + * // => true + * + * _.includes('abcd', 'bc'); + * // => true + */ + function includes(collection, value, fromIndex, guard) { + collection = isArrayLike(collection) ? collection : values(collection); + fromIndex = (fromIndex && !guard) ? toInteger(fromIndex) : 0; + + var length = collection.length; + if (fromIndex < 0) { + fromIndex = nativeMax(length + fromIndex, 0); + } + return isString(collection) + ? (fromIndex <= length && collection.indexOf(value, fromIndex) > -1) + : (!!length && baseIndexOf(collection, value, fromIndex) > -1); + } + + /** + * Invokes the method at `path` of each element in `collection`, returning + * an array of the results of each invoked method. Any additional arguments + * are provided to each invoked method. If `path` is a function, it's invoked + * for, and `this` bound to, each element in `collection`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Array|Function|string} path The path of the method to invoke or + * the function invoked per iteration. + * @param {...*} [args] The arguments to invoke each method with. + * @returns {Array} Returns the array of results. + * @example + * + * _.invokeMap([[5, 1, 7], [3, 2, 1]], 'sort'); + * // => [[1, 5, 7], [1, 2, 3]] + * + * _.invokeMap([123, 456], String.prototype.split, ''); + * // => [['1', '2', '3'], ['4', '5', '6']] + */ + var invokeMap = baseRest(function(collection, path, args) { + var index = -1, + isFunc = typeof path == 'function', + result = isArrayLike(collection) ? Array(collection.length) : []; + + baseEach(collection, function(value) { + result[++index] = isFunc ? apply(path, value, args) : baseInvoke(value, path, args); + }); + return result; + }); + + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The corresponding value of + * each key is the last element responsible for generating the key. 
The + * iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * var array = [ + * { 'dir': 'left', 'code': 97 }, + * { 'dir': 'right', 'code': 100 } + * ]; + * + * _.keyBy(array, function(o) { + * return String.fromCharCode(o.code); + * }); + * // => { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } } + * + * _.keyBy(array, 'dir'); + * // => { 'left': { 'dir': 'left', 'code': 97 }, 'right': { 'dir': 'right', 'code': 100 } } + */ + var keyBy = createAggregator(function(result, value, key) { + baseAssignValue(result, key, value); + }); + + /** + * Creates an array of values by running each element in `collection` thru + * `iteratee`. The iteratee is invoked with three arguments: + * (value, index|key, collection). + * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.every`, `_.filter`, `_.map`, `_.mapValues`, `_.reject`, and `_.some`. + * + * The guarded methods are: + * `ary`, `chunk`, `curry`, `curryRight`, `drop`, `dropRight`, `every`, + * `fill`, `invert`, `parseInt`, `random`, `range`, `rangeRight`, `repeat`, + * `sampleSize`, `slice`, `some`, `sortBy`, `split`, `take`, `takeRight`, + * `template`, `trim`, `trimEnd`, `trimStart`, and `words` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + * @example + * + * function square(n) { + * return n * n; + * } + * + * _.map([4, 8], square); + * // => [16, 64] + * + * _.map({ 'a': 4, 'b': 8 }, square); + * // => [16, 64] (iteration order is not guaranteed) + * + * var users = [ + * { 'user': 'barney' }, + * { 'user': 'fred' } + * ]; + * + * // The `_.property` iteratee shorthand. + * _.map(users, 'user'); + * // => ['barney', 'fred'] + */ + function map(collection, iteratee) { + var func = isArray(collection) ? arrayMap : baseMap; + return func(collection, getIteratee(iteratee, 3)); + } + + /** + * This method is like `_.sortBy` except that it allows specifying the sort + * orders of the iteratees to sort by. If `orders` is unspecified, all values + * are sorted in ascending order. Otherwise, specify an order of "desc" for + * descending or "asc" for ascending sort order of corresponding values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Array[]|Function[]|Object[]|string[]} [iteratees=[_.identity]] + * The iteratees to sort by. + * @param {string[]} [orders] The sort orders of `iteratees`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {Array} Returns the new sorted array. + * @example + * + * var users = [ + * { 'user': 'fred', 'age': 48 }, + * { 'user': 'barney', 'age': 34 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'barney', 'age': 36 } + * ]; + * + * // Sort by `user` in ascending order and by `age` in descending order. 
+ * _.orderBy(users, ['user', 'age'], ['asc', 'desc']); + * // => objects for [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 40]] + */ + function orderBy(collection, iteratees, orders, guard) { + if (collection == null) { + return []; + } + if (!isArray(iteratees)) { + iteratees = iteratees == null ? [] : [iteratees]; + } + orders = guard ? undefined : orders; + if (!isArray(orders)) { + orders = orders == null ? [] : [orders]; + } + return baseOrderBy(collection, iteratees, orders); + } + + /** + * Creates an array of elements split into two groups, the first of which + * contains elements `predicate` returns truthy for, the second of which + * contains elements `predicate` returns falsey for. The predicate is + * invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the array of grouped elements. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': true }, + * { 'user': 'pebbles', 'age': 1, 'active': false } + * ]; + * + * _.partition(users, function(o) { return o.active; }); + * // => objects for [['fred'], ['barney', 'pebbles']] + * + * // The `_.matches` iteratee shorthand. + * _.partition(users, { 'age': 1, 'active': false }); + * // => objects for [['pebbles'], ['barney', 'fred']] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.partition(users, ['active', false]); + * // => objects for [['barney', 'pebbles'], ['fred']] + * + * // The `_.property` iteratee shorthand. + * _.partition(users, 'active'); + * // => objects for [['fred'], ['barney', 'pebbles']] + */ + var partition = createAggregator(function(result, value, key) { + result[key ? 0 : 1].push(value); + }, function() { return [[], []]; }); + + /** + * Reduces `collection` to a value which is the accumulated result of running + * each element in `collection` thru `iteratee`, where each successive + * invocation is supplied the return value of the previous. If `accumulator` + * is not given, the first element of `collection` is used as the initial + * value. The iteratee is invoked with four arguments: + * (accumulator, value, index|key, collection). + * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.reduce`, `_.reduceRight`, and `_.transform`. + * + * The guarded methods are: + * `assign`, `defaults`, `defaultsDeep`, `includes`, `merge`, `orderBy`, + * and `sortBy` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @returns {*} Returns the accumulated value. + * @see _.reduceRight + * @example + * + * _.reduce([1, 2], function(sum, n) { + * return sum + n; + * }, 0); + * // => 3 + * + * _.reduce({ 'a': 1, 'b': 2, 'c': 1 }, function(result, value, key) { + * (result[value] || (result[value] = [])).push(key); + * return result; + * }, {}); + * // => { '1': ['a', 'c'], '2': ['b'] } (iteration order is not guaranteed) + */ + function reduce(collection, iteratee, accumulator) { + var func = isArray(collection) ? 
arrayReduce : baseReduce, + initAccum = arguments.length < 3; + + return func(collection, getIteratee(iteratee, 4), accumulator, initAccum, baseEach); + } + + /** + * This method is like `_.reduce` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @returns {*} Returns the accumulated value. + * @see _.reduce + * @example + * + * var array = [[0, 1], [2, 3], [4, 5]]; + * + * _.reduceRight(array, function(flattened, other) { + * return flattened.concat(other); + * }, []); + * // => [4, 5, 2, 3, 0, 1] + */ + function reduceRight(collection, iteratee, accumulator) { + var func = isArray(collection) ? arrayReduceRight : baseReduce, + initAccum = arguments.length < 3; + + return func(collection, getIteratee(iteratee, 4), accumulator, initAccum, baseEachRight); + } + + /** + * The opposite of `_.filter`; this method returns the elements of `collection` + * that `predicate` does **not** return truthy for. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.filter + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': true } + * ]; + * + * _.reject(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.reject(users, { 'age': 40, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.reject(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. + * _.reject(users, 'active'); + * // => objects for ['barney'] + */ + function reject(collection, predicate) { + var func = isArray(collection) ? arrayFilter : baseFilter; + return func(collection, negate(getIteratee(predicate, 3))); + } + + /** + * Gets a random element from `collection`. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Collection + * @param {Array|Object} collection The collection to sample. + * @returns {*} Returns the random element. + * @example + * + * _.sample([1, 2, 3, 4]); + * // => 2 + */ + function sample(collection) { + var func = isArray(collection) ? arraySample : baseSample; + return func(collection); + } + + /** + * Gets `n` random elements at unique keys from `collection` up to the + * size of `collection`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to sample. + * @param {number} [n=1] The number of elements to sample. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the random elements. + * @example + * + * _.sampleSize([1, 2, 3], 2); + * // => [3, 1] + * + * _.sampleSize([1, 2, 3], 4); + * // => [2, 3, 1] + */ + function sampleSize(collection, n, guard) { + if ((guard ? isIterateeCall(collection, n, guard) : n === undefined)) { + n = 1; + } else { + n = toInteger(n); + } + var func = isArray(collection) ? 
arraySampleSize : baseSampleSize; + return func(collection, n); + } + + /** + * Creates an array of shuffled values, using a version of the + * [Fisher-Yates shuffle](https://en.wikipedia.org/wiki/Fisher-Yates_shuffle). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to shuffle. + * @returns {Array} Returns the new shuffled array. + * @example + * + * _.shuffle([1, 2, 3, 4]); + * // => [4, 1, 3, 2] + */ + function shuffle(collection) { + var func = isArray(collection) ? arrayShuffle : baseShuffle; + return func(collection); + } + + /** + * Gets the size of `collection` by returning its length for array-like + * values or the number of own enumerable string keyed properties for objects. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @returns {number} Returns the collection size. + * @example + * + * _.size([1, 2, 3]); + * // => 3 + * + * _.size({ 'a': 1, 'b': 2 }); + * // => 2 + * + * _.size('pebbles'); + * // => 7 + */ + function size(collection) { + if (collection == null) { + return 0; + } + if (isArrayLike(collection)) { + return isString(collection) ? stringSize(collection) : collection.length; + } + var tag = getTag(collection); + if (tag == mapTag || tag == setTag) { + return collection.size; + } + return baseKeys(collection).length; + } + + /** + * Checks if `predicate` returns truthy for **any** element of `collection`. + * Iteration is stopped once `predicate` returns truthy. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + * @example + * + * _.some([null, 0, 'yes', false], Boolean); + * // => true + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.some(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.some(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.some(users, 'active'); + * // => true + */ + function some(collection, predicate, guard) { + var func = isArray(collection) ? arraySome : baseSome; + if (guard && isIterateeCall(collection, predicate, guard)) { + predicate = undefined; + } + return func(collection, getIteratee(predicate, 3)); + } + + /** + * Creates an array of elements, sorted in ascending order by the results of + * running each element in a collection thru each iteratee. This method + * performs a stable sort, that is, it preserves the original sort order of + * equal elements. The iteratees are invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {...(Function|Function[])} [iteratees=[_.identity]] + * The iteratees to sort by. + * @returns {Array} Returns the new sorted array. 
+ * @example + * + * var users = [ + * { 'user': 'fred', 'age': 48 }, + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 30 }, + * { 'user': 'barney', 'age': 34 } + * ]; + * + * _.sortBy(users, [function(o) { return o.user; }]); + * // => objects for [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 30]] + * + * _.sortBy(users, ['user', 'age']); + * // => objects for [['barney', 34], ['barney', 36], ['fred', 30], ['fred', 48]] + */ + var sortBy = baseRest(function(collection, iteratees) { + if (collection == null) { + return []; + } + var length = iteratees.length; + if (length > 1 && isIterateeCall(collection, iteratees[0], iteratees[1])) { + iteratees = []; + } else if (length > 2 && isIterateeCall(iteratees[0], iteratees[1], iteratees[2])) { + iteratees = [iteratees[0]]; + } + return baseOrderBy(collection, baseFlatten(iteratees, 1), []); + }); + + /*------------------------------------------------------------------------*/ + + /** + * Gets the timestamp of the number of milliseconds that have elapsed since + * the Unix epoch (1 January 1970 00:00:00 UTC). + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Date + * @returns {number} Returns the timestamp. + * @example + * + * _.defer(function(stamp) { + * console.log(_.now() - stamp); + * }, _.now()); + * // => Logs the number of milliseconds it took for the deferred invocation. + */ + var now = ctxNow || function() { + return root.Date.now(); + }; + + /*------------------------------------------------------------------------*/ + + /** + * The opposite of `_.before`; this method creates a function that invokes + * `func` once it's called `n` or more times. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {number} n The number of calls before `func` is invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var saves = ['profile', 'settings']; + * + * var done = _.after(saves.length, function() { + * console.log('done saving!'); + * }); + * + * _.forEach(saves, function(type) { + * asyncSave({ 'type': type, 'complete': done }); + * }); + * // => Logs 'done saving!' after the two async saves have completed. + */ + function after(n, func) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n < 1) { + return func.apply(this, arguments); + } + }; + } + + /** + * Creates a function that invokes `func`, with up to `n` arguments, + * ignoring any additional arguments. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to cap arguments for. + * @param {number} [n=func.length] The arity cap. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new capped function. + * @example + * + * _.map(['6', '8', '10'], _.ary(parseInt, 1)); + * // => [6, 8, 10] + */ + function ary(func, n, guard) { + n = guard ? undefined : n; + n = (func && n == null) ? func.length : n; + return createWrap(func, WRAP_ARY_FLAG, undefined, undefined, undefined, undefined, n); + } + + /** + * Creates a function that invokes `func`, with the `this` binding and arguments + * of the created function, while it's called less than `n` times. Subsequent + * calls to the created function return the result of the last `func` invocation. 
+ * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {number} n The number of calls at which `func` is no longer invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * jQuery(element).on('click', _.before(5, addContactToList)); + * // => Allows adding up to 4 contacts to the list. + */ + function before(n, func) { + var result; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n > 0) { + result = func.apply(this, arguments); + } + if (n <= 1) { + func = undefined; + } + return result; + }; + } + + /** + * Creates a function that invokes `func` with the `this` binding of `thisArg` + * and `partials` prepended to the arguments it receives. + * + * The `_.bind.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for partially applied arguments. + * + * **Note:** Unlike native `Function#bind`, this method doesn't set the "length" + * property of bound functions. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to bind. + * @param {*} thisArg The `this` binding of `func`. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * function greet(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * + * var object = { 'user': 'fred' }; + * + * var bound = _.bind(greet, object, 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * // Bound with placeholders. + * var bound = _.bind(greet, object, _, '!'); + * bound('hi'); + * // => 'hi fred!' + */ + var bind = baseRest(function(func, thisArg, partials) { + var bitmask = WRAP_BIND_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bind)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(func, bitmask, thisArg, partials, holders); + }); + + /** + * Creates a function that invokes the method at `object[key]` with `partials` + * prepended to the arguments it receives. + * + * This method differs from `_.bind` by allowing bound functions to reference + * methods that may be redefined or don't yet exist. See + * [Peter Michaux's article](http://peter.michaux.ca/articles/lazy-function-definition-pattern) + * for more details. + * + * The `_.bindKey.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Function + * @param {Object} object The object to invoke the method on. + * @param {string} key The key of the method. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * var object = { + * 'user': 'fred', + * 'greet': function(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * }; + * + * var bound = _.bindKey(object, 'greet', 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * object.greet = function(greeting, punctuation) { + * return greeting + 'ya ' + this.user + punctuation; + * }; + * + * bound('!'); + * // => 'hiya fred!' + * + * // Bound with placeholders. + * var bound = _.bindKey(object, 'greet', _, '!'); + * bound('hi'); + * // => 'hiya fred!' 
+ */ + var bindKey = baseRest(function(object, key, partials) { + var bitmask = WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bindKey)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(key, bitmask, object, partials, holders); + }); + + /** + * Creates a function that accepts arguments of `func` and either invokes + * `func` returning its result, if at least `arity` number of arguments have + * been provided, or returns a function that accepts the remaining `func` + * arguments, and so on. The arity of `func` may be specified if `func.length` + * is not sufficient. + * + * The `_.curry.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. + * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curry(abc); + * + * curried(1)(2)(3); + * // => [1, 2, 3] + * + * curried(1, 2)(3); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(1)(_, 3)(2); + * // => [1, 2, 3] + */ + function curry(func, arity, guard) { + arity = guard ? undefined : arity; + var result = createWrap(func, WRAP_CURRY_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curry.placeholder; + return result; + } + + /** + * This method is like `_.curry` except that arguments are applied to `func` + * in the manner of `_.partialRight` instead of `_.partial`. + * + * The `_.curryRight.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. + * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curryRight(abc); + * + * curried(3)(2)(1); + * // => [1, 2, 3] + * + * curried(2, 3)(1); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(3)(1, _)(2); + * // => [1, 2, 3] + */ + function curryRight(func, arity, guard) { + arity = guard ? undefined : arity; + var result = createWrap(func, WRAP_CURRY_RIGHT_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curryRight.placeholder; + return result; + } + + /** + * Creates a debounced function that delays invoking `func` until after `wait` + * milliseconds have elapsed since the last time the debounced function was + * invoked. The debounced function comes with a `cancel` method to cancel + * delayed `func` invocations and a `flush` method to immediately invoke them. 
+ * Provide `options` to indicate whether `func` should be invoked on the + * leading and/or trailing edge of the `wait` timeout. The `func` is invoked + * with the last arguments provided to the debounced function. Subsequent + * calls to the debounced function return the result of the last `func` + * invocation. + * + * **Note:** If `leading` and `trailing` options are `true`, `func` is + * invoked on the trailing edge of the timeout only if the debounced function + * is invoked more than once during the `wait` timeout. + * + * If `wait` is `0` and `leading` is `false`, `func` invocation is deferred + * until to the next tick, similar to `setTimeout` with a timeout of `0`. + * + * See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/) + * for details over the differences between `_.debounce` and `_.throttle`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to debounce. + * @param {number} [wait=0] The number of milliseconds to delay. + * @param {Object} [options={}] The options object. + * @param {boolean} [options.leading=false] + * Specify invoking on the leading edge of the timeout. + * @param {number} [options.maxWait] + * The maximum time `func` is allowed to be delayed before it's invoked. + * @param {boolean} [options.trailing=true] + * Specify invoking on the trailing edge of the timeout. + * @returns {Function} Returns the new debounced function. + * @example + * + * // Avoid costly calculations while the window size is in flux. + * jQuery(window).on('resize', _.debounce(calculateLayout, 150)); + * + * // Invoke `sendMail` when clicked, debouncing subsequent calls. + * jQuery(element).on('click', _.debounce(sendMail, 300, { + * 'leading': true, + * 'trailing': false + * })); + * + * // Ensure `batchLog` is invoked once after 1 second of debounced calls. + * var debounced = _.debounce(batchLog, 250, { 'maxWait': 1000 }); + * var source = new EventSource('/stream'); + * jQuery(source).on('message', debounced); + * + * // Cancel the trailing debounced invocation. + * jQuery(window).on('popstate', debounced.cancel); + */ + function debounce(func, wait, options) { + var lastArgs, + lastThis, + maxWait, + result, + timerId, + lastCallTime, + lastInvokeTime = 0, + leading = false, + maxing = false, + trailing = true; + + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + wait = toNumber(wait) || 0; + if (isObject(options)) { + leading = !!options.leading; + maxing = 'maxWait' in options; + maxWait = maxing ? nativeMax(toNumber(options.maxWait) || 0, wait) : maxWait; + trailing = 'trailing' in options ? !!options.trailing : trailing; + } + + function invokeFunc(time) { + var args = lastArgs, + thisArg = lastThis; + + lastArgs = lastThis = undefined; + lastInvokeTime = time; + result = func.apply(thisArg, args); + return result; + } + + function leadingEdge(time) { + // Reset any `maxWait` timer. + lastInvokeTime = time; + // Start the timer for the trailing edge. + timerId = setTimeout(timerExpired, wait); + // Invoke the leading edge. + return leading ? invokeFunc(time) : result; + } + + function remainingWait(time) { + var timeSinceLastCall = time - lastCallTime, + timeSinceLastInvoke = time - lastInvokeTime, + timeWaiting = wait - timeSinceLastCall; + + return maxing + ? 
nativeMin(timeWaiting, maxWait - timeSinceLastInvoke) + : timeWaiting; + } + + function shouldInvoke(time) { + var timeSinceLastCall = time - lastCallTime, + timeSinceLastInvoke = time - lastInvokeTime; + + // Either this is the first call, activity has stopped and we're at the + // trailing edge, the system time has gone backwards and we're treating + // it as the trailing edge, or we've hit the `maxWait` limit. + return (lastCallTime === undefined || (timeSinceLastCall >= wait) || + (timeSinceLastCall < 0) || (maxing && timeSinceLastInvoke >= maxWait)); + } + + function timerExpired() { + var time = now(); + if (shouldInvoke(time)) { + return trailingEdge(time); + } + // Restart the timer. + timerId = setTimeout(timerExpired, remainingWait(time)); + } + + function trailingEdge(time) { + timerId = undefined; + + // Only invoke if we have `lastArgs` which means `func` has been + // debounced at least once. + if (trailing && lastArgs) { + return invokeFunc(time); + } + lastArgs = lastThis = undefined; + return result; + } + + function cancel() { + if (timerId !== undefined) { + clearTimeout(timerId); + } + lastInvokeTime = 0; + lastArgs = lastCallTime = lastThis = timerId = undefined; + } + + function flush() { + return timerId === undefined ? result : trailingEdge(now()); + } + + function debounced() { + var time = now(), + isInvoking = shouldInvoke(time); + + lastArgs = arguments; + lastThis = this; + lastCallTime = time; + + if (isInvoking) { + if (timerId === undefined) { + return leadingEdge(lastCallTime); + } + if (maxing) { + // Handle invocations in a tight loop. + clearTimeout(timerId); + timerId = setTimeout(timerExpired, wait); + return invokeFunc(lastCallTime); + } + } + if (timerId === undefined) { + timerId = setTimeout(timerExpired, wait); + } + return result; + } + debounced.cancel = cancel; + debounced.flush = flush; + return debounced; + } + + /** + * Defers invoking the `func` until the current call stack has cleared. Any + * additional arguments are provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to defer. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. + * @example + * + * _.defer(function(text) { + * console.log(text); + * }, 'deferred'); + * // => Logs 'deferred' after one millisecond. + */ + var defer = baseRest(function(func, args) { + return baseDelay(func, 1, args); + }); + + /** + * Invokes `func` after `wait` milliseconds. Any additional arguments are + * provided to `func` when it's invoked. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @param {...*} [args] The arguments to invoke `func` with. + * @returns {number} Returns the timer id. + * @example + * + * _.delay(function(text) { + * console.log(text); + * }, 1000, 'later'); + * // => Logs 'later' after one second. + */ + var delay = baseRest(function(func, wait, args) { + return baseDelay(func, toNumber(wait) || 0, args); + }); + + /** + * Creates a function that invokes `func` with arguments reversed. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to flip arguments for. + * @returns {Function} Returns the new flipped function. 
+ * @example + * + * var flipped = _.flip(function() { + * return _.toArray(arguments); + * }); + * + * flipped('a', 'b', 'c', 'd'); + * // => ['d', 'c', 'b', 'a'] + */ + function flip(func) { + return createWrap(func, WRAP_FLIP_FLAG); + } + + /** + * Creates a function that memoizes the result of `func`. If `resolver` is + * provided, it determines the cache key for storing the result based on the + * arguments provided to the memoized function. By default, the first argument + * provided to the memoized function is used as the map cache key. The `func` + * is invoked with the `this` binding of the memoized function. + * + * **Note:** The cache is exposed as the `cache` property on the memoized + * function. Its creation may be customized by replacing the `_.memoize.Cache` + * constructor with one whose instances implement the + * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) + * method interface of `clear`, `delete`, `get`, `has`, and `set`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to have its output memoized. + * @param {Function} [resolver] The function to resolve the cache key. + * @returns {Function} Returns the new memoized function. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * var other = { 'c': 3, 'd': 4 }; + * + * var values = _.memoize(_.values); + * values(object); + * // => [1, 2] + * + * values(other); + * // => [3, 4] + * + * object.a = 2; + * values(object); + * // => [1, 2] + * + * // Modify the result cache. + * values.cache.set(object, ['a', 'b']); + * values(object); + * // => ['a', 'b'] + * + * // Replace `_.memoize.Cache`. + * _.memoize.Cache = WeakMap; + */ + function memoize(func, resolver) { + if (typeof func != 'function' || (resolver != null && typeof resolver != 'function')) { + throw new TypeError(FUNC_ERROR_TEXT); + } + var memoized = function() { + var args = arguments, + key = resolver ? resolver.apply(this, args) : args[0], + cache = memoized.cache; + + if (cache.has(key)) { + return cache.get(key); + } + var result = func.apply(this, args); + memoized.cache = cache.set(key, result) || cache; + return result; + }; + memoized.cache = new (memoize.Cache || MapCache); + return memoized; + } + + // Expose `MapCache`. + memoize.Cache = MapCache; + + /** + * Creates a function that negates the result of the predicate `func`. The + * `func` predicate is invoked with the `this` binding and arguments of the + * created function. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} predicate The predicate to negate. + * @returns {Function} Returns the new negated function. + * @example + * + * function isEven(n) { + * return n % 2 == 0; + * } + * + * _.filter([1, 2, 3, 4, 5, 6], _.negate(isEven)); + * // => [1, 3, 5] + */ + function negate(predicate) { + if (typeof predicate != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return function() { + var args = arguments; + switch (args.length) { + case 0: return !predicate.call(this); + case 1: return !predicate.call(this, args[0]); + case 2: return !predicate.call(this, args[0], args[1]); + case 3: return !predicate.call(this, args[0], args[1], args[2]); + } + return !predicate.apply(this, args); + }; + } + + /** + * Creates a function that is restricted to invoking `func` once. Repeat calls + * to the function return the value of the first invocation. 
The `func` is + * invoked with the `this` binding and arguments of the created function. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var initialize = _.once(createApplication); + * initialize(); + * initialize(); + * // => `createApplication` is invoked once + */ + function once(func) { + return before(2, func); + } + + /** + * Creates a function that invokes `func` with its arguments transformed. + * + * @static + * @since 4.0.0 + * @memberOf _ + * @category Function + * @param {Function} func The function to wrap. + * @param {...(Function|Function[])} [transforms=[_.identity]] + * The argument transforms. + * @returns {Function} Returns the new function. + * @example + * + * function doubled(n) { + * return n * 2; + * } + * + * function square(n) { + * return n * n; + * } + * + * var func = _.overArgs(function(x, y) { + * return [x, y]; + * }, [square, doubled]); + * + * func(9, 3); + * // => [81, 6] + * + * func(10, 5); + * // => [100, 10] + */ + var overArgs = castRest(function(func, transforms) { + transforms = (transforms.length == 1 && isArray(transforms[0])) + ? arrayMap(transforms[0], baseUnary(getIteratee())) + : arrayMap(baseFlatten(transforms, 1), baseUnary(getIteratee())); + + var funcsLength = transforms.length; + return baseRest(function(args) { + var index = -1, + length = nativeMin(args.length, funcsLength); + + while (++index < length) { + args[index] = transforms[index].call(this, args[index]); + } + return apply(func, this, args); + }); + }); + + /** + * Creates a function that invokes `func` with `partials` prepended to the + * arguments it receives. This method is like `_.bind` except it does **not** + * alter the `this` binding. + * + * The `_.partial.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * **Note:** This method doesn't set the "length" property of partially + * applied functions. + * + * @static + * @memberOf _ + * @since 0.2.0 + * @category Function + * @param {Function} func The function to partially apply arguments to. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new partially applied function. + * @example + * + * function greet(greeting, name) { + * return greeting + ' ' + name; + * } + * + * var sayHelloTo = _.partial(greet, 'hello'); + * sayHelloTo('fred'); + * // => 'hello fred' + * + * // Partially applied with placeholders. + * var greetFred = _.partial(greet, _, 'fred'); + * greetFred('hi'); + * // => 'hi fred' + */ + var partial = baseRest(function(func, partials) { + var holders = replaceHolders(partials, getHolder(partial)); + return createWrap(func, WRAP_PARTIAL_FLAG, undefined, partials, holders); + }); + + /** + * This method is like `_.partial` except that partially applied arguments + * are appended to the arguments it receives. + * + * The `_.partialRight.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * **Note:** This method doesn't set the "length" property of partially + * applied functions. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Function + * @param {Function} func The function to partially apply arguments to. + * @param {...*} [partials] The arguments to be partially applied. 
+ * @returns {Function} Returns the new partially applied function. + * @example + * + * function greet(greeting, name) { + * return greeting + ' ' + name; + * } + * + * var greetFred = _.partialRight(greet, 'fred'); + * greetFred('hi'); + * // => 'hi fred' + * + * // Partially applied with placeholders. + * var sayHelloTo = _.partialRight(greet, 'hello', _); + * sayHelloTo('fred'); + * // => 'hello fred' + */ + var partialRight = baseRest(function(func, partials) { + var holders = replaceHolders(partials, getHolder(partialRight)); + return createWrap(func, WRAP_PARTIAL_RIGHT_FLAG, undefined, partials, holders); + }); + + /** + * Creates a function that invokes `func` with arguments arranged according + * to the specified `indexes` where the argument value at the first index is + * provided as the first argument, the argument value at the second index is + * provided as the second argument, and so on. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to rearrange arguments for. + * @param {...(number|number[])} indexes The arranged argument indexes. + * @returns {Function} Returns the new function. + * @example + * + * var rearged = _.rearg(function(a, b, c) { + * return [a, b, c]; + * }, [2, 0, 1]); + * + * rearged('b', 'c', 'a') + * // => ['a', 'b', 'c'] + */ + var rearg = flatRest(function(func, indexes) { + return createWrap(func, WRAP_REARG_FLAG, undefined, undefined, undefined, indexes); + }); + + /** + * Creates a function that invokes `func` with the `this` binding of the + * created function and arguments from `start` and beyond provided as + * an array. + * + * **Note:** This method is based on the + * [rest parameter](https://mdn.io/rest_parameters). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + * @example + * + * var say = _.rest(function(what, names) { + * return what + ' ' + _.initial(names).join(', ') + + * (_.size(names) > 1 ? ', & ' : '') + _.last(names); + * }); + * + * say('hello', 'fred', 'barney', 'pebbles'); + * // => 'hello fred, barney, & pebbles' + */ + function rest(func, start) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + start = start === undefined ? start : toInteger(start); + return baseRest(func, start); + } + + /** + * Creates a function that invokes `func` with the `this` binding of the + * create function and an array of arguments much like + * [`Function#apply`](http://www.ecma-international.org/ecma-262/7.0/#sec-function.prototype.apply). + * + * **Note:** This method is based on the + * [spread operator](https://mdn.io/spread_operator). + * + * @static + * @memberOf _ + * @since 3.2.0 + * @category Function + * @param {Function} func The function to spread arguments over. + * @param {number} [start=0] The start position of the spread. + * @returns {Function} Returns the new function. 
+ * @example + * + * var say = _.spread(function(who, what) { + * return who + ' says ' + what; + * }); + * + * say(['fred', 'hello']); + * // => 'fred says hello' + * + * var numbers = Promise.all([ + * Promise.resolve(40), + * Promise.resolve(36) + * ]); + * + * numbers.then(_.spread(function(x, y) { + * return x + y; + * })); + * // => a Promise of 76 + */ + function spread(func, start) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + start = start == null ? 0 : nativeMax(toInteger(start), 0); + return baseRest(function(args) { + var array = args[start], + otherArgs = castSlice(args, 0, start); + + if (array) { + arrayPush(otherArgs, array); + } + return apply(func, this, otherArgs); + }); + } + + /** + * Creates a throttled function that only invokes `func` at most once per + * every `wait` milliseconds. The throttled function comes with a `cancel` + * method to cancel delayed `func` invocations and a `flush` method to + * immediately invoke them. Provide `options` to indicate whether `func` + * should be invoked on the leading and/or trailing edge of the `wait` + * timeout. The `func` is invoked with the last arguments provided to the + * throttled function. Subsequent calls to the throttled function return the + * result of the last `func` invocation. + * + * **Note:** If `leading` and `trailing` options are `true`, `func` is + * invoked on the trailing edge of the timeout only if the throttled function + * is invoked more than once during the `wait` timeout. + * + * If `wait` is `0` and `leading` is `false`, `func` invocation is deferred + * until to the next tick, similar to `setTimeout` with a timeout of `0`. + * + * See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/) + * for details over the differences between `_.throttle` and `_.debounce`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to throttle. + * @param {number} [wait=0] The number of milliseconds to throttle invocations to. + * @param {Object} [options={}] The options object. + * @param {boolean} [options.leading=true] + * Specify invoking on the leading edge of the timeout. + * @param {boolean} [options.trailing=true] + * Specify invoking on the trailing edge of the timeout. + * @returns {Function} Returns the new throttled function. + * @example + * + * // Avoid excessively updating the position while scrolling. + * jQuery(window).on('scroll', _.throttle(updatePosition, 100)); + * + * // Invoke `renewToken` when the click event is fired, but not more than once every 5 minutes. + * var throttled = _.throttle(renewToken, 300000, { 'trailing': false }); + * jQuery(element).on('click', throttled); + * + * // Cancel the trailing throttled invocation. + * jQuery(window).on('popstate', throttled.cancel); + */ + function throttle(func, wait, options) { + var leading = true, + trailing = true; + + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + if (isObject(options)) { + leading = 'leading' in options ? !!options.leading : leading; + trailing = 'trailing' in options ? !!options.trailing : trailing; + } + return debounce(func, wait, { + 'leading': leading, + 'maxWait': wait, + 'trailing': trailing + }); + } + + /** + * Creates a function that accepts up to one argument, ignoring any + * additional arguments. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Function + * @param {Function} func The function to cap arguments for. 
+ * @returns {Function} Returns the new capped function. + * @example + * + * _.map(['6', '8', '10'], _.unary(parseInt)); + * // => [6, 8, 10] + */ + function unary(func) { + return ary(func, 1); + } + + /** + * Creates a function that provides `value` to `wrapper` as its first + * argument. Any additional arguments provided to the function are appended + * to those provided to the `wrapper`. The wrapper is invoked with the `this` + * binding of the created function. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {*} value The value to wrap. + * @param {Function} [wrapper=identity] The wrapper function. + * @returns {Function} Returns the new function. + * @example + * + * var p = _.wrap(_.escape, function(func, text) { + * return '

<p>' + func(text) + '</p>'; + * }); + * + * p('fred, barney, & pebbles'); + * // => '<p>fred, barney, &amp; pebbles</p>

' + */ + function wrap(value, wrapper) { + return partial(castFunction(wrapper), value); + } + + /*------------------------------------------------------------------------*/ + + /** + * Casts `value` as an array if it's not one. + * + * @static + * @memberOf _ + * @since 4.4.0 + * @category Lang + * @param {*} value The value to inspect. + * @returns {Array} Returns the cast array. + * @example + * + * _.castArray(1); + * // => [1] + * + * _.castArray({ 'a': 1 }); + * // => [{ 'a': 1 }] + * + * _.castArray('abc'); + * // => ['abc'] + * + * _.castArray(null); + * // => [null] + * + * _.castArray(undefined); + * // => [undefined] + * + * _.castArray(); + * // => [] + * + * var array = [1, 2, 3]; + * console.log(_.castArray(array) === array); + * // => true + */ + function castArray() { + if (!arguments.length) { + return []; + } + var value = arguments[0]; + return isArray(value) ? value : [value]; + } + + /** + * Creates a shallow clone of `value`. + * + * **Note:** This method is loosely based on the + * [structured clone algorithm](https://mdn.io/Structured_clone_algorithm) + * and supports cloning arrays, array buffers, booleans, date objects, maps, + * numbers, `Object` objects, regexes, sets, strings, symbols, and typed + * arrays. The own enumerable properties of `arguments` objects are cloned + * as plain objects. An empty object is returned for uncloneable values such + * as error objects, functions, DOM nodes, and WeakMaps. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to clone. + * @returns {*} Returns the cloned value. + * @see _.cloneDeep + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var shallow = _.clone(objects); + * console.log(shallow[0] === objects[0]); + * // => true + */ + function clone(value) { + return baseClone(value, CLONE_SYMBOLS_FLAG); + } + + /** + * This method is like `_.clone` except that it accepts `customizer` which + * is invoked to produce the cloned value. If `customizer` returns `undefined`, + * cloning is handled by the method instead. The `customizer` is invoked with + * up to four arguments; (value [, index|key, object, stack]). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to clone. + * @param {Function} [customizer] The function to customize cloning. + * @returns {*} Returns the cloned value. + * @see _.cloneDeepWith + * @example + * + * function customizer(value) { + * if (_.isElement(value)) { + * return value.cloneNode(false); + * } + * } + * + * var el = _.cloneWith(document.body, customizer); + * + * console.log(el === document.body); + * // => false + * console.log(el.nodeName); + * // => 'BODY' + * console.log(el.childNodes.length); + * // => 0 + */ + function cloneWith(value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseClone(value, CLONE_SYMBOLS_FLAG, customizer); + } + + /** + * This method is like `_.clone` except that it recursively clones `value`. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Lang + * @param {*} value The value to recursively clone. + * @returns {*} Returns the deep cloned value. 
+ * @see _.clone + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var deep = _.cloneDeep(objects); + * console.log(deep[0] === objects[0]); + * // => false + */ + function cloneDeep(value) { + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG); + } + + /** + * This method is like `_.cloneWith` except that it recursively clones `value`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to recursively clone. + * @param {Function} [customizer] The function to customize cloning. + * @returns {*} Returns the deep cloned value. + * @see _.cloneWith + * @example + * + * function customizer(value) { + * if (_.isElement(value)) { + * return value.cloneNode(true); + * } + * } + * + * var el = _.cloneDeepWith(document.body, customizer); + * + * console.log(el === document.body); + * // => false + * console.log(el.nodeName); + * // => 'BODY' + * console.log(el.childNodes.length); + * // => 20 + */ + function cloneDeepWith(value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG, customizer); + } + + /** + * Checks if `object` conforms to `source` by invoking the predicate + * properties of `source` with the corresponding property values of `object`. + * + * **Note:** This method is equivalent to `_.conforms` when `source` is + * partially applied. + * + * @static + * @memberOf _ + * @since 4.14.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property predicates to conform to. + * @returns {boolean} Returns `true` if `object` conforms, else `false`. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * + * _.conformsTo(object, { 'b': function(n) { return n > 1; } }); + * // => true + * + * _.conformsTo(object, { 'b': function(n) { return n > 2; } }); + * // => false + */ + function conformsTo(object, source) { + return source == null || baseConformsTo(object, source, keys(source)); + } + + /** + * Performs a + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * comparison between two values to determine if they are equivalent. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.eq(object, object); + * // => true + * + * _.eq(object, other); + * // => false + * + * _.eq('a', 'a'); + * // => true + * + * _.eq('a', Object('a')); + * // => false + * + * _.eq(NaN, NaN); + * // => true + */ + function eq(value, other) { + return value === other || (value !== value && other !== other); + } + + /** + * Checks if `value` is greater than `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. + * @see _.lt + * @example + * + * _.gt(3, 1); + * // => true + * + * _.gt(3, 3); + * // => false + * + * _.gt(1, 3); + * // => false + */ + var gt = createRelationalOperation(baseGt); + + /** + * Checks if `value` is greater than or equal to `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. 
+ * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than or equal to + * `other`, else `false`. + * @see _.lte + * @example + * + * _.gte(3, 1); + * // => true + * + * _.gte(3, 3); + * // => true + * + * _.gte(1, 3); + * // => false + */ + var gte = createRelationalOperation(function(value, other) { + return value >= other; + }); + + /** + * Checks if `value` is likely an `arguments` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. + * @example + * + * _.isArguments(function() { return arguments; }()); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ + var isArguments = baseIsArguments(function() { return arguments; }()) ? baseIsArguments : function(value) { + return isObjectLike(value) && hasOwnProperty.call(value, 'callee') && + !propertyIsEnumerable.call(value, 'callee'); + }; + + /** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ + var isArray = Array.isArray; + + /** + * Checks if `value` is classified as an `ArrayBuffer` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array buffer, else `false`. + * @example + * + * _.isArrayBuffer(new ArrayBuffer(2)); + * // => true + * + * _.isArrayBuffer(new Array(2)); + * // => false + */ + var isArrayBuffer = nodeIsArrayBuffer ? baseUnary(nodeIsArrayBuffer) : baseIsArrayBuffer; + + /** + * Checks if `value` is array-like. A value is considered array-like if it's + * not a function and has a `value.length` that's an integer greater than or + * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is array-like, else `false`. + * @example + * + * _.isArrayLike([1, 2, 3]); + * // => true + * + * _.isArrayLike(document.body.children); + * // => true + * + * _.isArrayLike('abc'); + * // => true + * + * _.isArrayLike(_.noop); + * // => false + */ + function isArrayLike(value) { + return value != null && isLength(value.length) && !isFunction(value); + } + + /** + * This method is like `_.isArrayLike` except that it also checks if `value` + * is an object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array-like object, + * else `false`. + * @example + * + * _.isArrayLikeObject([1, 2, 3]); + * // => true + * + * _.isArrayLikeObject(document.body.children); + * // => true + * + * _.isArrayLikeObject('abc'); + * // => false + * + * _.isArrayLikeObject(_.noop); + * // => false + */ + function isArrayLikeObject(value) { + return isObjectLike(value) && isArrayLike(value); + } + + /** + * Checks if `value` is classified as a boolean primitive or object. 
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a boolean, else `false`. + * @example + * + * _.isBoolean(false); + * // => true + * + * _.isBoolean(null); + * // => false + */ + function isBoolean(value) { + return value === true || value === false || + (isObjectLike(value) && baseGetTag(value) == boolTag); + } + + /** + * Checks if `value` is a buffer. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. + * @example + * + * _.isBuffer(new Buffer(2)); + * // => true + * + * _.isBuffer(new Uint8Array(2)); + * // => false + */ + var isBuffer = nativeIsBuffer || stubFalse; + + /** + * Checks if `value` is classified as a `Date` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. + * @example + * + * _.isDate(new Date); + * // => true + * + * _.isDate('Mon April 23 2012'); + * // => false + */ + var isDate = nodeIsDate ? baseUnary(nodeIsDate) : baseIsDate; + + /** + * Checks if `value` is likely a DOM element. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a DOM element, else `false`. + * @example + * + * _.isElement(document.body); + * // => true + * + * _.isElement('<body>'); + * // => false + */ + function isElement(value) { + return isObjectLike(value) && value.nodeType === 1 && !isPlainObject(value); + } + + /** + * Checks if `value` is an empty object, collection, map, or set. + * + * Objects are considered empty if they have no own enumerable string keyed + * properties. + * + * Array-like values such as `arguments` objects, arrays, buffers, strings, or + * jQuery-like collections are considered empty if they have a `length` of `0`. + * Similarly, maps and sets are considered empty if they have a `size` of `0`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is empty, else `false`. + * @example + * + * _.isEmpty(null); + * // => true + * + * _.isEmpty(true); + * // => true + * + * _.isEmpty(1); + * // => true + * + * _.isEmpty([1, 2, 3]); + * // => false + * + * _.isEmpty({ 'a': 1 }); + * // => false + */ + function isEmpty(value) { + if (value == null) { + return true; + } + if (isArrayLike(value) && + (isArray(value) || typeof value == 'string' || typeof value.splice == 'function' || + isBuffer(value) || isTypedArray(value) || isArguments(value))) { + return !value.length; + } + var tag = getTag(value); + if (tag == mapTag || tag == setTag) { + return !value.size; + } + if (isPrototype(value)) { + return !baseKeys(value).length; + } + for (var key in value) { + if (hasOwnProperty.call(value, key)) { + return false; + } + } + return true; + } + + /** + * Performs a deep comparison between two values to determine if they are + * equivalent. + * + * **Note:** This method supports comparing arrays, array buffers, booleans, + * date objects, error objects, maps, numbers, `Object` objects, regexes, + * sets, strings, symbols, and typed arrays. `Object` objects are compared + * by their own, not inherited, enumerable properties. 
Functions and DOM + * nodes are compared by strict equality, i.e. `===`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.isEqual(object, other); + * // => true + * + * object === other; + * // => false + */ + function isEqual(value, other) { + return baseIsEqual(value, other); + } + + /** + * This method is like `_.isEqual` except that it accepts `customizer` which + * is invoked to compare values. If `customizer` returns `undefined`, comparisons + * are handled by the method instead. The `customizer` is invoked with up to + * six arguments: (objValue, othValue [, index|key, object, other, stack]). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * function isGreeting(value) { + * return /^h(?:i|ello)$/.test(value); + * } + * + * function customizer(objValue, othValue) { + * if (isGreeting(objValue) && isGreeting(othValue)) { + * return true; + * } + * } + * + * var array = ['hello', 'goodbye']; + * var other = ['hi', 'goodbye']; + * + * _.isEqualWith(array, other, customizer); + * // => true + */ + function isEqualWith(value, other, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + var result = customizer ? customizer(value, other) : undefined; + return result === undefined ? baseIsEqual(value, other, undefined, customizer) : !!result; + } + + /** + * Checks if `value` is an `Error`, `EvalError`, `RangeError`, `ReferenceError`, + * `SyntaxError`, `TypeError`, or `URIError` object. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an error object, else `false`. + * @example + * + * _.isError(new Error); + * // => true + * + * _.isError(Error); + * // => false + */ + function isError(value) { + if (!isObjectLike(value)) { + return false; + } + var tag = baseGetTag(value); + return tag == errorTag || tag == domExcTag || + (typeof value.message == 'string' && typeof value.name == 'string' && !isPlainObject(value)); + } + + /** + * Checks if `value` is a finite primitive number. + * + * **Note:** This method is based on + * [`Number.isFinite`](https://mdn.io/Number/isFinite). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a finite number, else `false`. + * @example + * + * _.isFinite(3); + * // => true + * + * _.isFinite(Number.MIN_VALUE); + * // => true + * + * _.isFinite(Infinity); + * // => false + * + * _.isFinite('3'); + * // => false + */ + function isFinite(value) { + return typeof value == 'number' && nativeIsFinite(value); + } + + /** + * Checks if `value` is classified as a `Function` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a function, else `false`. 
+ * @example + * + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false + */ + function isFunction(value) { + if (!isObject(value)) { + return false; + } + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 9 which returns 'object' for typed arrays and other constructors. + var tag = baseGetTag(value); + return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; + } + + /** + * Checks if `value` is an integer. + * + * **Note:** This method is based on + * [`Number.isInteger`](https://mdn.io/Number/isInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an integer, else `false`. + * @example + * + * _.isInteger(3); + * // => true + * + * _.isInteger(Number.MIN_VALUE); + * // => false + * + * _.isInteger(Infinity); + * // => false + * + * _.isInteger('3'); + * // => false + */ + function isInteger(value) { + return typeof value == 'number' && value == toInteger(value); + } + + /** + * Checks if `value` is a valid array-like length. + * + * **Note:** This method is loosely based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. + * @example + * + * _.isLength(3); + * // => true + * + * _.isLength(Number.MIN_VALUE); + * // => false + * + * _.isLength(Infinity); + * // => false + * + * _.isLength('3'); + * // => false + */ + function isLength(value) { + return typeof value == 'number' && + value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; + } + + /** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ + function isObject(value) { + var type = typeof value; + return value != null && (type == 'object' || type == 'function'); + } + + /** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ + function isObjectLike(value) { + return value != null && typeof value == 'object'; + } + + /** + * Checks if `value` is classified as a `Map` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. + * @example + * + * _.isMap(new Map); + * // => true + * + * _.isMap(new WeakMap); + * // => false + */ + var isMap = nodeIsMap ? 
baseUnary(nodeIsMap) : baseIsMap; + + /** + * Performs a partial deep comparison between `object` and `source` to + * determine if `object` contains equivalent property values. + * + * **Note:** This method is equivalent to `_.matches` when `source` is + * partially applied. + * + * Partial comparisons will match empty array and empty object `source` + * values against any array or object value, respectively. See `_.isEqual` + * for a list of supported value comparisons. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * + * _.isMatch(object, { 'b': 2 }); + * // => true + * + * _.isMatch(object, { 'b': 1 }); + * // => false + */ + function isMatch(object, source) { + return object === source || baseIsMatch(object, source, getMatchData(source)); + } + + /** + * This method is like `_.isMatch` except that it accepts `customizer` which + * is invoked to compare values. If `customizer` returns `undefined`, comparisons + * are handled by the method instead. The `customizer` is invoked with five + * arguments: (objValue, srcValue, index|key, object, source). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + * @example + * + * function isGreeting(value) { + * return /^h(?:i|ello)$/.test(value); + * } + * + * function customizer(objValue, srcValue) { + * if (isGreeting(objValue) && isGreeting(srcValue)) { + * return true; + * } + * } + * + * var object = { 'greeting': 'hello' }; + * var source = { 'greeting': 'hi' }; + * + * _.isMatchWith(object, source, customizer); + * // => true + */ + function isMatchWith(object, source, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseIsMatch(object, source, getMatchData(source), customizer); + } + + /** + * Checks if `value` is `NaN`. + * + * **Note:** This method is based on + * [`Number.isNaN`](https://mdn.io/Number/isNaN) and is not the same as + * global [`isNaN`](https://mdn.io/isNaN) which returns `true` for + * `undefined` and other non-number values. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + * @example + * + * _.isNaN(NaN); + * // => true + * + * _.isNaN(new Number(NaN)); + * // => true + * + * isNaN(undefined); + * // => true + * + * _.isNaN(undefined); + * // => false + */ + function isNaN(value) { + // An `NaN` primitive is the only value that is not equal to itself. + // Perform the `toStringTag` check first to avoid errors with some + // ActiveX objects in IE. + return isNumber(value) && value != +value; + } + + /** + * Checks if `value` is a pristine native function. + * + * **Note:** This method can't reliably detect native functions in the presence + * of the core-js package because core-js circumvents this kind of detection. + * Despite multiple requests, the core-js maintainer has made it clear: any + * attempt to fix the detection will be obstructed. 
As a result, we're left + * with little choice but to throw an error. Unfortunately, this also affects + * packages, like [babel-polyfill](https://www.npmjs.com/package/babel-polyfill), + * which rely on core-js. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. + * @example + * + * _.isNative(Array.prototype.push); + * // => true + * + * _.isNative(_); + * // => false + */ + function isNative(value) { + if (isMaskable(value)) { + throw new Error(CORE_ERROR_TEXT); + } + return baseIsNative(value); + } + + /** + * Checks if `value` is `null`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `null`, else `false`. + * @example + * + * _.isNull(null); + * // => true + * + * _.isNull(void 0); + * // => false + */ + function isNull(value) { + return value === null; + } + + /** + * Checks if `value` is `null` or `undefined`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is nullish, else `false`. + * @example + * + * _.isNil(null); + * // => true + * + * _.isNil(void 0); + * // => true + * + * _.isNil(NaN); + * // => false + */ + function isNil(value) { + return value == null; + } + + /** + * Checks if `value` is classified as a `Number` primitive or object. + * + * **Note:** To exclude `Infinity`, `-Infinity`, and `NaN`, which are + * classified as numbers, use the `_.isFinite` method. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a number, else `false`. + * @example + * + * _.isNumber(3); + * // => true + * + * _.isNumber(Number.MIN_VALUE); + * // => true + * + * _.isNumber(Infinity); + * // => true + * + * _.isNumber('3'); + * // => false + */ + function isNumber(value) { + return typeof value == 'number' || + (isObjectLike(value) && baseGetTag(value) == numberTag); + } + + /** + * Checks if `value` is a plain object, that is, an object created by the + * `Object` constructor or one with a `[[Prototype]]` of `null`. + * + * @static + * @memberOf _ + * @since 0.8.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a plain object, else `false`. + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * _.isPlainObject(new Foo); + * // => false + * + * _.isPlainObject([1, 2, 3]); + * // => false + * + * _.isPlainObject({ 'x': 0, 'y': 0 }); + * // => true + * + * _.isPlainObject(Object.create(null)); + * // => true + */ + function isPlainObject(value) { + if (!isObjectLike(value) || baseGetTag(value) != objectTag) { + return false; + } + var proto = getPrototype(value); + if (proto === null) { + return true; + } + var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor; + return typeof Ctor == 'function' && Ctor instanceof Ctor && + funcToString.call(Ctor) == objectCtorString; + } + + /** + * Checks if `value` is classified as a `RegExp` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. 
+ * @example + * + * _.isRegExp(/abc/); + * // => true + * + * _.isRegExp('/abc/'); + * // => false + */ + var isRegExp = nodeIsRegExp ? baseUnary(nodeIsRegExp) : baseIsRegExp; + + /** + * Checks if `value` is a safe integer. An integer is safe if it's an IEEE-754 + * double precision number which isn't the result of a rounded unsafe integer. + * + * **Note:** This method is based on + * [`Number.isSafeInteger`](https://mdn.io/Number/isSafeInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a safe integer, else `false`. + * @example + * + * _.isSafeInteger(3); + * // => true + * + * _.isSafeInteger(Number.MIN_VALUE); + * // => false + * + * _.isSafeInteger(Infinity); + * // => false + * + * _.isSafeInteger('3'); + * // => false + */ + function isSafeInteger(value) { + return isInteger(value) && value >= -MAX_SAFE_INTEGER && value <= MAX_SAFE_INTEGER; + } + + /** + * Checks if `value` is classified as a `Set` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. + * @example + * + * _.isSet(new Set); + * // => true + * + * _.isSet(new WeakSet); + * // => false + */ + var isSet = nodeIsSet ? baseUnary(nodeIsSet) : baseIsSet; + + /** + * Checks if `value` is classified as a `String` primitive or object. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a string, else `false`. + * @example + * + * _.isString('abc'); + * // => true + * + * _.isString(1); + * // => false + */ + function isString(value) { + return typeof value == 'string' || + (!isArray(value) && isObjectLike(value) && baseGetTag(value) == stringTag); + } + + /** + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. + * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false + */ + function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && baseGetTag(value) == symbolTag); + } + + /** + * Checks if `value` is classified as a typed array. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + * @example + * + * _.isTypedArray(new Uint8Array); + * // => true + * + * _.isTypedArray([]); + * // => false + */ + var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; + + /** + * Checks if `value` is `undefined`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `undefined`, else `false`. + * @example + * + * _.isUndefined(void 0); + * // => true + * + * _.isUndefined(null); + * // => false + */ + function isUndefined(value) { + return value === undefined; + } + + /** + * Checks if `value` is classified as a `WeakMap` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak map, else `false`. 
+ * @example + * + * _.isWeakMap(new WeakMap); + * // => true + * + * _.isWeakMap(new Map); + * // => false + */ + function isWeakMap(value) { + return isObjectLike(value) && getTag(value) == weakMapTag; + } + + /** + * Checks if `value` is classified as a `WeakSet` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak set, else `false`. + * @example + * + * _.isWeakSet(new WeakSet); + * // => true + * + * _.isWeakSet(new Set); + * // => false + */ + function isWeakSet(value) { + return isObjectLike(value) && baseGetTag(value) == weakSetTag; + } + + /** + * Checks if `value` is less than `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. + * @see _.gt + * @example + * + * _.lt(1, 3); + * // => true + * + * _.lt(3, 3); + * // => false + * + * _.lt(3, 1); + * // => false + */ + var lt = createRelationalOperation(baseLt); + + /** + * Checks if `value` is less than or equal to `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than or equal to + * `other`, else `false`. + * @see _.gte + * @example + * + * _.lte(1, 3); + * // => true + * + * _.lte(3, 3); + * // => true + * + * _.lte(3, 1); + * // => false + */ + var lte = createRelationalOperation(function(value, other) { + return value <= other; + }); + + /** + * Converts `value` to an array. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to convert. + * @returns {Array} Returns the converted array. + * @example + * + * _.toArray({ 'a': 1, 'b': 2 }); + * // => [1, 2] + * + * _.toArray('abc'); + * // => ['a', 'b', 'c'] + * + * _.toArray(1); + * // => [] + * + * _.toArray(null); + * // => [] + */ + function toArray(value) { + if (!value) { + return []; + } + if (isArrayLike(value)) { + return isString(value) ? stringToArray(value) : copyArray(value); + } + if (symIterator && value[symIterator]) { + return iteratorToArray(value[symIterator]()); + } + var tag = getTag(value), + func = tag == mapTag ? mapToArray : (tag == setTag ? setToArray : values); + + return func(value); + } + + /** + * Converts `value` to a finite number. + * + * @static + * @memberOf _ + * @since 4.12.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted number. + * @example + * + * _.toFinite(3.2); + * // => 3.2 + * + * _.toFinite(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toFinite(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toFinite('3.2'); + * // => 3.2 + */ + function toFinite(value) { + if (!value) { + return value === 0 ? value : 0; + } + value = toNumber(value); + if (value === INFINITY || value === -INFINITY) { + var sign = (value < 0 ? -1 : 1); + return sign * MAX_INTEGER; + } + return value === value ? value : 0; + } + + /** + * Converts `value` to an integer. + * + * **Note:** This method is loosely based on + * [`ToInteger`](http://www.ecma-international.org/ecma-262/7.0/#sec-tointeger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. 
+ * @example + * + * _.toInteger(3.2); + * // => 3 + * + * _.toInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toInteger(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toInteger('3.2'); + * // => 3 + */ + function toInteger(value) { + var result = toFinite(value), + remainder = result % 1; + + return result === result ? (remainder ? result - remainder : result) : 0; + } + + /** + * Converts `value` to an integer suitable for use as the length of an + * array-like object. + * + * **Note:** This method is based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toLength(3.2); + * // => 3 + * + * _.toLength(Number.MIN_VALUE); + * // => 0 + * + * _.toLength(Infinity); + * // => 4294967295 + * + * _.toLength('3.2'); + * // => 3 + */ + function toLength(value) { + return value ? baseClamp(toInteger(value), 0, MAX_ARRAY_LENGTH) : 0; + } + + /** + * Converts `value` to a number. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to process. + * @returns {number} Returns the number. + * @example + * + * _.toNumber(3.2); + * // => 3.2 + * + * _.toNumber(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toNumber(Infinity); + * // => Infinity + * + * _.toNumber('3.2'); + * // => 3.2 + */ + function toNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + if (isObject(value)) { + var other = typeof value.valueOf == 'function' ? value.valueOf() : value; + value = isObject(other) ? (other + '') : other; + } + if (typeof value != 'string') { + return value === 0 ? value : +value; + } + value = baseTrim(value); + var isBinary = reIsBinary.test(value); + return (isBinary || reIsOctal.test(value)) + ? freeParseInt(value.slice(2), isBinary ? 2 : 8) + : (reIsBadHex.test(value) ? NAN : +value); + } + + /** + * Converts `value` to a plain object flattening inherited enumerable string + * keyed properties of `value` to own properties of the plain object. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {Object} Returns the converted plain object. + * @example + * + * function Foo() { + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.assign({ 'a': 1 }, new Foo); + * // => { 'a': 1, 'b': 2 } + * + * _.assign({ 'a': 1 }, _.toPlainObject(new Foo)); + * // => { 'a': 1, 'b': 2, 'c': 3 } + */ + function toPlainObject(value) { + return copyObject(value, keysIn(value)); + } + + /** + * Converts `value` to a safe integer. A safe integer can be compared and + * represented correctly. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toSafeInteger(3.2); + * // => 3 + * + * _.toSafeInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toSafeInteger(Infinity); + * // => 9007199254740991 + * + * _.toSafeInteger('3.2'); + * // => 3 + */ + function toSafeInteger(value) { + return value + ? baseClamp(toInteger(value), -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER) + : (value === 0 ? value : 0); + } + + /** + * Converts `value` to a string. An empty string is returned for `null` + * and `undefined` values. The sign of `-0` is preserved. 
+ * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.toString(null); + * // => '' + * + * _.toString(-0); + * // => '-0' + * + * _.toString([1, 2, 3]); + * // => '1,2,3' + */ + function toString(value) { + return value == null ? '' : baseToString(value); + } + + /*------------------------------------------------------------------------*/ + + /** + * Assigns own enumerable string keyed properties of source objects to the + * destination object. Source objects are applied from left to right. + * Subsequent sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object` and is loosely based on + * [`Object.assign`](https://mdn.io/Object/assign). + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assignIn + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assign({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'c': 3 } + */ + var assign = createAssigner(function(object, source) { + if (isPrototype(source) || isArrayLike(source)) { + copyObject(source, keys(source), object); + return; + } + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + assignValue(object, key, source[key]); + } + } + }); + + /** + * This method is like `_.assign` except that it iterates over own and + * inherited source properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extend + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assign + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assignIn({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'b': 2, 'c': 3, 'd': 4 } + */ + var assignIn = createAssigner(function(object, source) { + copyObject(source, keysIn(source), object); + }); + + /** + * This method is like `_.assignIn` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extendWith + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? 
srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignInWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var assignInWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keysIn(source), object, customizer); + }); + + /** + * This method is like `_.assign` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignInWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var assignWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keys(source), object, customizer); + }); + + /** + * Creates an array of values corresponding to `paths` of `object`. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Array} Returns the picked values. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }, 4] }; + * + * _.at(object, ['a[0].b.c', 'a[1]']); + * // => [3, 4] + */ + var at = flatRest(baseAt); + + /** + * Creates an object that inherits from the `prototype` object. If a + * `properties` object is given, its own enumerable string keyed properties + * are assigned to the created object. + * + * @static + * @memberOf _ + * @since 2.3.0 + * @category Object + * @param {Object} prototype The object to inherit from. + * @param {Object} [properties] The properties to assign to the object. + * @returns {Object} Returns the new object. + * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * function Circle() { + * Shape.call(this); + * } + * + * Circle.prototype = _.create(Shape.prototype, { + * 'constructor': Circle + * }); + * + * var circle = new Circle; + * circle instanceof Circle; + * // => true + * + * circle instanceof Shape; + * // => true + */ + function create(prototype, properties) { + var result = baseCreate(prototype); + return properties == null ? result : baseAssign(result, properties); + } + + /** + * Assigns own and inherited enumerable string keyed properties of source + * objects to the destination object for all destination properties that + * resolve to `undefined`. Source objects are applied from left to right. + * Once a property is set, additional values of the same property are ignored. + * + * **Note:** This method mutates `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. 
+ * @see _.defaultsDeep + * @example + * + * _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var defaults = baseRest(function(object, sources) { + object = Object(object); + + var index = -1; + var length = sources.length; + var guard = length > 2 ? sources[2] : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + length = 1; + } + + while (++index < length) { + var source = sources[index]; + var props = keysIn(source); + var propsIndex = -1; + var propsLength = props.length; + + while (++propsIndex < propsLength) { + var key = props[propsIndex]; + var value = object[key]; + + if (value === undefined || + (eq(value, objectProto[key]) && !hasOwnProperty.call(object, key))) { + object[key] = source[key]; + } + } + } + + return object; + }); + + /** + * This method is like `_.defaults` except that it recursively assigns + * default properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 3.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaults + * @example + * + * _.defaultsDeep({ 'a': { 'b': 2 } }, { 'a': { 'b': 1, 'c': 3 } }); + * // => { 'a': { 'b': 2, 'c': 3 } } + */ + var defaultsDeep = baseRest(function(args) { + args.push(undefined, customDefaultsMerge); + return apply(mergeWith, undefined, args); + }); + + /** + * This method is like `_.find` except that it returns the key of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. + * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findKey(users, function(o) { return o.age < 40; }); + * // => 'barney' (iteration order is not guaranteed) + * + * // The `_.matches` iteratee shorthand. + * _.findKey(users, { 'age': 1, 'active': true }); + * // => 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. + * _.findKey(users, 'active'); + * // => 'barney' + */ + function findKey(object, predicate) { + return baseFindKey(object, getIteratee(predicate, 3), baseForOwn); + } + + /** + * This method is like `_.findKey` except that it iterates over elements of + * a collection in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. + * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findLastKey(users, function(o) { return o.age < 40; }); + * // => returns 'pebbles' assuming `_.findKey` returns 'barney' + * + * // The `_.matches` iteratee shorthand. 
+ * _.findLastKey(users, { 'age': 36, 'active': true }); + * // => 'barney' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. + * _.findLastKey(users, 'active'); + * // => 'pebbles' + */ + function findLastKey(object, predicate) { + return baseFindKey(object, getIteratee(predicate, 3), baseForOwnRight); + } + + /** + * Iterates over own and inherited enumerable string keyed properties of an + * object and invokes `iteratee` for each property. The iteratee is invoked + * with three arguments: (value, key, object). Iteratee functions may exit + * iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forInRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forIn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a', 'b', then 'c' (iteration order is not guaranteed). + */ + function forIn(object, iteratee) { + return object == null + ? object + : baseFor(object, getIteratee(iteratee, 3), keysIn); + } + + /** + * This method is like `_.forIn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forIn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forInRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'c', 'b', then 'a' assuming `_.forIn` logs 'a', 'b', then 'c'. + */ + function forInRight(object, iteratee) { + return object == null + ? object + : baseForRight(object, getIteratee(iteratee, 3), keysIn); + } + + /** + * Iterates over own enumerable string keyed properties of an object and + * invokes `iteratee` for each property. The iteratee is invoked with three + * arguments: (value, key, object). Iteratee functions may exit iteration + * early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forOwnRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ + function forOwn(object, iteratee) { + return object && baseForOwn(object, getIteratee(iteratee, 3)); + } + + /** + * This method is like `_.forOwn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. 
+ * @see _.forOwn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwnRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'b' then 'a' assuming `_.forOwn` logs 'a' then 'b'. + */ + function forOwnRight(object, iteratee) { + return object && baseForOwnRight(object, getIteratee(iteratee, 3)); + } + + /** + * Creates an array of function property names from own enumerable properties + * of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functionsIn + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functions(new Foo); + * // => ['a', 'b'] + */ + function functions(object) { + return object == null ? [] : baseFunctions(object, keys(object)); + } + + /** + * Creates an array of function property names from own and inherited + * enumerable properties of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functions + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functionsIn(new Foo); + * // => ['a', 'b', 'c'] + */ + function functionsIn(object) { + return object == null ? [] : baseFunctions(object, keysIn(object)); + } + + /** + * Gets the value at `path` of `object`. If the resolved value is + * `undefined`, the `defaultValue` is returned in its place. + * + * @static + * @memberOf _ + * @since 3.7.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to get. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.get(object, 'a[0].b.c'); + * // => 3 + * + * _.get(object, ['a', '0', 'b', 'c']); + * // => 3 + * + * _.get(object, 'a.b.c', 'default'); + * // => 'default' + */ + function get(object, path, defaultValue) { + var result = object == null ? undefined : baseGet(object, path); + return result === undefined ? defaultValue : result; + } + + /** + * Checks if `path` is a direct property of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + * @example + * + * var object = { 'a': { 'b': 2 } }; + * var other = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.has(object, 'a'); + * // => true + * + * _.has(object, 'a.b'); + * // => true + * + * _.has(object, ['a', 'b']); + * // => true + * + * _.has(other, 'a'); + * // => false + */ + function has(object, path) { + return object != null && hasPath(object, path, baseHas); + } + + /** + * Checks if `path` is a direct or inherited property of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. 
+ * @example + * + * var object = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.hasIn(object, 'a'); + * // => true + * + * _.hasIn(object, 'a.b'); + * // => true + * + * _.hasIn(object, ['a', 'b']); + * // => true + * + * _.hasIn(object, 'b'); + * // => false + */ + function hasIn(object, path) { + return object != null && hasPath(object, path, baseHasIn); + } + + /** + * Creates an object composed of the inverted keys and values of `object`. + * If `object` contains duplicate values, subsequent values overwrite + * property assignments of previous values. + * + * @static + * @memberOf _ + * @since 0.7.0 + * @category Object + * @param {Object} object The object to invert. + * @returns {Object} Returns the new inverted object. + * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invert(object); + * // => { '1': 'c', '2': 'b' } + */ + var invert = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); + } + + result[value] = key; + }, constant(identity)); + + /** + * This method is like `_.invert` except that the inverted object is generated + * from the results of running each element of `object` thru `iteratee`. The + * corresponding inverted value of each inverted key is an array of keys + * responsible for generating the inverted value. The iteratee is invoked + * with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.1.0 + * @category Object + * @param {Object} object The object to invert. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Object} Returns the new inverted object. + * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invertBy(object); + * // => { '1': ['a', 'c'], '2': ['b'] } + * + * _.invertBy(object, function(value) { + * return 'group' + value; + * }); + * // => { 'group1': ['a', 'c'], 'group2': ['b'] } + */ + var invertBy = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); + } + + if (hasOwnProperty.call(result, value)) { + result[value].push(key); + } else { + result[value] = [key]; + } + }, getIteratee); + + /** + * Invokes the method at `path` of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the method to invoke. + * @param {...*} [args] The arguments to invoke the method with. + * @returns {*} Returns the result of the invoked method. + * @example + * + * var object = { 'a': [{ 'b': { 'c': [1, 2, 3, 4] } }] }; + * + * _.invoke(object, 'a[0].b.c.slice', 1, 3); + * // => [2, 3] + */ + var invoke = baseRest(baseInvoke); + + /** + * Creates an array of the own enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. See the + * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * for more details. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keys(new Foo); + * // => ['a', 'b'] (iteration order is not guaranteed) + * + * _.keys('hi'); + * // => ['0', '1'] + */ + function keys(object) { + return isArrayLike(object) ? 
arrayLikeKeys(object) : baseKeys(object); + } + + /** + * Creates an array of the own and inherited enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keysIn(new Foo); + * // => ['a', 'b', 'c'] (iteration order is not guaranteed) + */ + function keysIn(object) { + return isArrayLike(object) ? arrayLikeKeys(object, true) : baseKeysIn(object); + } + + /** + * The opposite of `_.mapValues`; this method creates an object with the + * same values as `object` and keys generated by running each own enumerable + * string keyed property of `object` thru `iteratee`. The iteratee is invoked + * with three arguments: (value, key, object). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns the new mapped object. + * @see _.mapValues + * @example + * + * _.mapKeys({ 'a': 1, 'b': 2 }, function(value, key) { + * return key + value; + * }); + * // => { 'a1': 1, 'b2': 2 } + */ + function mapKeys(object, iteratee) { + var result = {}; + iteratee = getIteratee(iteratee, 3); + + baseForOwn(object, function(value, key, object) { + baseAssignValue(result, iteratee(value, key, object), value); + }); + return result; + } + + /** + * Creates an object with the same keys as `object` and values generated + * by running each own enumerable string keyed property of `object` thru + * `iteratee`. The iteratee is invoked with three arguments: + * (value, key, object). + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns the new mapped object. + * @see _.mapKeys + * @example + * + * var users = { + * 'fred': { 'user': 'fred', 'age': 40 }, + * 'pebbles': { 'user': 'pebbles', 'age': 1 } + * }; + * + * _.mapValues(users, function(o) { return o.age; }); + * // => { 'fred': 40, 'pebbles': 1 } (iteration order is not guaranteed) + * + * // The `_.property` iteratee shorthand. + * _.mapValues(users, 'age'); + * // => { 'fred': 40, 'pebbles': 1 } (iteration order is not guaranteed) + */ + function mapValues(object, iteratee) { + var result = {}; + iteratee = getIteratee(iteratee, 3); + + baseForOwn(object, function(value, key, object) { + baseAssignValue(result, key, iteratee(value, key, object)); + }); + return result; + } + + /** + * This method is like `_.assign` except that it recursively merges own and + * inherited enumerable string keyed properties of source objects into the + * destination object. Source properties that resolve to `undefined` are + * skipped if a destination value exists. Array and plain object properties + * are merged recursively. Other objects and value types are overridden by + * assignment. Source objects are applied from left to right. Subsequent + * sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Object + * @param {Object} object The destination object. 
+ * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @example + * + * var object = { + * 'a': [{ 'b': 2 }, { 'd': 4 }] + * }; + * + * var other = { + * 'a': [{ 'c': 3 }, { 'e': 5 }] + * }; + * + * _.merge(object, other); + * // => { 'a': [{ 'b': 2, 'c': 3 }, { 'd': 4, 'e': 5 }] } + */ + var merge = createAssigner(function(object, source, srcIndex) { + baseMerge(object, source, srcIndex); + }); + + /** + * This method is like `_.merge` except that it accepts `customizer` which + * is invoked to produce the merged values of the destination and source + * properties. If `customizer` returns `undefined`, merging is handled by the + * method instead. The `customizer` is invoked with six arguments: + * (objValue, srcValue, key, object, source, stack). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} customizer The function to customize assigned values. + * @returns {Object} Returns `object`. + * @example + * + * function customizer(objValue, srcValue) { + * if (_.isArray(objValue)) { + * return objValue.concat(srcValue); + * } + * } + * + * var object = { 'a': [1], 'b': [2] }; + * var other = { 'a': [3], 'b': [4] }; + * + * _.mergeWith(object, other, customizer); + * // => { 'a': [1, 3], 'b': [2, 4] } + */ + var mergeWith = createAssigner(function(object, source, srcIndex, customizer) { + baseMerge(object, source, srcIndex, customizer); + }); + + /** + * The opposite of `_.pick`; this method creates an object composed of the + * own and inherited enumerable property paths of `object` that are not omitted. + * + * **Note:** This method is considerably slower than `_.pick`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The source object. + * @param {...(string|string[])} [paths] The property paths to omit. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.omit(object, ['a', 'c']); + * // => { 'b': '2' } + */ + var omit = flatRest(function(object, paths) { + var result = {}; + if (object == null) { + return result; + } + var isDeep = false; + paths = arrayMap(paths, function(path) { + path = castPath(path, object); + isDeep || (isDeep = path.length > 1); + return path; + }); + copyObject(object, getAllKeysIn(object), result); + if (isDeep) { + result = baseClone(result, CLONE_DEEP_FLAG | CLONE_FLAT_FLAG | CLONE_SYMBOLS_FLAG, customOmitClone); + } + var length = paths.length; + while (length--) { + baseUnset(result, paths[length]); + } + return result; + }); + + /** + * The opposite of `_.pickBy`; this method creates an object composed of + * the own and inherited enumerable string keyed properties of `object` that + * `predicate` doesn't return truthy for. The predicate is invoked with two + * arguments: (value, key). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The source object. + * @param {Function} [predicate=_.identity] The function invoked per property. + * @returns {Object} Returns the new object. 
+ * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.omitBy(object, _.isNumber); + * // => { 'b': '2' } + */ + function omitBy(object, predicate) { + return pickBy(object, negate(getIteratee(predicate))); + } + + /** + * Creates an object composed of the picked `object` properties. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The source object. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.pick(object, ['a', 'c']); + * // => { 'a': 1, 'c': 3 } + */ + var pick = flatRest(function(object, paths) { + return object == null ? {} : basePick(object, paths); + }); + + /** + * Creates an object composed of the `object` properties `predicate` returns + * truthy for. The predicate is invoked with two arguments: (value, key). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The source object. + * @param {Function} [predicate=_.identity] The function invoked per property. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.pickBy(object, _.isNumber); + * // => { 'a': 1, 'c': 3 } + */ + function pickBy(object, predicate) { + if (object == null) { + return {}; + } + var props = arrayMap(getAllKeysIn(object), function(prop) { + return [prop]; + }); + predicate = getIteratee(predicate); + return basePickBy(object, props, function(value, path) { + return predicate(value, path[0]); + }); + } + + /** + * This method is like `_.get` except that if the resolved value is a + * function it's invoked with the `this` binding of its parent object and + * its result is returned. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to resolve. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c1': 3, 'c2': _.constant(4) } }] }; + * + * _.result(object, 'a[0].b.c1'); + * // => 3 + * + * _.result(object, 'a[0].b.c2'); + * // => 4 + * + * _.result(object, 'a[0].b.c3', 'default'); + * // => 'default' + * + * _.result(object, 'a[0].b.c3', _.constant('default')); + * // => 'default' + */ + function result(object, path, defaultValue) { + path = castPath(path, object); + + var index = -1, + length = path.length; + + // Ensure the loop is entered when path is empty. + if (!length) { + length = 1; + object = undefined; + } + while (++index < length) { + var value = object == null ? undefined : object[toKey(path[index])]; + if (value === undefined) { + index = length; + value = defaultValue; + } + object = isFunction(value) ? value.call(object) : value; + } + return object; + } + + /** + * Sets the value at `path` of `object`. If a portion of `path` doesn't exist, + * it's created. Arrays are created for missing index properties while objects + * are created for all other missing properties. Use `_.setWith` to customize + * `path` creation. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 3.7.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @returns {Object} Returns `object`. 
+ * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.set(object, 'a[0].b.c', 4); + * console.log(object.a[0].b.c); + * // => 4 + * + * _.set(object, ['x', '0', 'y', 'z'], 5); + * console.log(object.x[0].y.z); + * // => 5 + */ + function set(object, path, value) { + return object == null ? object : baseSet(object, path, value); + } + + /** + * This method is like `_.set` except that it accepts `customizer` which is + * invoked to produce the objects of `path`. If `customizer` returns `undefined` + * path creation is handled by the method instead. The `customizer` is invoked + * with three arguments: (nsValue, key, nsObject). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @example + * + * var object = {}; + * + * _.setWith(object, '[0][1]', 'a', Object); + * // => { '0': { '1': 'a' } } + */ + function setWith(object, path, value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return object == null ? object : baseSet(object, path, value, customizer); + } + + /** + * Creates an array of own enumerable string keyed-value pairs for `object` + * which can be consumed by `_.fromPairs`. If `object` is a map or set, its + * entries are returned. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias entries + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the key-value pairs. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.toPairs(new Foo); + * // => [['a', 1], ['b', 2]] (iteration order is not guaranteed) + */ + var toPairs = createToPairs(keys); + + /** + * Creates an array of own and inherited enumerable string keyed-value pairs + * for `object` which can be consumed by `_.fromPairs`. If `object` is a map + * or set, its entries are returned. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias entriesIn + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the key-value pairs. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.toPairsIn(new Foo); + * // => [['a', 1], ['b', 2], ['c', 3]] (iteration order is not guaranteed) + */ + var toPairsIn = createToPairs(keysIn); + + /** + * An alternative to `_.reduce`; this method transforms `object` to a new + * `accumulator` object which is the result of running each of its own + * enumerable string keyed properties thru `iteratee`, with each invocation + * potentially mutating the `accumulator` object. If `accumulator` is not + * provided, a new object with the same `[[Prototype]]` will be used. The + * iteratee is invoked with four arguments: (accumulator, value, key, object). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The custom accumulator value. + * @returns {*} Returns the accumulated value. 
+ * @example + * + * _.transform([2, 3, 4], function(result, n) { + * result.push(n *= n); + * return n % 2 == 0; + * }, []); + * // => [4, 9] + * + * _.transform({ 'a': 1, 'b': 2, 'c': 1 }, function(result, value, key) { + * (result[value] || (result[value] = [])).push(key); + * }, {}); + * // => { '1': ['a', 'c'], '2': ['b'] } + */ + function transform(object, iteratee, accumulator) { + var isArr = isArray(object), + isArrLike = isArr || isBuffer(object) || isTypedArray(object); + + iteratee = getIteratee(iteratee, 4); + if (accumulator == null) { + var Ctor = object && object.constructor; + if (isArrLike) { + accumulator = isArr ? new Ctor : []; + } + else if (isObject(object)) { + accumulator = isFunction(Ctor) ? baseCreate(getPrototype(object)) : {}; + } + else { + accumulator = {}; + } + } + (isArrLike ? arrayEach : baseForOwn)(object, function(value, index, object) { + return iteratee(accumulator, value, index, object); + }); + return accumulator; + } + + /** + * Removes the property at `path` of `object`. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to unset. + * @returns {boolean} Returns `true` if the property is deleted, else `false`. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 7 } }] }; + * _.unset(object, 'a[0].b.c'); + * // => true + * + * console.log(object); + * // => { 'a': [{ 'b': {} }] }; + * + * _.unset(object, ['a', '0', 'b', 'c']); + * // => true + * + * console.log(object); + * // => { 'a': [{ 'b': {} }] }; + */ + function unset(object, path) { + return object == null ? true : baseUnset(object, path); + } + + /** + * This method is like `_.set` except that accepts `updater` to produce the + * value to set. Use `_.updateWith` to customize `path` creation. The `updater` + * is invoked with one argument: (value). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {Function} updater The function to produce the updated value. + * @returns {Object} Returns `object`. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.update(object, 'a[0].b.c', function(n) { return n * n; }); + * console.log(object.a[0].b.c); + * // => 9 + * + * _.update(object, 'x[0].y.z', function(n) { return n ? n + 1 : 0; }); + * console.log(object.x[0].y.z); + * // => 0 + */ + function update(object, path, updater) { + return object == null ? object : baseUpdate(object, path, castFunction(updater)); + } + + /** + * This method is like `_.update` except that it accepts `customizer` which is + * invoked to produce the objects of `path`. If `customizer` returns `undefined` + * path creation is handled by the method instead. The `customizer` is invoked + * with three arguments: (nsValue, key, nsObject). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {Function} updater The function to produce the updated value. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. 
+ * @example + * + * var object = {}; + * + * _.updateWith(object, '[0][1]', _.constant('a'), Object); + * // => { '0': { '1': 'a' } } + */ + function updateWith(object, path, updater, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return object == null ? object : baseUpdate(object, path, castFunction(updater), customizer); + } + + /** + * Creates an array of the own enumerable string keyed property values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.values(new Foo); + * // => [1, 2] (iteration order is not guaranteed) + * + * _.values('hi'); + * // => ['h', 'i'] + */ + function values(object) { + return object == null ? [] : baseValues(object, keys(object)); + } + + /** + * Creates an array of the own and inherited enumerable string keyed property + * values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.valuesIn(new Foo); + * // => [1, 2, 3] (iteration order is not guaranteed) + */ + function valuesIn(object) { + return object == null ? [] : baseValues(object, keysIn(object)); + } + + /*------------------------------------------------------------------------*/ + + /** + * Clamps `number` within the inclusive `lower` and `upper` bounds. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Number + * @param {number} number The number to clamp. + * @param {number} [lower] The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the clamped number. + * @example + * + * _.clamp(-10, -5, 5); + * // => -5 + * + * _.clamp(10, -5, 5); + * // => 5 + */ + function clamp(number, lower, upper) { + if (upper === undefined) { + upper = lower; + lower = undefined; + } + if (upper !== undefined) { + upper = toNumber(upper); + upper = upper === upper ? upper : 0; + } + if (lower !== undefined) { + lower = toNumber(lower); + lower = lower === lower ? lower : 0; + } + return baseClamp(toNumber(number), lower, upper); + } + + /** + * Checks if `n` is between `start` and up to, but not including, `end`. If + * `end` is not specified, it's set to `start` with `start` then set to `0`. + * If `start` is greater than `end` the params are swapped to support + * negative ranges. + * + * @static + * @memberOf _ + * @since 3.3.0 + * @category Number + * @param {number} number The number to check. + * @param {number} [start=0] The start of the range. + * @param {number} end The end of the range. + * @returns {boolean} Returns `true` if `number` is in the range, else `false`. 
+ * @see _.range, _.rangeRight + * @example + * + * _.inRange(3, 2, 4); + * // => true + * + * _.inRange(4, 8); + * // => true + * + * _.inRange(4, 2); + * // => false + * + * _.inRange(2, 2); + * // => false + * + * _.inRange(1.2, 2); + * // => true + * + * _.inRange(5.2, 4); + * // => false + * + * _.inRange(-3, -2, -6); + * // => true + */ + function inRange(number, start, end) { + start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); + } + number = toNumber(number); + return baseInRange(number, start, end); + } + + /** + * Produces a random number between the inclusive `lower` and `upper` bounds. + * If only one argument is provided a number between `0` and the given number + * is returned. If `floating` is `true`, or either `lower` or `upper` are + * floats, a floating-point number is returned instead of an integer. + * + * **Note:** JavaScript follows the IEEE-754 standard for resolving + * floating-point values which can produce unexpected results. + * + * @static + * @memberOf _ + * @since 0.7.0 + * @category Number + * @param {number} [lower=0] The lower bound. + * @param {number} [upper=1] The upper bound. + * @param {boolean} [floating] Specify returning a floating-point number. + * @returns {number} Returns the random number. + * @example + * + * _.random(0, 5); + * // => an integer between 0 and 5 + * + * _.random(5); + * // => also an integer between 0 and 5 + * + * _.random(5, true); + * // => a floating-point number between 0 and 5 + * + * _.random(1.2, 5.2); + * // => a floating-point number between 1.2 and 5.2 + */ + function random(lower, upper, floating) { + if (floating && typeof floating != 'boolean' && isIterateeCall(lower, upper, floating)) { + upper = floating = undefined; + } + if (floating === undefined) { + if (typeof upper == 'boolean') { + floating = upper; + upper = undefined; + } + else if (typeof lower == 'boolean') { + floating = lower; + lower = undefined; + } + } + if (lower === undefined && upper === undefined) { + lower = 0; + upper = 1; + } + else { + lower = toFinite(lower); + if (upper === undefined) { + upper = lower; + lower = 0; + } else { + upper = toFinite(upper); + } + } + if (lower > upper) { + var temp = lower; + lower = upper; + upper = temp; + } + if (floating || lower % 1 || upper % 1) { + var rand = nativeRandom(); + return nativeMin(lower + (rand * (upper - lower + freeParseFloat('1e-' + ((rand + '').length - 1)))), upper); + } + return baseRandom(lower, upper); + } + + /*------------------------------------------------------------------------*/ + + /** + * Converts `string` to [camel case](https://en.wikipedia.org/wiki/CamelCase). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the camel cased string. + * @example + * + * _.camelCase('Foo Bar'); + * // => 'fooBar' + * + * _.camelCase('--foo-bar--'); + * // => 'fooBar' + * + * _.camelCase('__FOO_BAR__'); + * // => 'fooBar' + */ + var camelCase = createCompounder(function(result, word, index) { + word = word.toLowerCase(); + return result + (index ? capitalize(word) : word); + }); + + /** + * Converts the first character of `string` to upper case and the remaining + * to lower case. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to capitalize. + * @returns {string} Returns the capitalized string. 
+ * @example + * + * _.capitalize('FRED'); + * // => 'Fred' + */ + function capitalize(string) { + return upperFirst(toString(string).toLowerCase()); + } + + /** + * Deburrs `string` by converting + * [Latin-1 Supplement](https://en.wikipedia.org/wiki/Latin-1_Supplement_(Unicode_block)#Character_table) + * and [Latin Extended-A](https://en.wikipedia.org/wiki/Latin_Extended-A) + * letters to basic Latin letters and removing + * [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to deburr. + * @returns {string} Returns the deburred string. + * @example + * + * _.deburr('déjà vu'); + * // => 'deja vu' + */ + function deburr(string) { + string = toString(string); + return string && string.replace(reLatin, deburrLetter).replace(reComboMark, ''); + } + + /** + * Checks if `string` ends with the given target string. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to inspect. + * @param {string} [target] The string to search for. + * @param {number} [position=string.length] The position to search up to. + * @returns {boolean} Returns `true` if `string` ends with `target`, + * else `false`. + * @example + * + * _.endsWith('abc', 'c'); + * // => true + * + * _.endsWith('abc', 'b'); + * // => false + * + * _.endsWith('abc', 'b', 2); + * // => true + */ + function endsWith(string, target, position) { + string = toString(string); + target = baseToString(target); + + var length = string.length; + position = position === undefined + ? length + : baseClamp(toInteger(position), 0, length); + + var end = position; + position -= target.length; + return position >= 0 && string.slice(position, end) == target; + } + + /** + * Converts the characters "&", "<", ">", '"', and "'" in `string` to their + * corresponding HTML entities. + * + * **Note:** No other characters are escaped. To escape additional + * characters use a third-party library like [_he_](https://mths.be/he). + * + * Though the ">" character is escaped for symmetry, characters like + * ">" and "/" don't need escaping in HTML and have no special meaning + * unless they're part of a tag or unquoted attribute value. See + * [Mathias Bynens's article](https://mathiasbynens.be/notes/ambiguous-ampersands) + * (under "semi-related fun fact") for more details. + * + * When working with HTML you should always + * [quote attribute values](http://wonko.com/post/html-escaping) to reduce + * XSS vectors. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category String + * @param {string} [string=''] The string to escape. + * @returns {string} Returns the escaped string. + * @example + * + * _.escape('fred, barney, & pebbles'); + * // => 'fred, barney, & pebbles' + */ + function escape(string) { + string = toString(string); + return (string && reHasUnescapedHtml.test(string)) + ? string.replace(reUnescapedHtml, escapeHtmlChar) + : string; + } + + /** + * Escapes the `RegExp` special characters "^", "$", "\", ".", "*", "+", + * "?", "(", ")", "[", "]", "{", "}", and "|" in `string`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to escape. + * @returns {string} Returns the escaped string. 
+ * @example + * + * _.escapeRegExp('[lodash](https://lodash.com/)'); + * // => '\[lodash\]\(https://lodash\.com/\)' + */ + function escapeRegExp(string) { + string = toString(string); + return (string && reHasRegExpChar.test(string)) + ? string.replace(reRegExpChar, '\\$&') + : string; + } + + /** + * Converts `string` to + * [kebab case](https://en.wikipedia.org/wiki/Letter_case#Special_case_styles). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the kebab cased string. + * @example + * + * _.kebabCase('Foo Bar'); + * // => 'foo-bar' + * + * _.kebabCase('fooBar'); + * // => 'foo-bar' + * + * _.kebabCase('__FOO_BAR__'); + * // => 'foo-bar' + */ + var kebabCase = createCompounder(function(result, word, index) { + return result + (index ? '-' : '') + word.toLowerCase(); + }); + + /** + * Converts `string`, as space separated words, to lower case. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the lower cased string. + * @example + * + * _.lowerCase('--Foo-Bar--'); + * // => 'foo bar' + * + * _.lowerCase('fooBar'); + * // => 'foo bar' + * + * _.lowerCase('__FOO_BAR__'); + * // => 'foo bar' + */ + var lowerCase = createCompounder(function(result, word, index) { + return result + (index ? ' ' : '') + word.toLowerCase(); + }); + + /** + * Converts the first character of `string` to lower case. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.lowerFirst('Fred'); + * // => 'fred' + * + * _.lowerFirst('FRED'); + * // => 'fRED' + */ + var lowerFirst = createCaseFirst('toLowerCase'); + + /** + * Pads `string` on the left and right sides if it's shorter than `length`. + * Padding characters are truncated if they can't be evenly divided by `length`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.pad('abc', 8); + * // => ' abc ' + * + * _.pad('abc', 8, '_-'); + * // => '_-abc_-_' + * + * _.pad('abc', 3); + * // => 'abc' + */ + function pad(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? stringSize(string) : 0; + if (!length || strLength >= length) { + return string; + } + var mid = (length - strLength) / 2; + return ( + createPadding(nativeFloor(mid), chars) + + string + + createPadding(nativeCeil(mid), chars) + ); + } + + /** + * Pads `string` on the right side if it's shorter than `length`. Padding + * characters are truncated if they exceed `length`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.padEnd('abc', 6); + * // => 'abc ' + * + * _.padEnd('abc', 6, '_-'); + * // => 'abc_-_' + * + * _.padEnd('abc', 3); + * // => 'abc' + */ + function padEnd(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? 
stringSize(string) : 0; + return (length && strLength < length) + ? (string + createPadding(length - strLength, chars)) + : string; + } + + /** + * Pads `string` on the left side if it's shorter than `length`. Padding + * characters are truncated if they exceed `length`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.padStart('abc', 6); + * // => ' abc' + * + * _.padStart('abc', 6, '_-'); + * // => '_-_abc' + * + * _.padStart('abc', 3); + * // => 'abc' + */ + function padStart(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? stringSize(string) : 0; + return (length && strLength < length) + ? (createPadding(length - strLength, chars) + string) + : string; + } + + /** + * Converts `string` to an integer of the specified radix. If `radix` is + * `undefined` or `0`, a `radix` of `10` is used unless `value` is a + * hexadecimal, in which case a `radix` of `16` is used. + * + * **Note:** This method aligns with the + * [ES5 implementation](https://es5.github.io/#x15.1.2.2) of `parseInt`. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category String + * @param {string} string The string to convert. + * @param {number} [radix=10] The radix to interpret `value` by. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {number} Returns the converted integer. + * @example + * + * _.parseInt('08'); + * // => 8 + * + * _.map(['6', '08', '10'], _.parseInt); + * // => [6, 8, 10] + */ + function parseInt(string, radix, guard) { + if (guard || radix == null) { + radix = 0; + } else if (radix) { + radix = +radix; + } + return nativeParseInt(toString(string).replace(reTrimStart, ''), radix || 0); + } + + /** + * Repeats the given string `n` times. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to repeat. + * @param {number} [n=1] The number of times to repeat the string. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {string} Returns the repeated string. + * @example + * + * _.repeat('*', 3); + * // => '***' + * + * _.repeat('abc', 2); + * // => 'abcabc' + * + * _.repeat('abc', 0); + * // => '' + */ + function repeat(string, n, guard) { + if ((guard ? isIterateeCall(string, n, guard) : n === undefined)) { + n = 1; + } else { + n = toInteger(n); + } + return baseRepeat(toString(string), n); + } + + /** + * Replaces matches for `pattern` in `string` with `replacement`. + * + * **Note:** This method is based on + * [`String#replace`](https://mdn.io/String/replace). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to modify. + * @param {RegExp|string} pattern The pattern to replace. + * @param {Function|string} replacement The match replacement. + * @returns {string} Returns the modified string. + * @example + * + * _.replace('Hi Fred', 'Fred', 'Barney'); + * // => 'Hi Barney' + */ + function replace() { + var args = arguments, + string = toString(args[0]); + + return args.length < 3 ? string : string.replace(args[1], args[2]); + } + + /** + * Converts `string` to + * [snake case](https://en.wikipedia.org/wiki/Snake_case). 
+ * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the snake cased string. + * @example + * + * _.snakeCase('Foo Bar'); + * // => 'foo_bar' + * + * _.snakeCase('fooBar'); + * // => 'foo_bar' + * + * _.snakeCase('--FOO-BAR--'); + * // => 'foo_bar' + */ + var snakeCase = createCompounder(function(result, word, index) { + return result + (index ? '_' : '') + word.toLowerCase(); + }); + + /** + * Splits `string` by `separator`. + * + * **Note:** This method is based on + * [`String#split`](https://mdn.io/String/split). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to split. + * @param {RegExp|string} separator The separator pattern to split by. + * @param {number} [limit] The length to truncate results to. + * @returns {Array} Returns the string segments. + * @example + * + * _.split('a-b-c', '-', 2); + * // => ['a', 'b'] + */ + function split(string, separator, limit) { + if (limit && typeof limit != 'number' && isIterateeCall(string, separator, limit)) { + separator = limit = undefined; + } + limit = limit === undefined ? MAX_ARRAY_LENGTH : limit >>> 0; + if (!limit) { + return []; + } + string = toString(string); + if (string && ( + typeof separator == 'string' || + (separator != null && !isRegExp(separator)) + )) { + separator = baseToString(separator); + if (!separator && hasUnicode(string)) { + return castSlice(stringToArray(string), 0, limit); + } + } + return string.split(separator, limit); + } + + /** + * Converts `string` to + * [start case](https://en.wikipedia.org/wiki/Letter_case#Stylistic_or_specialised_usage). + * + * @static + * @memberOf _ + * @since 3.1.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the start cased string. + * @example + * + * _.startCase('--foo-bar--'); + * // => 'Foo Bar' + * + * _.startCase('fooBar'); + * // => 'Foo Bar' + * + * _.startCase('__FOO_BAR__'); + * // => 'FOO BAR' + */ + var startCase = createCompounder(function(result, word, index) { + return result + (index ? ' ' : '') + upperFirst(word); + }); + + /** + * Checks if `string` starts with the given target string. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to inspect. + * @param {string} [target] The string to search for. + * @param {number} [position=0] The position to search from. + * @returns {boolean} Returns `true` if `string` starts with `target`, + * else `false`. + * @example + * + * _.startsWith('abc', 'a'); + * // => true + * + * _.startsWith('abc', 'b'); + * // => false + * + * _.startsWith('abc', 'b', 1); + * // => true + */ + function startsWith(string, target, position) { + string = toString(string); + position = position == null + ? 0 + : baseClamp(toInteger(position), 0, string.length); + + target = baseToString(target); + return string.slice(position, position + target.length) == target; + } + + /** + * Creates a compiled template function that can interpolate data properties + * in "interpolate" delimiters, HTML-escape interpolated data properties in + * "escape" delimiters, and execute JavaScript in "evaluate" delimiters. Data + * properties may be accessed as free variables in the template. If a setting + * object is given, it takes precedence over `_.templateSettings` values. 
+ * + * **Note:** In the development build `_.template` utilizes + * [sourceURLs](http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl) + * for easier debugging. + * + * For more information on precompiling templates see + * [lodash's custom builds documentation](https://lodash.com/custom-builds). + * + * For more information on Chrome extension sandboxes see + * [Chrome's extensions documentation](https://developer.chrome.com/extensions/sandboxingEval). + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category String + * @param {string} [string=''] The template string. + * @param {Object} [options={}] The options object. + * @param {RegExp} [options.escape=_.templateSettings.escape] + * The HTML "escape" delimiter. + * @param {RegExp} [options.evaluate=_.templateSettings.evaluate] + * The "evaluate" delimiter. + * @param {Object} [options.imports=_.templateSettings.imports] + * An object to import into the template as free variables. + * @param {RegExp} [options.interpolate=_.templateSettings.interpolate] + * The "interpolate" delimiter. + * @param {string} [options.sourceURL='lodash.templateSources[n]'] + * The sourceURL of the compiled template. + * @param {string} [options.variable='obj'] + * The data object variable name. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the compiled template function. + * @example + * + * // Use the "interpolate" delimiter to create a compiled template. + * var compiled = _.template('hello <%= user %>!'); + * compiled({ 'user': 'fred' }); + * // => 'hello fred!' + * + * // Use the HTML "escape" delimiter to escape data property values. + * var compiled = _.template('<%- value %>'); + * compiled({ 'value': '