diff --git a/miniprogram/images/tab-match-active.png b/miniprogram/images/tab-match-active.png new file mode 100644 index 00000000..b0ccef07 Binary files /dev/null and b/miniprogram/images/tab-match-active.png differ diff --git a/miniprogram/images/tab-match.png b/miniprogram/images/tab-match.png new file mode 100644 index 00000000..2dcbb8d9 Binary files /dev/null and b/miniprogram/images/tab-match.png differ diff --git a/miniprogram/images/tab-points-active.png b/miniprogram/images/tab-points-active.png new file mode 100644 index 00000000..bc4f9be5 Binary files /dev/null and b/miniprogram/images/tab-points-active.png differ diff --git a/miniprogram/images/tab-points.png b/miniprogram/images/tab-points.png new file mode 100644 index 00000000..6042e827 Binary files /dev/null and b/miniprogram/images/tab-points.png differ diff --git a/miniprogram/images/tab-rank-active.png b/miniprogram/images/tab-rank-active.png new file mode 100644 index 00000000..d4d61a65 Binary files /dev/null and b/miniprogram/images/tab-rank-active.png differ diff --git a/miniprogram/images/tab-rank.png b/miniprogram/images/tab-rank.png new file mode 100644 index 00000000..802ca25a Binary files /dev/null and b/miniprogram/images/tab-rank.png differ diff --git a/miniprogram/images/tab-user-active.png b/miniprogram/images/tab-user-active.png new file mode 100644 index 00000000..ea0381a0 Binary files /dev/null and b/miniprogram/images/tab-user-active.png differ diff --git a/miniprogram/images/tab-user.png b/miniprogram/images/tab-user.png new file mode 100644 index 00000000..7b81ff87 Binary files /dev/null and b/miniprogram/images/tab-user.png differ diff --git a/miniprogram/node_modules/.bin/prebuild-install b/miniprogram/node_modules/.bin/prebuild-install new file mode 100644 index 00000000..154b529e --- /dev/null +++ b/miniprogram/node_modules/.bin/prebuild-install @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../prebuild-install/bin.js" "$@" +else + exec node "$basedir/../prebuild-install/bin.js" "$@" +fi diff --git a/miniprogram/node_modules/.bin/prebuild-install.cmd b/miniprogram/node_modules/.bin/prebuild-install.cmd new file mode 100644 index 00000000..21ff9042 --- /dev/null +++ b/miniprogram/node_modules/.bin/prebuild-install.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\prebuild-install\bin.js" %* diff --git a/miniprogram/node_modules/.bin/prebuild-install.ps1 b/miniprogram/node_modules/.bin/prebuild-install.ps1 new file mode 100644 index 00000000..6e657a3b --- /dev/null +++ b/miniprogram/node_modules/.bin/prebuild-install.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args + } else 
{ + & "$basedir/node$exe" "$basedir/../prebuild-install/bin.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../prebuild-install/bin.js" $args + } else { + & "node$exe" "$basedir/../prebuild-install/bin.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/miniprogram/node_modules/.bin/rc b/miniprogram/node_modules/.bin/rc new file mode 100644 index 00000000..c9d9af68 --- /dev/null +++ b/miniprogram/node_modules/.bin/rc @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../rc/cli.js" "$@" +else + exec node "$basedir/../rc/cli.js" "$@" +fi diff --git a/miniprogram/node_modules/.bin/rc.cmd b/miniprogram/node_modules/.bin/rc.cmd new file mode 100644 index 00000000..be16b733 --- /dev/null +++ b/miniprogram/node_modules/.bin/rc.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rc\cli.js" %* diff --git a/miniprogram/node_modules/.bin/rc.ps1 b/miniprogram/node_modules/.bin/rc.ps1 new file mode 100644 index 00000000..9a9b6e37 --- /dev/null +++ b/miniprogram/node_modules/.bin/rc.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../rc/cli.js" $args + } else { + & "$basedir/node$exe" "$basedir/../rc/cli.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../rc/cli.js" $args + } else { + & "node$exe" "$basedir/../rc/cli.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/miniprogram/node_modules/.bin/semver b/miniprogram/node_modules/.bin/semver new file mode 100644 index 00000000..97c53279 --- /dev/null +++ b/miniprogram/node_modules/.bin/semver @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@" +else + exec node "$basedir/../semver/bin/semver.js" "$@" +fi diff --git a/miniprogram/node_modules/.bin/semver.cmd b/miniprogram/node_modules/.bin/semver.cmd new file mode 100644 index 00000000..9913fa9d --- /dev/null +++ b/miniprogram/node_modules/.bin/semver.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %* diff --git a/miniprogram/node_modules/.bin/semver.ps1 
b/miniprogram/node_modules/.bin/semver.ps1 new file mode 100644 index 00000000..314717ad --- /dev/null +++ b/miniprogram/node_modules/.bin/semver.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/miniprogram/node_modules/.package-lock.json b/miniprogram/node_modules/.package-lock.json new file mode 100644 index 00000000..0166fcbb --- /dev/null +++ b/miniprogram/node_modules/.package-lock.json @@ -0,0 +1,449 @@ +{ + "name": "miniprogram", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/canvas": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/canvas/-/canvas-3.2.1.tgz", + "integrity": "sha512-ej1sPFR5+0YWtaVp6S1N1FVz69TQCqmrkGeRvQxZeAB1nAIcjNTHVwrZtYtWFFBmQsF40/uDLehsW5KuYC99mg==", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "node-addon-api": "^7.0.0", + "prebuild-install": "^7.1.3" + }, + "engines": { + "node": "^18.12.0 || >= 20.9.0" + } + }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "license": "ISC" + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + 
"integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "license": "MIT", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "license": "MIT", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", + "license": "(MIT OR WTFPL)", + "engines": { + "node": ">=6" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "license": "MIT" + }, + "node_modules/github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", + "license": "MIT" + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "license": "ISC" + }, + "node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + 
}, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", + "license": "MIT" + }, + "node_modules/napi-build-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz", + "integrity": "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==", + "license": "MIT" + }, + "node_modules/node-abi": { + "version": "3.86.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.86.0.tgz", + "integrity": "sha512-sn9Et4N3ynsetj3spsZR729DVlGH6iBG4RiDMV7HEp3guyOW6W3S0unGpLDxT50mXortGUMax/ykUNQXdqc/Xg==", + "license": "MIT", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-addon-api": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", + "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", + "license": "MIT" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/prebuild-install": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.3.tgz", + "integrity": "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==", + "license": "MIT", + "dependencies": { + "detect-libc": "^2.0.0", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.3", + "mkdirp-classic": "^0.5.3", + "napi-build-utils": "^2.0.0", + "node-abi": "^3.3.0", + "pump": "^3.0.0", + "rc": "^1.2.7", + "simple-get": "^4.0.0", + "tar-fs": "^2.0.0", + "tunnel-agent": "^0.6.0" + }, + "bin": { + "prebuild-install": "bin.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", 
+ "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tar-fs": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz", + "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==", + "license": "MIT", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "license": "MIT", + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + 
"inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + } + } +} diff --git a/miniprogram/node_modules/base64-js/LICENSE b/miniprogram/node_modules/base64-js/LICENSE new file mode 100644 index 00000000..6d52b8ac --- /dev/null +++ b/miniprogram/node_modules/base64-js/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Jameson Little + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/miniprogram/node_modules/base64-js/README.md b/miniprogram/node_modules/base64-js/README.md new file mode 100644 index 00000000..b42a48f4 --- /dev/null +++ b/miniprogram/node_modules/base64-js/README.md @@ -0,0 +1,34 @@ +base64-js +========= + +`base64-js` does basic base64 encoding/decoding in pure JS. + +[![build status](https://secure.travis-ci.org/beatgammit/base64-js.png)](http://travis-ci.org/beatgammit/base64-js) + +Many browsers already have base64 encoding/decoding functionality, but it is for text data, not all-purpose binary data. + +Sometimes encoding/decoding binary data in the browser is useful, and that is what this module does. + +## install + +With [npm](https://npmjs.org) do: + +`npm install base64-js` and `var base64js = require('base64-js')` + +For use in web browsers do: + +`` + +[Get supported base64-js with the Tidelift Subscription](https://tidelift.com/subscription/pkg/npm-base64-js?utm_source=npm-base64-js&utm_medium=referral&utm_campaign=readme) + +## methods + +`base64js` has three exposed functions, `byteLength`, `toByteArray` and `fromByteArray`, which both take a single argument. 
+ +* `byteLength` - Takes a base64 string and returns length of byte array +* `toByteArray` - Takes a base64 string and returns a byte array +* `fromByteArray` - Takes a byte array and returns a base64 string + +## license + +MIT diff --git a/miniprogram/node_modules/base64-js/base64js.min.js b/miniprogram/node_modules/base64-js/base64js.min.js new file mode 100644 index 00000000..908ac83f --- /dev/null +++ b/miniprogram/node_modules/base64-js/base64js.min.js @@ -0,0 +1 @@ +(function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"==typeof window?"undefined"==typeof global?"undefined"==typeof self?this:self:global:window,b.base64js=a()}})(function(){return function(){function b(d,e,g){function a(j,i){if(!e[j]){if(!d[j]){var f="function"==typeof require&&require;if(!i&&f)return f(j,!0);if(h)return h(j,!0);var c=new Error("Cannot find module '"+j+"'");throw c.code="MODULE_NOT_FOUND",c}var k=e[j]={exports:{}};d[j][0].call(k.exports,function(b){var c=d[j][1][b];return a(c||b)},k,k.exports,b,d,e,g)}return e[j].exports}for(var h="function"==typeof require&&require,c=0;c>16,j[k++]=255&b>>8,j[k++]=255&b;return 2===h&&(b=l[a.charCodeAt(c)]<<2|l[a.charCodeAt(c+1)]>>4,j[k++]=255&b),1===h&&(b=l[a.charCodeAt(c)]<<10|l[a.charCodeAt(c+1)]<<4|l[a.charCodeAt(c+2)]>>2,j[k++]=255&b>>8,j[k++]=255&b),j}function g(a){return k[63&a>>18]+k[63&a>>12]+k[63&a>>6]+k[63&a]}function h(a,b,c){for(var d,e=[],f=b;fj?j:g+f));return 1===d?(b=a[c-1],e.push(k[b>>2]+k[63&b<<4]+"==")):2===d&&(b=(a[c-2]<<8)+a[c-1],e.push(k[b>>10]+k[63&b>>4]+k[63&b<<2]+"=")),e.join("")}c.byteLength=function(a){var b=d(a),c=b[0],e=b[1];return 3*(c+e)/4-e},c.toByteArray=f,c.fromByteArray=j;for(var k=[],l=[],m="undefined"==typeof Uint8Array?Array:Uint8Array,n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",o=0,p=n.length;o 0) { + throw new Error('Invalid string. Length must be a multiple of 4') + } + + // Trim off extra bytes after placeholder bytes are found + // See: https://github.com/beatgammit/base64-js/issues/42 + var validLen = b64.indexOf('=') + if (validLen === -1) validLen = len + + var placeHoldersLen = validLen === len + ? 0 + : 4 - (validLen % 4) + + return [validLen, placeHoldersLen] +} + +// base64 is 4/3 + up to two characters of the original data +function byteLength (b64) { + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function _byteLength (b64, validLen, placeHoldersLen) { + return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen +} + +function toByteArray (b64) { + var tmp + var lens = getLens(b64) + var validLen = lens[0] + var placeHoldersLen = lens[1] + + var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) + + var curByte = 0 + + // if there are placeholders, only get up to the last complete 4 chars + var len = placeHoldersLen > 0 + ? 
validLen - 4 + : validLen + + var i + for (i = 0; i < len; i += 4) { + tmp = + (revLookup[b64.charCodeAt(i)] << 18) | + (revLookup[b64.charCodeAt(i + 1)] << 12) | + (revLookup[b64.charCodeAt(i + 2)] << 6) | + revLookup[b64.charCodeAt(i + 3)] + arr[curByte++] = (tmp >> 16) & 0xFF + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 2) { + tmp = + (revLookup[b64.charCodeAt(i)] << 2) | + (revLookup[b64.charCodeAt(i + 1)] >> 4) + arr[curByte++] = tmp & 0xFF + } + + if (placeHoldersLen === 1) { + tmp = + (revLookup[b64.charCodeAt(i)] << 10) | + (revLookup[b64.charCodeAt(i + 1)] << 4) | + (revLookup[b64.charCodeAt(i + 2)] >> 2) + arr[curByte++] = (tmp >> 8) & 0xFF + arr[curByte++] = tmp & 0xFF + } + + return arr +} + +function tripletToBase64 (num) { + return lookup[num >> 18 & 0x3F] + + lookup[num >> 12 & 0x3F] + + lookup[num >> 6 & 0x3F] + + lookup[num & 0x3F] +} + +function encodeChunk (uint8, start, end) { + var tmp + var output = [] + for (var i = start; i < end; i += 3) { + tmp = + ((uint8[i] << 16) & 0xFF0000) + + ((uint8[i + 1] << 8) & 0xFF00) + + (uint8[i + 2] & 0xFF) + output.push(tripletToBase64(tmp)) + } + return output.join('') +} + +function fromByteArray (uint8) { + var tmp + var len = uint8.length + var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes + var parts = [] + var maxChunkLength = 16383 // must be multiple of 3 + + // go through the array every three bytes, we'll deal with trailing stuff later + for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { + parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) + } + + // pad the end with zeros, but make sure to not forget the extra bytes + if (extraBytes === 1) { + tmp = uint8[len - 1] + parts.push( + lookup[tmp >> 2] + + lookup[(tmp << 4) & 0x3F] + + '==' + ) + } else if (extraBytes === 2) { + tmp = (uint8[len - 2] << 8) + uint8[len - 1] + parts.push( + lookup[tmp >> 10] + + lookup[(tmp >> 4) & 0x3F] + + lookup[(tmp << 2) & 0x3F] + + '=' + ) + } + + return parts.join('') +} diff --git a/miniprogram/node_modules/base64-js/package.json b/miniprogram/node_modules/base64-js/package.json new file mode 100644 index 00000000..c3972e39 --- /dev/null +++ b/miniprogram/node_modules/base64-js/package.json @@ -0,0 +1,47 @@ +{ + "name": "base64-js", + "description": "Base64 encoding/decoding in pure JS", + "version": "1.5.1", + "author": "T. 
Jameson Little ", + "typings": "index.d.ts", + "bugs": { + "url": "https://github.com/beatgammit/base64-js/issues" + }, + "devDependencies": { + "babel-minify": "^0.5.1", + "benchmark": "^2.1.4", + "browserify": "^16.3.0", + "standard": "*", + "tape": "4.x" + }, + "homepage": "https://github.com/beatgammit/base64-js", + "keywords": [ + "base64" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/beatgammit/base64-js.git" + }, + "scripts": { + "build": "browserify -s base64js -r ./ | minify > base64js.min.js", + "lint": "standard", + "test": "npm run lint && npm run unit", + "unit": "tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/miniprogram/node_modules/bl/.travis.yml b/miniprogram/node_modules/bl/.travis.yml new file mode 100644 index 00000000..016eaf55 --- /dev/null +++ b/miniprogram/node_modules/bl/.travis.yml @@ -0,0 +1,17 @@ +sudo: false +arch: + - amd64 + - ppc64le +language: node_js +node_js: + - '6' + - '8' + - '10' + - '12' + - '14' + - '15' + - lts/* +notifications: + email: + - rod@vagg.org + - matteo.collina@gmail.com diff --git a/miniprogram/node_modules/bl/BufferList.js b/miniprogram/node_modules/bl/BufferList.js new file mode 100644 index 00000000..471ee778 --- /dev/null +++ b/miniprogram/node_modules/bl/BufferList.js @@ -0,0 +1,396 @@ +'use strict' + +const { Buffer } = require('buffer') +const symbol = Symbol.for('BufferList') + +function BufferList (buf) { + if (!(this instanceof BufferList)) { + return new BufferList(buf) + } + + BufferList._init.call(this, buf) +} + +BufferList._init = function _init (buf) { + Object.defineProperty(this, symbol, { value: true }) + + this._bufs = [] + this.length = 0 + + if (buf) { + this.append(buf) + } +} + +BufferList.prototype._new = function _new (buf) { + return new BufferList(buf) +} + +BufferList.prototype._offset = function _offset (offset) { + if (offset === 0) { + return [0, 0] + } + + let tot = 0 + + for (let i = 0; i < this._bufs.length; i++) { + const _t = tot + this._bufs[i].length + if (offset < _t || i === this._bufs.length - 1) { + return [i, offset - tot] + } + tot = _t + } +} + +BufferList.prototype._reverseOffset = function (blOffset) { + const bufferId = blOffset[0] + let offset = blOffset[1] + + for (let i = 0; i < bufferId; i++) { + offset += this._bufs[i].length + } + + return offset +} + +BufferList.prototype.get = function get (index) { + if (index > this.length || index < 0) { + return undefined + } + + const offset = this._offset(index) + + return this._bufs[offset[0]][offset[1]] +} + +BufferList.prototype.slice = function slice (start, end) { + if (typeof start === 'number' && start < 0) { + start += this.length + } + + if (typeof end === 'number' && end < 0) { + end += this.length + } + + return this.copy(null, 0, start, end) +} + +BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) { + if (typeof srcStart !== 'number' || srcStart < 0) { + srcStart = 0 + } + + if (typeof srcEnd !== 'number' || srcEnd > this.length) { + srcEnd = this.length + } + + if (srcStart >= this.length) { + return dst || Buffer.alloc(0) + } + + if (srcEnd <= 0) { + return dst || Buffer.alloc(0) + } + + const copy = !!dst + const off = this._offset(srcStart) + const len = srcEnd - srcStart + let bytes = len + let bufoff = (copy && 
dstStart) || 0 + let start = off[1] + + // copy/slice everything + if (srcStart === 0 && srcEnd === this.length) { + if (!copy) { + // slice, but full concat if multiple buffers + return this._bufs.length === 1 + ? this._bufs[0] + : Buffer.concat(this._bufs, this.length) + } + + // copy, need to copy individual buffers + for (let i = 0; i < this._bufs.length; i++) { + this._bufs[i].copy(dst, bufoff) + bufoff += this._bufs[i].length + } + + return dst + } + + // easy, cheap case where it's a subset of one of the buffers + if (bytes <= this._bufs[off[0]].length - start) { + return copy + ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes) + : this._bufs[off[0]].slice(start, start + bytes) + } + + if (!copy) { + // a slice, we need something to copy in to + dst = Buffer.allocUnsafe(len) + } + + for (let i = off[0]; i < this._bufs.length; i++) { + const l = this._bufs[i].length - start + + if (bytes > l) { + this._bufs[i].copy(dst, bufoff, start) + bufoff += l + } else { + this._bufs[i].copy(dst, bufoff, start, start + bytes) + bufoff += l + break + } + + bytes -= l + + if (start) { + start = 0 + } + } + + // safeguard so that we don't return uninitialized memory + if (dst.length > bufoff) return dst.slice(0, bufoff) + + return dst +} + +BufferList.prototype.shallowSlice = function shallowSlice (start, end) { + start = start || 0 + end = typeof end !== 'number' ? this.length : end + + if (start < 0) { + start += this.length + } + + if (end < 0) { + end += this.length + } + + if (start === end) { + return this._new() + } + + const startOffset = this._offset(start) + const endOffset = this._offset(end) + const buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1) + + if (endOffset[1] === 0) { + buffers.pop() + } else { + buffers[buffers.length - 1] = buffers[buffers.length - 1].slice(0, endOffset[1]) + } + + if (startOffset[1] !== 0) { + buffers[0] = buffers[0].slice(startOffset[1]) + } + + return this._new(buffers) +} + +BufferList.prototype.toString = function toString (encoding, start, end) { + return this.slice(start, end).toString(encoding) +} + +BufferList.prototype.consume = function consume (bytes) { + // first, normalize the argument, in accordance with how Buffer does it + bytes = Math.trunc(bytes) + // do nothing if not a positive number + if (Number.isNaN(bytes) || bytes <= 0) return this + + while (this._bufs.length) { + if (bytes >= this._bufs[0].length) { + bytes -= this._bufs[0].length + this.length -= this._bufs[0].length + this._bufs.shift() + } else { + this._bufs[0] = this._bufs[0].slice(bytes) + this.length -= bytes + break + } + } + + return this +} + +BufferList.prototype.duplicate = function duplicate () { + const copy = this._new() + + for (let i = 0; i < this._bufs.length; i++) { + copy.append(this._bufs[i]) + } + + return copy +} + +BufferList.prototype.append = function append (buf) { + if (buf == null) { + return this + } + + if (buf.buffer) { + // append a view of the underlying ArrayBuffer + this._appendBuffer(Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength)) + } else if (Array.isArray(buf)) { + for (let i = 0; i < buf.length; i++) { + this.append(buf[i]) + } + } else if (this._isBufferList(buf)) { + // unwrap argument into individual BufferLists + for (let i = 0; i < buf._bufs.length; i++) { + this.append(buf._bufs[i]) + } + } else { + // coerce number arguments to strings, since Buffer(number) does + // uninitialized memory allocation + if (typeof buf === 'number') { + buf = buf.toString() + } + + this._appendBuffer(Buffer.from(buf)) + } 
+ + return this +} + +BufferList.prototype._appendBuffer = function appendBuffer (buf) { + this._bufs.push(buf) + this.length += buf.length +} + +BufferList.prototype.indexOf = function (search, offset, encoding) { + if (encoding === undefined && typeof offset === 'string') { + encoding = offset + offset = undefined + } + + if (typeof search === 'function' || Array.isArray(search)) { + throw new TypeError('The "value" argument must be one of type string, Buffer, BufferList, or Uint8Array.') + } else if (typeof search === 'number') { + search = Buffer.from([search]) + } else if (typeof search === 'string') { + search = Buffer.from(search, encoding) + } else if (this._isBufferList(search)) { + search = search.slice() + } else if (Array.isArray(search.buffer)) { + search = Buffer.from(search.buffer, search.byteOffset, search.byteLength) + } else if (!Buffer.isBuffer(search)) { + search = Buffer.from(search) + } + + offset = Number(offset || 0) + + if (isNaN(offset)) { + offset = 0 + } + + if (offset < 0) { + offset = this.length + offset + } + + if (offset < 0) { + offset = 0 + } + + if (search.length === 0) { + return offset > this.length ? this.length : offset + } + + const blOffset = this._offset(offset) + let blIndex = blOffset[0] // index of which internal buffer we're working on + let buffOffset = blOffset[1] // offset of the internal buffer we're working on + + // scan over each buffer + for (; blIndex < this._bufs.length; blIndex++) { + const buff = this._bufs[blIndex] + + while (buffOffset < buff.length) { + const availableWindow = buff.length - buffOffset + + if (availableWindow >= search.length) { + const nativeSearchResult = buff.indexOf(search, buffOffset) + + if (nativeSearchResult !== -1) { + return this._reverseOffset([blIndex, nativeSearchResult]) + } + + buffOffset = buff.length - search.length + 1 // end of native search window + } else { + const revOffset = this._reverseOffset([blIndex, buffOffset]) + + if (this._match(revOffset, search)) { + return revOffset + } + + buffOffset++ + } + } + + buffOffset = 0 + } + + return -1 +} + +BufferList.prototype._match = function (offset, search) { + if (this.length - offset < search.length) { + return false + } + + for (let searchOffset = 0; searchOffset < search.length; searchOffset++) { + if (this.get(offset + searchOffset) !== search[searchOffset]) { + return false + } + } + return true +} + +;(function () { + const methods = { + readDoubleBE: 8, + readDoubleLE: 8, + readFloatBE: 4, + readFloatLE: 4, + readInt32BE: 4, + readInt32LE: 4, + readUInt32BE: 4, + readUInt32LE: 4, + readInt16BE: 2, + readInt16LE: 2, + readUInt16BE: 2, + readUInt16LE: 2, + readInt8: 1, + readUInt8: 1, + readIntBE: null, + readIntLE: null, + readUIntBE: null, + readUIntLE: null + } + + for (const m in methods) { + (function (m) { + if (methods[m] === null) { + BufferList.prototype[m] = function (offset, byteLength) { + return this.slice(offset, offset + byteLength)[m](0, byteLength) + } + } else { + BufferList.prototype[m] = function (offset = 0) { + return this.slice(offset, offset + methods[m])[m](0) + } + } + }(m)) + } +}()) + +// Used internally by the class and also as an indicator of this object being +// a `BufferList`. It's not possible to use `instanceof BufferList` in a browser +// environment because there could be multiple different copies of the +// BufferList class and some `BufferList`s might be `BufferList`s. 
+BufferList.prototype._isBufferList = function _isBufferList (b) { + return b instanceof BufferList || BufferList.isBufferList(b) +} + +BufferList.isBufferList = function isBufferList (b) { + return b != null && b[symbol] +} + +module.exports = BufferList diff --git a/miniprogram/node_modules/bl/LICENSE.md b/miniprogram/node_modules/bl/LICENSE.md new file mode 100644 index 00000000..ecbe5163 --- /dev/null +++ b/miniprogram/node_modules/bl/LICENSE.md @@ -0,0 +1,13 @@ +The MIT License (MIT) +===================== + +Copyright (c) 2013-2019 bl contributors +---------------------------------- + +*bl contributors listed at * + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/miniprogram/node_modules/bl/README.md b/miniprogram/node_modules/bl/README.md new file mode 100644 index 00000000..9680b1dc --- /dev/null +++ b/miniprogram/node_modules/bl/README.md @@ -0,0 +1,247 @@ +# bl *(BufferList)* + +[![Build Status](https://api.travis-ci.com/rvagg/bl.svg?branch=master)](https://travis-ci.com/rvagg/bl/) + +**A Node.js Buffer list collector, reader and streamer thingy.** + +[![NPM](https://nodei.co/npm/bl.svg)](https://nodei.co/npm/bl/) + +**bl** is a storage object for collections of Node Buffers, exposing them with the main Buffer readable API. Also works as a duplex stream so you can collect buffers from a stream that emits them and emit buffers to a stream that consumes them! + +The original buffers are kept intact and copies are only done as necessary. Any reads that require the use of a single original buffer will return a slice of that buffer only (which references the same memory as the original buffer). Reads that span buffers perform concatenation as required and return the results transparently. + +```js +const { BufferList } = require('bl') + +const bl = new BufferList() +bl.append(Buffer.from('abcd')) +bl.append(Buffer.from('efg')) +bl.append('hi') // bl will also accept & convert Strings +bl.append(Buffer.from('j')) +bl.append(Buffer.from([ 0x3, 0x4 ])) + +console.log(bl.length) // 12 + +console.log(bl.slice(0, 10).toString('ascii')) // 'abcdefghij' +console.log(bl.slice(3, 10).toString('ascii')) // 'defghij' +console.log(bl.slice(3, 6).toString('ascii')) // 'def' +console.log(bl.slice(3, 8).toString('ascii')) // 'defgh' +console.log(bl.slice(5, 10).toString('ascii')) // 'fghij' + +console.log(bl.indexOf('def')) // 3 +console.log(bl.indexOf('asdf')) // -1 + +// or just use toString! 
+console.log(bl.toString()) // 'abcdefghij\u0003\u0004' +console.log(bl.toString('ascii', 3, 8)) // 'defgh' +console.log(bl.toString('ascii', 5, 10)) // 'fghij' + +// other standard Buffer readables +console.log(bl.readUInt16BE(10)) // 0x0304 +console.log(bl.readUInt16LE(10)) // 0x0403 +``` + +Give it a callback in the constructor and use it just like **[concat-stream](https://github.com/maxogden/node-concat-stream)**: + +```js +const { BufferListStream } = require('bl') +const fs = require('fs') + +fs.createReadStream('README.md') + .pipe(BufferListStream((err, data) => { // note 'new' isn't strictly required + // `data` is a complete Buffer object containing the full data + console.log(data.toString()) + })) +``` + +Note that when you use the *callback* method like this, the resulting `data` parameter is a concatenation of all `Buffer` objects in the list. If you want to avoid the overhead of this concatenation (in cases of extreme performance consciousness), then avoid the *callback* method and just listen to `'end'` instead, like a standard Stream. + +Or to fetch a URL using [hyperquest](https://github.com/substack/hyperquest) (should work with [request](http://github.com/mikeal/request) and even plain Node http too!): + +```js +const hyperquest = require('hyperquest') +const { BufferListStream } = require('bl') + +const url = 'https://raw.github.com/rvagg/bl/master/README.md' + +hyperquest(url).pipe(BufferListStream((err, data) => { + console.log(data.toString()) +})) +``` + +Or, use it as a readable stream to recompose a list of Buffers to an output source: + +```js +const { BufferListStream } = require('bl') +const fs = require('fs') + +var bl = new BufferListStream() +bl.append(Buffer.from('abcd')) +bl.append(Buffer.from('efg')) +bl.append(Buffer.from('hi')) +bl.append(Buffer.from('j')) + +bl.pipe(fs.createWriteStream('gibberish.txt')) +``` + +## API + + * new BufferList([ buf ]) + * BufferList.isBufferList(obj) + * bl.length + * bl.append(buffer) + * bl.get(index) + * bl.indexOf(value[, byteOffset][, encoding]) + * bl.slice([ start[, end ] ]) + * bl.shallowSlice([ start[, end ] ]) + * bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) + * bl.duplicate() + * bl.consume(bytes) + * bl.toString([encoding, [ start, [ end ]]]) + * bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() + * new BufferListStream([ callback ]) + +-------------------------------------------------------- + +### new BufferList([ Buffer | Buffer array | BufferList | BufferList array | String ]) +No arguments are _required_ for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` objects. + +`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with: + +```js +const { BufferList } = require('bl') +const bl = BufferList() + +// equivalent to: + +const { BufferList } = require('bl') +const bl = new BufferList() +``` + +-------------------------------------------------------- + +### BufferList.isBufferList(obj) +Determines if the passed object is a `BufferList`. It will return `true` if the passed object is an instance of `BufferList` **or** `BufferListStream` and `false` otherwise. + +N.B. 
this won't return `true` for `BufferList` or `BufferListStream` instances created by versions of this library before this static method was added. + +-------------------------------------------------------- + +### bl.length +Get the length of the list in bytes. This is the sum of the lengths of all of the buffers contained in the list, minus any initial offset for a semi-consumed buffer at the beginning. Should accurately represent the total number of bytes that can be read from the list. + +-------------------------------------------------------- + +### bl.append(Buffer | Buffer array | BufferList | BufferList array | String) +`append(buffer)` adds an additional buffer or BufferList to the internal list. `this` is returned so it can be chained. + +-------------------------------------------------------- + +### bl.get(index) +`get()` will return the byte at the specified index. + +-------------------------------------------------------- + +### bl.indexOf(value[, byteOffset][, encoding]) +`get()` will return the byte at the specified index. +`indexOf()` method returns the first index at which a given element can be found in the BufferList, or -1 if it is not present. + +-------------------------------------------------------- + +### bl.slice([ start, [ end ] ]) +`slice()` returns a new `Buffer` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. + +If the requested range spans a single internal buffer then a slice of that buffer will be returned which shares the original memory range of that Buffer. If the range spans multiple buffers then copy operations will likely occur to give you a uniform Buffer. + +-------------------------------------------------------- + +### bl.shallowSlice([ start, [ end ] ]) +`shallowSlice()` returns a new `BufferList` object containing the bytes within the range specified. Both `start` and `end` are optional and will default to the beginning and end of the list respectively. + +No copies will be performed. All buffers in the result share memory with the original list. + +-------------------------------------------------------- + +### bl.copy(dest, [ destStart, [ srcStart [, srcEnd ] ] ]) +`copy()` copies the content of the list in the `dest` buffer, starting from `destStart` and containing the bytes within the range specified with `srcStart` to `srcEnd`. `destStart`, `start` and `end` are optional and will default to the beginning of the `dest` buffer, and the beginning and end of the list respectively. + +-------------------------------------------------------- + +### bl.duplicate() +`duplicate()` performs a **shallow-copy** of the list. The internal Buffers remains the same, so if you change the underlying Buffers, the change will be reflected in both the original and the duplicate. This method is needed if you want to call `consume()` or `pipe()` and still keep the original list.Example: + +```js +var bl = new BufferListStream() + +bl.append('hello') +bl.append(' world') +bl.append('\n') + +bl.duplicate().pipe(process.stdout, { end: false }) + +console.log(bl.toString()) +``` + +-------------------------------------------------------- + +### bl.consume(bytes) +`consume()` will shift bytes *off the start of the list*. The number of bytes consumed don't need to line up with the sizes of the internal Buffers—initial offsets will be calculated accordingly in order to give you a consistent view of the data. 
+ +-------------------------------------------------------- + +### bl.toString([encoding, [ start, [ end ]]]) +`toString()` will return a string representation of the buffer. The optional `start` and `end` arguments are passed on to `slice()`, while the `encoding` is passed on to `toString()` of the resulting Buffer. See the [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end) documentation for more information. + +-------------------------------------------------------- + +### bl.readDoubleBE(), bl.readDoubleLE(), bl.readFloatBE(), bl.readFloatLE(), bl.readInt32BE(), bl.readInt32LE(), bl.readUInt32BE(), bl.readUInt32LE(), bl.readInt16BE(), bl.readInt16LE(), bl.readUInt16BE(), bl.readUInt16LE(), bl.readInt8(), bl.readUInt8() + +All of the standard byte-reading methods of the `Buffer` interface are implemented and will operate across internal Buffer boundaries transparently. + +See the [Buffer](http://nodejs.org/docs/latest/api/buffer.html) documentation for how these work. + +-------------------------------------------------------- + +### new BufferListStream([ callback | Buffer | Buffer array | BufferList | BufferList array | String ]) +**BufferListStream** is a Node **[Duplex Stream](http://nodejs.org/docs/latest/api/stream.html#stream_class_stream_duplex)**, so it can be read from and written to like a standard Node stream. You can also `pipe()` to and from a **BufferListStream** instance. + +The constructor takes an optional callback, if supplied, the callback will be called with an error argument followed by a reference to the **bl** instance, when `bl.end()` is called (i.e. from a piped stream). This is a convenient method of collecting the entire contents of a stream, particularly when the stream is *chunky*, such as a network stream. + +Normally, no arguments are required for the constructor, but you can initialise the list by passing in a single `Buffer` object or an array of `Buffer` object. + +`new` is not strictly required, if you don't instantiate a new object, it will be done automatically for you so you can create a new instance simply with: + +```js +const { BufferListStream } = require('bl') +const bl = BufferListStream() + +// equivalent to: + +const { BufferListStream } = require('bl') +const bl = new BufferListStream() +``` + +N.B. For backwards compatibility reasons, `BufferListStream` is the **default** export when you `require('bl')`: + +```js +const { BufferListStream } = require('bl') +// equivalent to: +const BufferListStream = require('bl') +``` + +-------------------------------------------------------- + +## Contributors + +**bl** is brought to you by the following hackers: + + * [Rod Vagg](https://github.com/rvagg) + * [Matteo Collina](https://github.com/mcollina) + * [Jarett Cruger](https://github.com/jcrugzz) + + +## License & copyright + +Copyright (c) 2013-2019 bl contributors (listed above). + +bl is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details. 
diff --git a/miniprogram/node_modules/bl/bl.js b/miniprogram/node_modules/bl/bl.js new file mode 100644 index 00000000..40228f87 --- /dev/null +++ b/miniprogram/node_modules/bl/bl.js @@ -0,0 +1,84 @@ +'use strict' + +const DuplexStream = require('readable-stream').Duplex +const inherits = require('inherits') +const BufferList = require('./BufferList') + +function BufferListStream (callback) { + if (!(this instanceof BufferListStream)) { + return new BufferListStream(callback) + } + + if (typeof callback === 'function') { + this._callback = callback + + const piper = function piper (err) { + if (this._callback) { + this._callback(err) + this._callback = null + } + }.bind(this) + + this.on('pipe', function onPipe (src) { + src.on('error', piper) + }) + this.on('unpipe', function onUnpipe (src) { + src.removeListener('error', piper) + }) + + callback = null + } + + BufferList._init.call(this, callback) + DuplexStream.call(this) +} + +inherits(BufferListStream, DuplexStream) +Object.assign(BufferListStream.prototype, BufferList.prototype) + +BufferListStream.prototype._new = function _new (callback) { + return new BufferListStream(callback) +} + +BufferListStream.prototype._write = function _write (buf, encoding, callback) { + this._appendBuffer(buf) + + if (typeof callback === 'function') { + callback() + } +} + +BufferListStream.prototype._read = function _read (size) { + if (!this.length) { + return this.push(null) + } + + size = Math.min(size, this.length) + this.push(this.slice(0, size)) + this.consume(size) +} + +BufferListStream.prototype.end = function end (chunk) { + DuplexStream.prototype.end.call(this, chunk) + + if (this._callback) { + this._callback(null, this.slice()) + this._callback = null + } +} + +BufferListStream.prototype._destroy = function _destroy (err, cb) { + this._bufs.length = 0 + this.length = 0 + cb(err) +} + +BufferListStream.prototype._isBufferList = function _isBufferList (b) { + return b instanceof BufferListStream || b instanceof BufferList || BufferListStream.isBufferList(b) +} + +BufferListStream.isBufferList = BufferList.isBufferList + +module.exports = BufferListStream +module.exports.BufferListStream = BufferListStream +module.exports.BufferList = BufferList diff --git a/miniprogram/node_modules/bl/package.json b/miniprogram/node_modules/bl/package.json new file mode 100644 index 00000000..3b2be3f4 --- /dev/null +++ b/miniprogram/node_modules/bl/package.json @@ -0,0 +1,37 @@ +{ + "name": "bl", + "version": "4.1.0", + "description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!", + "license": "MIT", + "main": "bl.js", + "scripts": { + "lint": "standard *.js test/*.js", + "test": "npm run lint && node test/test.js | faucet" + }, + "repository": { + "type": "git", + "url": "https://github.com/rvagg/bl.git" + }, + "homepage": "https://github.com/rvagg/bl", + "authors": [ + "Rod Vagg (https://github.com/rvagg)", + "Matteo Collina (https://github.com/mcollina)", + "Jarett Cruger (https://github.com/jcrugzz)" + ], + "keywords": [ + "buffer", + "buffers", + "stream", + "awesomesauce" + ], + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + }, + "devDependencies": { + "faucet": "~0.0.1", + "standard": "^14.3.0", + "tape": "^4.11.0" + } +} diff --git a/miniprogram/node_modules/bl/test/convert.js b/miniprogram/node_modules/bl/test/convert.js new file mode 100644 index 00000000..9f3e2359 --- /dev/null +++ b/miniprogram/node_modules/bl/test/convert.js @@ -0,0 +1,21 @@ 
+'use strict' + +const tape = require('tape') +const { BufferList, BufferListStream } = require('../') +const { Buffer } = require('buffer') + +tape('convert from BufferList to BufferListStream', (t) => { + const data = Buffer.from(`TEST-${Date.now()}`) + const bl = new BufferList(data) + const bls = new BufferListStream(bl) + t.ok(bl.slice().equals(bls.slice())) + t.end() +}) + +tape('convert from BufferListStream to BufferList', (t) => { + const data = Buffer.from(`TEST-${Date.now()}`) + const bls = new BufferListStream(data) + const bl = new BufferList(bls) + t.ok(bl.slice().equals(bls.slice())) + t.end() +}) diff --git a/miniprogram/node_modules/bl/test/indexOf.js b/miniprogram/node_modules/bl/test/indexOf.js new file mode 100644 index 00000000..62dcb01f --- /dev/null +++ b/miniprogram/node_modules/bl/test/indexOf.js @@ -0,0 +1,492 @@ +'use strict' + +const tape = require('tape') +const BufferList = require('../') +const { Buffer } = require('buffer') + +tape('indexOf single byte needle', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg', '12345']) + + t.equal(bl.indexOf('e'), 4) + t.equal(bl.indexOf('e', 5), 11) + t.equal(bl.indexOf('e', 12), -1) + t.equal(bl.indexOf('5'), 18) + + t.end() +}) + +tape('indexOf multiple byte needle', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + + t.equal(bl.indexOf('ef'), 4) + t.equal(bl.indexOf('ef', 5), 11) + + t.end() +}) + +tape('indexOf multiple byte needles across buffer boundaries', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + + t.equal(bl.indexOf('fgabc'), 5) + + t.end() +}) + +tape('indexOf takes a Uint8Array search', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + const search = new Uint8Array([102, 103, 97, 98, 99]) // fgabc + + t.equal(bl.indexOf(search), 5) + + t.end() +}) + +tape('indexOf takes a buffer list search', (t) => { + const bl = new BufferList(['abcdefg', 'abcdefg']) + const search = new BufferList('fgabc') + + t.equal(bl.indexOf(search), 5) + + t.end() +}) + +tape('indexOf a zero byte needle', (t) => { + const b = new BufferList('abcdef') + const bufEmpty = Buffer.from('') + + t.equal(b.indexOf(''), 0) + t.equal(b.indexOf('', 1), 1) + t.equal(b.indexOf('', b.length + 1), b.length) + t.equal(b.indexOf('', Infinity), b.length) + t.equal(b.indexOf(bufEmpty), 0) + t.equal(b.indexOf(bufEmpty, 1), 1) + t.equal(b.indexOf(bufEmpty, b.length + 1), b.length) + t.equal(b.indexOf(bufEmpty, Infinity), b.length) + + t.end() +}) + +tape('indexOf buffers smaller and larger than the needle', (t) => { + const bl = new BufferList(['abcdefg', 'a', 'bcdefg', 'a', 'bcfgab']) + + t.equal(bl.indexOf('fgabc'), 5) + t.equal(bl.indexOf('fgabc', 6), 12) + t.equal(bl.indexOf('fgabc', 13), -1) + + t.end() +}) + +// only present in node 6+ +;(process.version.substr(1).split('.')[0] >= 6) && tape('indexOf latin1 and binary encoding', (t) => { + const b = new BufferList('abcdef') + + // test latin1 encoding + t.equal( + new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) + .indexOf('d', 0, 'latin1'), + 3 + ) + t.equal( + new BufferList(Buffer.from(b.toString('latin1'), 'latin1')) + .indexOf(Buffer.from('d', 'latin1'), 0, 'latin1'), + 3 + ) + t.equal( + new BufferList(Buffer.from('aa\u00e8aa', 'latin1')) + .indexOf('\u00e8', 'latin1'), + 2 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'latin1')) + .indexOf('\u00e8', 'latin1'), + 0 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'latin1')) + .indexOf(Buffer.from('\u00e8', 'latin1'), 'latin1'), + 0 + ) + + // test binary encoding + 
t.equal( + new BufferList(Buffer.from(b.toString('binary'), 'binary')) + .indexOf('d', 0, 'binary'), + 3 + ) + t.equal( + new BufferList(Buffer.from(b.toString('binary'), 'binary')) + .indexOf(Buffer.from('d', 'binary'), 0, 'binary'), + 3 + ) + t.equal( + new BufferList(Buffer.from('aa\u00e8aa', 'binary')) + .indexOf('\u00e8', 'binary'), + 2 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'binary')) + .indexOf('\u00e8', 'binary'), + 0 + ) + t.equal( + new BufferList(Buffer.from('\u00e8', 'binary')) + .indexOf(Buffer.from('\u00e8', 'binary'), 'binary'), + 0 + ) + + t.end() +}) + +tape('indexOf the entire nodejs10 buffer test suite', (t) => { + const b = new BufferList('abcdef') + const bufA = Buffer.from('a') + const bufBc = Buffer.from('bc') + const bufF = Buffer.from('f') + const bufZ = Buffer.from('z') + + const stringComparison = 'abcdef' + + t.equal(b.indexOf('a'), 0) + t.equal(b.indexOf('a', 1), -1) + t.equal(b.indexOf('a', -1), -1) + t.equal(b.indexOf('a', -4), -1) + t.equal(b.indexOf('a', -b.length), 0) + t.equal(b.indexOf('a', NaN), 0) + t.equal(b.indexOf('a', -Infinity), 0) + t.equal(b.indexOf('a', Infinity), -1) + t.equal(b.indexOf('bc'), 1) + t.equal(b.indexOf('bc', 2), -1) + t.equal(b.indexOf('bc', -1), -1) + t.equal(b.indexOf('bc', -3), -1) + t.equal(b.indexOf('bc', -5), 1) + t.equal(b.indexOf('bc', NaN), 1) + t.equal(b.indexOf('bc', -Infinity), 1) + t.equal(b.indexOf('bc', Infinity), -1) + t.equal(b.indexOf('f'), b.length - 1) + t.equal(b.indexOf('z'), -1) + + // empty search tests + t.equal(b.indexOf(bufA), 0) + t.equal(b.indexOf(bufA, 1), -1) + t.equal(b.indexOf(bufA, -1), -1) + t.equal(b.indexOf(bufA, -4), -1) + t.equal(b.indexOf(bufA, -b.length), 0) + t.equal(b.indexOf(bufA, NaN), 0) + t.equal(b.indexOf(bufA, -Infinity), 0) + t.equal(b.indexOf(bufA, Infinity), -1) + t.equal(b.indexOf(bufBc), 1) + t.equal(b.indexOf(bufBc, 2), -1) + t.equal(b.indexOf(bufBc, -1), -1) + t.equal(b.indexOf(bufBc, -3), -1) + t.equal(b.indexOf(bufBc, -5), 1) + t.equal(b.indexOf(bufBc, NaN), 1) + t.equal(b.indexOf(bufBc, -Infinity), 1) + t.equal(b.indexOf(bufBc, Infinity), -1) + t.equal(b.indexOf(bufF), b.length - 1) + t.equal(b.indexOf(bufZ), -1) + t.equal(b.indexOf(0x61), 0) + t.equal(b.indexOf(0x61, 1), -1) + t.equal(b.indexOf(0x61, -1), -1) + t.equal(b.indexOf(0x61, -4), -1) + t.equal(b.indexOf(0x61, -b.length), 0) + t.equal(b.indexOf(0x61, NaN), 0) + t.equal(b.indexOf(0x61, -Infinity), 0) + t.equal(b.indexOf(0x61, Infinity), -1) + t.equal(b.indexOf(0x0), -1) + + // test offsets + t.equal(b.indexOf('d', 2), 3) + t.equal(b.indexOf('f', 5), 5) + t.equal(b.indexOf('f', -1), 5) + t.equal(b.indexOf('f', 6), -1) + + t.equal(b.indexOf(Buffer.from('d'), 2), 3) + t.equal(b.indexOf(Buffer.from('f'), 5), 5) + t.equal(b.indexOf(Buffer.from('f'), -1), 5) + t.equal(b.indexOf(Buffer.from('f'), 6), -1) + + t.equal(Buffer.from('ff').indexOf(Buffer.from('f'), 1, 'ucs2'), -1) + + // test invalid and uppercase encoding + t.equal(b.indexOf('b', 'utf8'), 1) + t.equal(b.indexOf('b', 'UTF8'), 1) + t.equal(b.indexOf('62', 'HEX'), 1) + t.throws(() => b.indexOf('bad', 'enc'), TypeError) + + // test hex encoding + t.equal( + Buffer.from(b.toString('hex'), 'hex') + .indexOf('64', 0, 'hex'), + 3 + ) + t.equal( + Buffer.from(b.toString('hex'), 'hex') + .indexOf(Buffer.from('64', 'hex'), 0, 'hex'), + 3 + ) + + // test base64 encoding + t.equal( + Buffer.from(b.toString('base64'), 'base64') + .indexOf('ZA==', 0, 'base64'), + 3 + ) + t.equal( + Buffer.from(b.toString('base64'), 'base64') + .indexOf(Buffer.from('ZA==', 
'base64'), 0, 'base64'), + 3 + ) + + // test ascii encoding + t.equal( + Buffer.from(b.toString('ascii'), 'ascii') + .indexOf('d', 0, 'ascii'), + 3 + ) + t.equal( + Buffer.from(b.toString('ascii'), 'ascii') + .indexOf(Buffer.from('d', 'ascii'), 0, 'ascii'), + 3 + ) + + // test optional offset with passed encoding + t.equal(Buffer.from('aaaa0').indexOf('30', 'hex'), 4) + t.equal(Buffer.from('aaaa00a').indexOf('3030', 'hex'), 4) + + { + // test usc2 encoding + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') + + t.equal(8, twoByteString.indexOf('\u0395', 4, 'ucs2')) + t.equal(6, twoByteString.indexOf('\u03a3', -4, 'ucs2')) + t.equal(4, twoByteString.indexOf('\u03a3', -6, 'ucs2')) + t.equal(4, twoByteString.indexOf( + Buffer.from('\u03a3', 'ucs2'), -6, 'ucs2')) + t.equal(-1, twoByteString.indexOf('\u03a3', -2, 'ucs2')) + } + + const mixedByteStringUcs2 = + Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395', 'ucs2') + + t.equal(6, mixedByteStringUcs2.indexOf('bc', 0, 'ucs2')) + t.equal(10, mixedByteStringUcs2.indexOf('\u03a3', 0, 'ucs2')) + t.equal(-1, mixedByteStringUcs2.indexOf('\u0396', 0, 'ucs2')) + + t.equal( + 6, mixedByteStringUcs2.indexOf(Buffer.from('bc', 'ucs2'), 0, 'ucs2')) + t.equal( + 10, mixedByteStringUcs2.indexOf(Buffer.from('\u03a3', 'ucs2'), 0, 'ucs2')) + t.equal( + -1, mixedByteStringUcs2.indexOf(Buffer.from('\u0396', 'ucs2'), 0, 'ucs2')) + + { + const twoByteString = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'ucs2') + + // Test single char pattern + t.equal(0, twoByteString.indexOf('\u039a', 0, 'ucs2')) + let index = twoByteString.indexOf('\u0391', 0, 'ucs2') + t.equal(2, index, `Alpha - at index ${index}`) + index = twoByteString.indexOf('\u03a3', 0, 'ucs2') + t.equal(4, index, `First Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3', 6, 'ucs2') + t.equal(6, index, `Second Sigma - at index ${index}`) + index = twoByteString.indexOf('\u0395', 0, 'ucs2') + t.equal(8, index, `Epsilon - at index ${index}`) + index = twoByteString.indexOf('\u0392', 0, 'ucs2') + t.equal(-1, index, `Not beta - at index ${index}`) + + // Test multi-char pattern + index = twoByteString.indexOf('\u039a\u0391', 0, 'ucs2') + t.equal(0, index, `Lambda Alpha - at index ${index}`) + index = twoByteString.indexOf('\u0391\u03a3', 0, 'ucs2') + t.equal(2, index, `Alpha Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3\u03a3', 0, 'ucs2') + t.equal(4, index, `Sigma Sigma - at index ${index}`) + index = twoByteString.indexOf('\u03a3\u0395', 0, 'ucs2') + t.equal(6, index, `Sigma Epsilon - at index ${index}`) + } + + const mixedByteStringUtf8 = Buffer.from('\u039a\u0391abc\u03a3\u03a3\u0395') + + t.equal(5, mixedByteStringUtf8.indexOf('bc')) + t.equal(5, mixedByteStringUtf8.indexOf('bc', 5)) + t.equal(5, mixedByteStringUtf8.indexOf('bc', -8)) + t.equal(7, mixedByteStringUtf8.indexOf('\u03a3')) + t.equal(-1, mixedByteStringUtf8.indexOf('\u0396')) + + // Test complex string indexOf algorithms. Only trigger for long strings. + // Long string that isn't a simple repeat of a shorter string. 
+ let longString = 'A' + for (let i = 66; i < 76; i++) { // from 'B' to 'K' + longString = longString + String.fromCharCode(i) + longString + } + + const longBufferString = Buffer.from(longString) + + // pattern of 15 chars, repeated every 16 chars in long + let pattern = 'ABACABADABACABA' + for (let i = 0; i < longBufferString.length - pattern.length; i += 7) { + const index = longBufferString.indexOf(pattern, i) + t.equal((i + 15) & ~0xf, index, + `Long ABACABA...-string at index ${i}`) + } + + let index = longBufferString.indexOf('AJABACA') + t.equal(510, index, `Long AJABACA, First J - at index ${index}`) + index = longBufferString.indexOf('AJABACA', 511) + t.equal(1534, index, `Long AJABACA, Second J - at index ${index}`) + + pattern = 'JABACABADABACABA' + index = longBufferString.indexOf(pattern) + t.equal(511, index, `Long JABACABA..., First J - at index ${index}`) + index = longBufferString.indexOf(pattern, 512) + t.equal( + 1535, index, `Long JABACABA..., Second J - at index ${index}`) + + // Search for a non-ASCII string in a pure ASCII string. + const asciiString = Buffer.from( + 'somethingnotatallsinisterwhichalsoworks') + t.equal(-1, asciiString.indexOf('\x2061')) + t.equal(3, asciiString.indexOf('eth', 0)) + + // Search in string containing many non-ASCII chars. + const allCodePoints = [] + for (let i = 0; i < 65536; i++) { + allCodePoints[i] = i + } + + const allCharsString = String.fromCharCode.apply(String, allCodePoints) + const allCharsBufferUtf8 = Buffer.from(allCharsString) + const allCharsBufferUcs2 = Buffer.from(allCharsString, 'ucs2') + + // Search for string long enough to trigger complex search with ASCII pattern + // and UC16 subject. + t.equal(-1, allCharsBufferUtf8.indexOf('notfound')) + t.equal(-1, allCharsBufferUcs2.indexOf('notfound')) + + // Needle is longer than haystack, but only because it's encoded as UTF-16 + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'ucs2'), -1) + + t.equal(Buffer.from('aaaa').indexOf('a'.repeat(4), 'utf8'), 0) + t.equal(Buffer.from('aaaa').indexOf('你好', 'ucs2'), -1) + + // Haystack has odd length, but the needle is UCS2. + t.equal(Buffer.from('aaaaa').indexOf('b', 'ucs2'), -1) + + { + // Find substrings in Utf8. + const lengths = [1, 3, 15] // Single char, simple and complex. + const indices = [0x5, 0x60, 0x400, 0x680, 0x7ee, 0xFF02, 0x16610, 0x2f77b] + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { + for (let i = 0; i < indices.length; i++) { + const index = indices[i] + let length = lengths[lengthIndex] + + if (index + length > 0x7F) { + length = 2 * length + } + + if (index + length > 0x7FF) { + length = 3 * length + } + + if (index + length > 0xFFFF) { + length = 4 * length + } + + const patternBufferUtf8 = allCharsBufferUtf8.slice(index, index + length) + t.equal(index, allCharsBufferUtf8.indexOf(patternBufferUtf8)) + + const patternStringUtf8 = patternBufferUtf8.toString() + t.equal(index, allCharsBufferUtf8.indexOf(patternStringUtf8)) + } + } + } + + { + // Find substrings in Usc2. + const lengths = [2, 4, 16] // Single char, simple and complex. 
+ const indices = [0x5, 0x65, 0x105, 0x205, 0x285, 0x2005, 0x2085, 0xfff0] + + for (let lengthIndex = 0; lengthIndex < lengths.length; lengthIndex++) { + for (let i = 0; i < indices.length; i++) { + const index = indices[i] * 2 + const length = lengths[lengthIndex] + + const patternBufferUcs2 = + allCharsBufferUcs2.slice(index, index + length) + t.equal( + index, allCharsBufferUcs2.indexOf(patternBufferUcs2, 0, 'ucs2')) + + const patternStringUcs2 = patternBufferUcs2.toString('ucs2') + t.equal( + index, allCharsBufferUcs2.indexOf(patternStringUcs2, 0, 'ucs2')) + } + } + } + + [ + () => {}, + {}, + [] + ].forEach((val) => { + t.throws(() => b.indexOf(val), TypeError, `"${JSON.stringify(val)}" should throw`) + }) + + // Test weird offset arguments. + // The following offsets coerce to NaN or 0, searching the whole Buffer + t.equal(b.indexOf('b', undefined), 1) + t.equal(b.indexOf('b', {}), 1) + t.equal(b.indexOf('b', 0), 1) + t.equal(b.indexOf('b', null), 1) + t.equal(b.indexOf('b', []), 1) + + // The following offset coerces to 2, in other words +[2] === 2 + t.equal(b.indexOf('b', [2]), -1) + + // Behavior should match String.indexOf() + t.equal( + b.indexOf('b', undefined), + stringComparison.indexOf('b', undefined)) + t.equal( + b.indexOf('b', {}), + stringComparison.indexOf('b', {})) + t.equal( + b.indexOf('b', 0), + stringComparison.indexOf('b', 0)) + t.equal( + b.indexOf('b', null), + stringComparison.indexOf('b', null)) + t.equal( + b.indexOf('b', []), + stringComparison.indexOf('b', [])) + t.equal( + b.indexOf('b', [2]), + stringComparison.indexOf('b', [2])) + + // test truncation of Number arguments to uint8 + { + const buf = Buffer.from('this is a test') + + t.equal(buf.indexOf(0x6973), 3) + t.equal(buf.indexOf(0x697320), 4) + t.equal(buf.indexOf(0x69732069), 2) + t.equal(buf.indexOf(0x697374657374), 0) + t.equal(buf.indexOf(0x69737374), 0) + t.equal(buf.indexOf(0x69737465), 11) + t.equal(buf.indexOf(0x69737465), 11) + t.equal(buf.indexOf(-140), 0) + t.equal(buf.indexOf(-152), 1) + t.equal(buf.indexOf(0xff), -1) + t.equal(buf.indexOf(0xffff), -1) + } + + // Test that Uint8Array arguments are okay. 
+ { + const needle = new Uint8Array([0x66, 0x6f, 0x6f]) + const haystack = new BufferList(Buffer.from('a foo b foo')) + t.equal(haystack.indexOf(needle), 2) + } + + t.end() +}) diff --git a/miniprogram/node_modules/bl/test/isBufferList.js b/miniprogram/node_modules/bl/test/isBufferList.js new file mode 100644 index 00000000..9d895d59 --- /dev/null +++ b/miniprogram/node_modules/bl/test/isBufferList.js @@ -0,0 +1,32 @@ +'use strict' + +const tape = require('tape') +const { BufferList, BufferListStream } = require('../') +const { Buffer } = require('buffer') + +tape('isBufferList positives', (t) => { + t.ok(BufferList.isBufferList(new BufferList())) + t.ok(BufferList.isBufferList(new BufferListStream())) + + t.end() +}) + +tape('isBufferList negatives', (t) => { + const types = [ + null, + undefined, + NaN, + true, + false, + {}, + [], + Buffer.alloc(0), + [Buffer.alloc(0)] + ] + + for (const obj of types) { + t.notOk(BufferList.isBufferList(obj)) + } + + t.end() +}) diff --git a/miniprogram/node_modules/bl/test/test.js b/miniprogram/node_modules/bl/test/test.js new file mode 100644 index 00000000..e523d0c3 --- /dev/null +++ b/miniprogram/node_modules/bl/test/test.js @@ -0,0 +1,869 @@ +'use strict' + +const tape = require('tape') +const crypto = require('crypto') +const fs = require('fs') +const path = require('path') +const BufferList = require('../') +const { Buffer } = require('buffer') + +const encodings = + ('hex utf8 utf-8 ascii binary base64' + + (process.browser ? '' : ' ucs2 ucs-2 utf16le utf-16le')).split(' ') + +require('./indexOf') +require('./isBufferList') +require('./convert') + +tape('single bytes from single buffer', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + t.equal(bl.get(-1), undefined) + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + t.equal(bl.get(4), undefined) + + t.end() +}) + +tape('single bytes from multiple buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.get(0), 97) + t.equal(bl.get(1), 98) + t.equal(bl.get(2), 99) + t.equal(bl.get(3), 100) + t.equal(bl.get(4), 101) + t.equal(bl.get(5), 102) + t.equal(bl.get(6), 103) + t.equal(bl.get(7), 104) + t.equal(bl.get(8), 105) + t.equal(bl.get(9), 106) + + t.end() +}) + +tape('multi bytes from single buffer', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + + t.equal(bl.length, 4) + + t.equal(bl.slice(0, 4).toString('ascii'), 'abcd') + t.equal(bl.slice(0, 3).toString('ascii'), 'abc') + t.equal(bl.slice(1, 4).toString('ascii'), 'bcd') + t.equal(bl.slice(-4, -1).toString('ascii'), 'abc') + + t.end() +}) + +tape('multi bytes from single buffer (negative indexes)', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('buffer')) + + t.equal(bl.length, 6) + + t.equal(bl.slice(-6, -1).toString('ascii'), 'buffe') + t.equal(bl.slice(-6, -2).toString('ascii'), 'buff') + t.equal(bl.slice(-5, -2).toString('ascii'), 'uff') + + t.end() +}) + +tape('multiple bytes from multiple buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + t.equal(bl.slice(3, 10).toString('ascii'), 
'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + t.equal(bl.slice(-7, -4).toString('ascii'), 'def') + + t.end() +}) + +tape('multiple bytes from multiple buffer lists', function (t) { + const bl = new BufferList() + + bl.append(new BufferList([Buffer.from('abcd'), Buffer.from('efg')])) + bl.append(new BufferList([Buffer.from('hi'), Buffer.from('j')])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +// same data as previous test, just using nested constructors +tape('multiple bytes from crazy nested buffer lists', function (t) { + const bl = new BufferList() + + bl.append(new BufferList([ + new BufferList([ + new BufferList(Buffer.from('abc')), + Buffer.from('d'), + new BufferList(Buffer.from('efg')) + ]), + new BufferList([Buffer.from('hi')]), + new BufferList(Buffer.from('j')) + ])) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + t.equal(bl.slice(3, 10).toString('ascii'), 'defghij') + t.equal(bl.slice(3, 6).toString('ascii'), 'def') + t.equal(bl.slice(3, 8).toString('ascii'), 'defgh') + t.equal(bl.slice(5, 10).toString('ascii'), 'fghij') + + t.end() +}) + +tape('append accepts arrays of Buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abc')) + bl.append([Buffer.from('def')]) + bl.append([Buffer.from('ghi'), Buffer.from('jkl')]) + bl.append([Buffer.from('mnop'), Buffer.from('qrstu'), Buffer.from('vwxyz')]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('append accepts arrays of Uint8Arrays', function (t) { + const bl = new BufferList() + + bl.append(new Uint8Array([97, 98, 99])) + bl.append([Uint8Array.from([100, 101, 102])]) + bl.append([new Uint8Array([103, 104, 105]), new Uint8Array([106, 107, 108])]) + bl.append([new Uint8Array([109, 110, 111, 112]), new Uint8Array([113, 114, 115, 116, 117]), new Uint8Array([118, 119, 120, 121, 122])]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('append accepts arrays of BufferLists', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abc')) + bl.append([new BufferList('def')]) + bl.append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')])) + bl.append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), Buffer.from('vwxyz')])]) + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('append chainable', function (t) { + const bl = new BufferList() + + t.ok(bl.append(Buffer.from('abcd')) === bl) + t.ok(bl.append([Buffer.from('abcd')]) === bl) + t.ok(bl.append(new BufferList(Buffer.from('abcd'))) === bl) + t.ok(bl.append([new BufferList(Buffer.from('abcd'))]) === bl) + + t.end() +}) + +tape('append chainable (test results)', function (t) { + const bl = new BufferList('abc') + .append([new BufferList('def')]) + .append(new BufferList([Buffer.from('ghi'), new BufferList('jkl')])) + .append([Buffer.from('mnop'), new BufferList([Buffer.from('qrstu'), Buffer.from('vwxyz')])]) + + t.equal(bl.length, 26) + t.equal(bl.slice().toString('ascii'), 
'abcdefghijklmnopqrstuvwxyz') + + t.end() +}) + +tape('consuming from multiple buffers', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.length, 10) + + t.equal(bl.slice(0, 10).toString('ascii'), 'abcdefghij') + + bl.consume(3) + t.equal(bl.length, 7) + t.equal(bl.slice(0, 7).toString('ascii'), 'defghij') + + bl.consume(2) + t.equal(bl.length, 5) + t.equal(bl.slice(0, 5).toString('ascii'), 'fghij') + + bl.consume(1) + t.equal(bl.length, 4) + t.equal(bl.slice(0, 4).toString('ascii'), 'ghij') + + bl.consume(1) + t.equal(bl.length, 3) + t.equal(bl.slice(0, 3).toString('ascii'), 'hij') + + bl.consume(2) + t.equal(bl.length, 1) + t.equal(bl.slice(0, 1).toString('ascii'), 'j') + + t.end() +}) + +tape('complete consumption', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('a')) + bl.append(Buffer.from('b')) + + bl.consume(2) + + t.equal(bl.length, 0) + t.equal(bl._bufs.length, 0) + + t.end() +}) + +tape('test readUInt8 / readInt8', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x1 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt8(), 0x1) + t.equal(bl.readUInt8(2), 0x3) + t.equal(bl.readInt8(2), 0x3) + t.equal(bl.readUInt8(3), 0x4) + t.equal(bl.readInt8(3), 0x4) + t.equal(bl.readUInt8(4), 0x23) + t.equal(bl.readInt8(4), 0x23) + t.equal(bl.readUInt8(5), 0x42) + t.equal(bl.readInt8(5), 0x42) + + t.end() +}) + +tape('test readUInt16LE / readUInt16BE / readInt16LE / readInt16BE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x1 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt16BE(), 0x0100) + t.equal(bl.readUInt16LE(), 0x0001) + t.equal(bl.readUInt16BE(2), 0x0304) + t.equal(bl.readUInt16LE(2), 0x0403) + t.equal(bl.readInt16BE(2), 0x0304) + t.equal(bl.readInt16LE(2), 0x0403) + t.equal(bl.readUInt16BE(3), 0x0423) + t.equal(bl.readUInt16LE(3), 0x2304) + t.equal(bl.readInt16BE(3), 0x0423) + t.equal(bl.readInt16LE(3), 0x2304) + t.equal(bl.readUInt16BE(4), 0x2342) + t.equal(bl.readUInt16LE(4), 0x4223) + t.equal(bl.readInt16BE(4), 0x2342) + t.equal(bl.readInt16LE(4), 0x4223) + + t.end() +}) + +tape('test readUInt32LE / readUInt32BE / readInt32LE / readInt32BE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x1 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + t.equal(bl.readUInt32BE(), 0x01000304) + t.equal(bl.readUInt32LE(), 0x04030001) + t.equal(bl.readUInt32BE(2), 0x03042342) + t.equal(bl.readUInt32LE(2), 0x42230403) + t.equal(bl.readInt32BE(2), 0x03042342) + t.equal(bl.readInt32LE(2), 0x42230403) + + t.end() +}) + +tape('test readUIntLE / readUIntBE / readIntLE / readIntBE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf2[0] = 0x2 + buf2[1] = 0x3 + buf2[2] = 0x4 + buf3[0] = 0x23 + buf3[1] = 0x42 + buf3[2] = 0x61 + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + 
+ t.equal(bl.readUIntBE(1, 1), 0x02) + t.equal(bl.readUIntBE(1, 2), 0x0203) + t.equal(bl.readUIntBE(1, 3), 0x020304) + t.equal(bl.readUIntBE(1, 4), 0x02030423) + t.equal(bl.readUIntBE(1, 5), 0x0203042342) + t.equal(bl.readUIntBE(1, 6), 0x020304234261) + t.equal(bl.readUIntLE(1, 1), 0x02) + t.equal(bl.readUIntLE(1, 2), 0x0302) + t.equal(bl.readUIntLE(1, 3), 0x040302) + t.equal(bl.readUIntLE(1, 4), 0x23040302) + t.equal(bl.readUIntLE(1, 5), 0x4223040302) + t.equal(bl.readUIntLE(1, 6), 0x614223040302) + t.equal(bl.readIntBE(1, 1), 0x02) + t.equal(bl.readIntBE(1, 2), 0x0203) + t.equal(bl.readIntBE(1, 3), 0x020304) + t.equal(bl.readIntBE(1, 4), 0x02030423) + t.equal(bl.readIntBE(1, 5), 0x0203042342) + t.equal(bl.readIntBE(1, 6), 0x020304234261) + t.equal(bl.readIntLE(1, 1), 0x02) + t.equal(bl.readIntLE(1, 2), 0x0302) + t.equal(bl.readIntLE(1, 3), 0x040302) + t.equal(bl.readIntLE(1, 4), 0x23040302) + t.equal(bl.readIntLE(1, 5), 0x4223040302) + t.equal(bl.readIntLE(1, 6), 0x614223040302) + + t.end() +}) + +tape('test readFloatLE / readFloatBE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(3) + const bl = new BufferList() + + buf1[0] = 0x01 + buf2[1] = 0x00 + buf2[2] = 0x00 + buf3[0] = 0x80 + buf3[1] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + const canonical = Buffer.concat([buf1, buf2, buf3]) + t.equal(bl.readFloatLE(), canonical.readFloatLE()) + t.equal(bl.readFloatBE(), canonical.readFloatBE()) + t.equal(bl.readFloatLE(2), canonical.readFloatLE(2)) + t.equal(bl.readFloatBE(2), canonical.readFloatBE(2)) + + t.end() +}) + +tape('test readDoubleLE / readDoubleBE', function (t) { + const buf1 = Buffer.alloc(1) + const buf2 = Buffer.alloc(3) + const buf3 = Buffer.alloc(10) + const bl = new BufferList() + + buf1[0] = 0x01 + buf2[1] = 0x55 + buf2[2] = 0x55 + buf3[0] = 0x55 + buf3[1] = 0x55 + buf3[2] = 0x55 + buf3[3] = 0x55 + buf3[4] = 0xd5 + buf3[5] = 0x3f + + bl.append(buf1) + bl.append(buf2) + bl.append(buf3) + + const canonical = Buffer.concat([buf1, buf2, buf3]) + t.equal(bl.readDoubleBE(), canonical.readDoubleBE()) + t.equal(bl.readDoubleLE(), canonical.readDoubleLE()) + t.equal(bl.readDoubleBE(2), canonical.readDoubleBE(2)) + t.equal(bl.readDoubleLE(2), canonical.readDoubleLE(2)) + + t.end() +}) + +tape('test toString', function (t) { + const bl = new BufferList() + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + + t.equal(bl.toString('ascii', 0, 10), 'abcdefghij') + t.equal(bl.toString('ascii', 3, 10), 'defghij') + t.equal(bl.toString('ascii', 3, 6), 'def') + t.equal(bl.toString('ascii', 3, 8), 'defgh') + t.equal(bl.toString('ascii', 5, 10), 'fghij') + + t.end() +}) + +tape('test toString encoding', function (t) { + const bl = new BufferList() + const b = Buffer.from('abcdefghij\xff\x00') + + bl.append(Buffer.from('abcd')) + bl.append(Buffer.from('efg')) + bl.append(Buffer.from('hi')) + bl.append(Buffer.from('j')) + bl.append(Buffer.from('\xff\x00')) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc), enc) + }) + + t.end() +}) + +tape('uninitialized memory', function (t) { + const secret = crypto.randomBytes(256) + for (let i = 0; i < 1e6; i++) { + const clone = Buffer.from(secret) + const bl = new BufferList() + bl.append(Buffer.from('a')) + bl.consume(-1024) + const buf = bl.slice(1) + if (buf.indexOf(clone) !== -1) { + t.fail(`Match (at ${i})`) + break + } + } + t.end() +}) + +!process.browser && 
tape('test stream', function (t) { + const random = crypto.randomBytes(65534) + + const bl = new BufferList((err, buf) => { + t.ok(Buffer.isBuffer(buf)) + t.ok(err === null) + t.ok(random.equals(bl.slice())) + t.ok(random.equals(buf.slice())) + + bl.pipe(fs.createWriteStream('/tmp/bl_test_rnd_out.dat')) + .on('close', function () { + const rndhash = crypto.createHash('md5').update(random).digest('hex') + const md5sum = crypto.createHash('md5') + const s = fs.createReadStream('/tmp/bl_test_rnd_out.dat') + + s.on('data', md5sum.update.bind(md5sum)) + s.on('end', function () { + t.equal(rndhash, md5sum.digest('hex'), 'woohoo! correct hash!') + t.end() + }) + }) + }) + + fs.writeFileSync('/tmp/bl_test_rnd.dat', random) + fs.createReadStream('/tmp/bl_test_rnd.dat').pipe(bl) +}) + +tape('instantiation with Buffer', function (t) { + const buf = crypto.randomBytes(1024) + const buf2 = crypto.randomBytes(1024) + let b = BufferList(buf) + + t.equal(buf.toString('hex'), b.slice().toString('hex'), 'same buffer') + b = BufferList([buf, buf2]) + t.equal(b.slice().toString('hex'), Buffer.concat([buf, buf2]).toString('hex'), 'same buffer') + + t.end() +}) + +tape('test String appendage', function (t) { + const bl = new BufferList() + const b = Buffer.from('abcdefghij\xff\x00') + + bl.append('abcd') + bl.append('efg') + bl.append('hi') + bl.append('j') + bl.append('\xff\x00') + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('test Number appendage', function (t) { + const bl = new BufferList() + const b = Buffer.from('1234567890') + + bl.append(1234) + bl.append(567) + bl.append(89) + bl.append(0) + + encodings.forEach(function (enc) { + t.equal(bl.toString(enc), b.toString(enc)) + }) + + t.end() +}) + +tape('write nothing, should get empty buffer', function (t) { + t.plan(3) + BufferList(function (err, data) { + t.notOk(err, 'no error') + t.ok(Buffer.isBuffer(data), 'got a buffer') + t.equal(0, data.length, 'got a zero-length buffer') + t.end() + }).end() +}) + +tape('unicode string', function (t) { + t.plan(2) + + const inp1 = '\u2600' + const inp2 = '\u2603' + const exp = inp1 + ' and ' + inp2 + const bl = BufferList() + + bl.write(inp1) + bl.write(' and ') + bl.write(inp2) + t.equal(exp, bl.toString()) + t.equal(Buffer.from(exp).toString('hex'), bl.toString('hex')) +}) + +tape('should emit finish', function (t) { + const source = BufferList() + const dest = BufferList() + + source.write('hello') + source.pipe(dest) + + dest.on('finish', function () { + t.equal(dest.toString('utf8'), 'hello') + t.end() + }) +}) + +tape('basic copy', function (t) { + const buf = crypto.randomBytes(1024) + const buf2 = Buffer.alloc(1024) + const b = BufferList(buf) + + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy after many appends', function (t) { + const buf = crypto.randomBytes(512) + const buf2 = Buffer.alloc(1024) + const b = BufferList(buf) + + b.append(buf) + b.copy(buf2) + t.equal(b.slice().toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy at a precise position', function (t) { + const buf = crypto.randomBytes(1004) + const buf2 = Buffer.alloc(1024) + const b = BufferList(buf) + + b.copy(buf2, 20) + t.equal(b.slice().toString('hex'), buf2.slice(20).toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy starting from a precise location', function (t) { + const buf = crypto.randomBytes(10) + const buf2 = Buffer.alloc(5) + const b = 
BufferList(buf) + + b.copy(buf2, 0, 5) + t.equal(b.slice(5).toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy in an interval', function (t) { + const rnd = crypto.randomBytes(10) + const b = BufferList(rnd) // put the random bytes there + const actual = Buffer.alloc(3) + const expected = Buffer.alloc(3) + + rnd.copy(expected, 0, 5, 8) + b.copy(actual, 0, 5, 8) + + t.equal(actual.toString('hex'), expected.toString('hex'), 'same buffer') + + t.end() +}) + +tape('copy an interval between two buffers', function (t) { + const buf = crypto.randomBytes(10) + const buf2 = Buffer.alloc(10) + const b = BufferList(buf) + + b.append(buf) + b.copy(buf2, 0, 5, 15) + + t.equal(b.slice(5, 15).toString('hex'), buf2.toString('hex'), 'same buffer') + + t.end() +}) + +tape('shallow slice across buffer boundaries', function (t) { + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(3, 13).toString(), 'stSecondTh') + + t.end() +}) + +tape('shallow slice within single buffer', function (t) { + t.plan(2) + + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(5, 10).toString(), 'Secon') + t.equal(bl.shallowSlice(7, 10).toString(), 'con') + + t.end() +}) + +tape('shallow slice single buffer', function (t) { + t.plan(3) + + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice(0, 5).toString(), 'First') + t.equal(bl.shallowSlice(5, 11).toString(), 'Second') + t.equal(bl.shallowSlice(11, 16).toString(), 'Third') +}) + +tape('shallow slice with negative or omitted indices', function (t) { + t.plan(4) + + const bl = new BufferList(['First', 'Second', 'Third']) + + t.equal(bl.shallowSlice().toString(), 'FirstSecondThird') + t.equal(bl.shallowSlice(5).toString(), 'SecondThird') + t.equal(bl.shallowSlice(5, -3).toString(), 'SecondTh') + t.equal(bl.shallowSlice(-8).toString(), 'ondThird') +}) + +tape('shallow slice does not make a copy', function (t) { + t.plan(1) + + const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + const bl = (new BufferList(buffers)).shallowSlice(5, -3) + + buffers[1].fill('h') + buffers[2].fill('h') + + t.equal(bl.toString(), 'hhhhhhhh') +}) + +tape('shallow slice with 0 length', function (t) { + t.plan(1) + + const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + const bl = (new BufferList(buffers)).shallowSlice(0, 0) + + t.equal(bl.length, 0) +}) + +tape('shallow slice with 0 length from middle', function (t) { + t.plan(1) + + const buffers = [Buffer.from('First'), Buffer.from('Second'), Buffer.from('Third')] + const bl = (new BufferList(buffers)).shallowSlice(10, 10) + + t.equal(bl.length, 0) +}) + +tape('duplicate', function (t) { + t.plan(2) + + const bl = new BufferList('abcdefghij\xff\x00') + const dup = bl.duplicate() + + t.equal(bl.prototype, dup.prototype) + t.equal(bl.toString('hex'), dup.toString('hex')) +}) + +tape('destroy no pipe', function (t) { + t.plan(2) + + const bl = new BufferList('alsdkfja;lsdkfja;lsdk') + + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +tape('destroy with error', function (t) { + t.plan(3) + + const bl = new BufferList('alsdkfja;lsdkfja;lsdk') + const err = new Error('kaboom') + + bl.destroy(err) + bl.on('error', function (_err) { + t.equal(_err, err) + }) + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +!process.browser && tape('destroy with pipe before read end', function (t) { + t.plan(2) + + const bl = new BufferList() + 
fs.createReadStream(path.join(__dirname, '/test.js')) + .pipe(bl) + + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) +}) + +!process.browser && tape('destroy with pipe before read end with race', function (t) { + t.plan(2) + + const bl = new BufferList() + + fs.createReadStream(path.join(__dirname, '/test.js')) + .pipe(bl) + + setTimeout(function () { + bl.destroy() + setTimeout(function () { + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + }, 500) + }, 500) +}) + +!process.browser && tape('destroy with pipe after read end', function (t) { + t.plan(2) + + const bl = new BufferList() + + fs.createReadStream(path.join(__dirname, '/test.js')) + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.destroy() + + t.equal(bl._bufs.length, 0) + t.equal(bl.length, 0) + } +}) + +!process.browser && tape('destroy with pipe while writing to a destination', function (t) { + t.plan(4) + + const bl = new BufferList() + const ds = new BufferList() + + fs.createReadStream(path.join(__dirname, '/test.js')) + .on('end', onEnd) + .pipe(bl) + + function onEnd () { + bl.pipe(ds) + + setTimeout(function () { + bl.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + + ds.destroy() + + t.equals(bl._bufs.length, 0) + t.equals(bl.length, 0) + }, 100) + } +}) + +!process.browser && tape('handle error', function (t) { + t.plan(2) + + fs.createReadStream('/does/not/exist').pipe(BufferList(function (err, data) { + t.ok(err instanceof Error, 'has error') + t.notOk(data, 'no data') + })) +}) diff --git a/miniprogram/node_modules/buffer/AUTHORS.md b/miniprogram/node_modules/buffer/AUTHORS.md new file mode 100644 index 00000000..22eb1712 --- /dev/null +++ b/miniprogram/node_modules/buffer/AUTHORS.md @@ -0,0 +1,70 @@ +# Authors + +#### Ordered by first contribution. 
+ +- Romain Beauxis (toots@rastageeks.org) +- Tobias Koppers (tobias.koppers@googlemail.com) +- Janus (ysangkok@gmail.com) +- Rainer Dreyer (rdrey1@gmail.com) +- Tõnis Tiigi (tonistiigi@gmail.com) +- James Halliday (mail@substack.net) +- Michael Williamson (mike@zwobble.org) +- elliottcable (github@elliottcable.name) +- rafael (rvalle@livelens.net) +- Andrew Kelley (superjoe30@gmail.com) +- Andreas Madsen (amwebdk@gmail.com) +- Mike Brevoort (mike.brevoort@pearson.com) +- Brian White (mscdex@mscdex.net) +- Feross Aboukhadijeh (feross@feross.org) +- Ruben Verborgh (ruben@verborgh.org) +- eliang (eliang.cs@gmail.com) +- Jesse Tane (jesse.tane@gmail.com) +- Alfonso Boza (alfonso@cloud.com) +- Mathias Buus (mathiasbuus@gmail.com) +- Devon Govett (devongovett@gmail.com) +- Daniel Cousens (github@dcousens.com) +- Joseph Dykstra (josephdykstra@gmail.com) +- Parsha Pourkhomami (parshap+git@gmail.com) +- Damjan Košir (damjan.kosir@gmail.com) +- daverayment (dave.rayment@gmail.com) +- kawanet (u-suke@kawa.net) +- Linus Unnebäck (linus@folkdatorn.se) +- Nolan Lawson (nolan.lawson@gmail.com) +- Calvin Metcalf (calvin.metcalf@gmail.com) +- Koki Takahashi (hakatasiloving@gmail.com) +- Guy Bedford (guybedford@gmail.com) +- Jan Schär (jscissr@gmail.com) +- RaulTsc (tomescu.raul@gmail.com) +- Matthieu Monsch (monsch@alum.mit.edu) +- Dan Ehrenberg (littledan@chromium.org) +- Kirill Fomichev (fanatid@ya.ru) +- Yusuke Kawasaki (u-suke@kawa.net) +- DC (dcposch@dcpos.ch) +- John-David Dalton (john.david.dalton@gmail.com) +- adventure-yunfei (adventure030@gmail.com) +- Emil Bay (github@tixz.dk) +- Sam Sudar (sudar.sam@gmail.com) +- Volker Mische (volker.mische@gmail.com) +- David Walton (support@geekstocks.com) +- Сковорода Никита Андреевич (chalkerx@gmail.com) +- greenkeeper[bot] (greenkeeper[bot]@users.noreply.github.com) +- ukstv (sergey.ukustov@machinomy.com) +- Renée Kooi (renee@kooi.me) +- ranbochen (ranbochen@qq.com) +- Vladimir Borovik (bobahbdb@gmail.com) +- greenkeeper[bot] (23040076+greenkeeper[bot]@users.noreply.github.com) +- kumavis (aaron@kumavis.me) +- Sergey Ukustov (sergey.ukustov@machinomy.com) +- Fei Liu (liu.feiwood@gmail.com) +- Blaine Bublitz (blaine.bublitz@gmail.com) +- clement (clement@seald.io) +- Koushik Dutta (koushd@gmail.com) +- Jordan Harband (ljharb@gmail.com) +- Niklas Mischkulnig (mischnic@users.noreply.github.com) +- Nikolai Vavilov (vvnicholas@gmail.com) +- Fedor Nezhivoi (gyzerok@users.noreply.github.com) +- Peter Newman (peternewman@users.noreply.github.com) +- mathmakgakpak (44949126+mathmakgakpak@users.noreply.github.com) +- jkkang (jkkang@smartauth.kr) + +#### Generated by bin/update-authors.sh. diff --git a/miniprogram/node_modules/buffer/LICENSE b/miniprogram/node_modules/buffer/LICENSE new file mode 100644 index 00000000..d6bf75dc --- /dev/null +++ b/miniprogram/node_modules/buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh, and other contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/miniprogram/node_modules/buffer/README.md b/miniprogram/node_modules/buffer/README.md new file mode 100644 index 00000000..9a23d7cf --- /dev/null +++ b/miniprogram/node_modules/buffer/README.md @@ -0,0 +1,410 @@ +# buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/buffer/master.svg +[travis-url]: https://travis-ci.org/feross/buffer +[npm-image]: https://img.shields.io/npm/v/buffer.svg +[npm-url]: https://npmjs.org/package/buffer +[downloads-image]: https://img.shields.io/npm/dm/buffer.svg +[downloads-url]: https://npmjs.org/package/buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### The buffer module from [node.js](https://nodejs.org/), for the browser. + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[saucelabs-image]: https://saucelabs.com/browser-matrix/buffer.svg +[saucelabs-url]: https://saucelabs.com/u/buffer + +With [browserify](http://browserify.org), simply `require('buffer')` or use the `Buffer` global and you will get this module. + +The goal is to provide an API that is 100% identical to +[node's Buffer API](https://nodejs.org/api/buffer.html). Read the +[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, +instance methods, and class methods that are supported. + +## features + +- Manipulate binary data like a boss, in all browsers! +- Super fast. Backed by Typed Arrays (`Uint8Array`/`ArrayBuffer`, not `Object`) +- Extremely small bundle size (**6.75KB minified + gzipped**, 51.9KB with comments) +- Excellent browser support (Chrome, Firefox, Edge, Safari 9+, IE 11, iOS 9+, Android, etc.) +- Preserves Node API exactly, with one minor difference (see below) +- Square-bracket `buf[4]` notation works! +- Does not modify any browser prototypes or put anything on `window` +- Comprehensive test suite (including all buffer tests from node.js core) + +## install + +To use this module directly (without browserify), install it: + +```bash +npm install buffer +``` + +This module was previously called **native-buffer-browserify**, but please use **buffer** +from now on. + +If you do not use a bundler, you can use the [standalone script](https://bundle.run/buffer). + +## usage + +The module's API is identical to node's `Buffer` API. Read the +[official docs](https://nodejs.org/api/buffer.html) for the full list of properties, +instance methods, and class methods that are supported. + +As mentioned above, `require('buffer')` or use the `Buffer` global with +[browserify](http://browserify.org) and this module will automatically be included +in your bundle. Almost any npm module will work in the browser, even if it assumes that +the node `Buffer` API will be available. 
+ +To depend on this module explicitly (without browserify), require it like this: + +```js +var Buffer = require('buffer/').Buffer // note: the trailing slash is important! +``` + +To require this module explicitly, use `require('buffer/')` which tells the node.js module +lookup algorithm (also used by browserify) to use the **npm module** named `buffer` +instead of the **node.js core** module named `buffer`! + + +## how does it work? + +The Buffer constructor returns instances of `Uint8Array` that have their prototype +changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of `Uint8Array`, +so the returned instances will have all the node `Buffer` methods and the +`Uint8Array` methods. Square bracket notation works as expected -- it returns a +single octet. + +The `Uint8Array` prototype remains unmodified. + + +## tracking the latest node api + +This module tracks the Buffer API in the latest (unstable) version of node.js. The Buffer +API is considered **stable** in the +[node stability index](https://nodejs.org/docs/latest/api/documentation.html#documentation_stability_index), +so it is unlikely that there will ever be breaking changes. +Nonetheless, when/if the Buffer API changes in node, this module's API will change +accordingly. + +## related packages + +- [`buffer-reverse`](https://www.npmjs.com/package/buffer-reverse) - Reverse a buffer +- [`buffer-xor`](https://www.npmjs.com/package/buffer-xor) - Bitwise xor a buffer +- [`is-buffer`](https://www.npmjs.com/package/is-buffer) - Determine if an object is a Buffer without including the whole `Buffer` package + +## conversion packages + +### convert typed array to buffer + +Use [`typedarray-to-buffer`](https://www.npmjs.com/package/typedarray-to-buffer) to convert any kind of typed array to a `Buffer`. Does not perform a copy, so it's super fast. + +### convert buffer to typed array + +`Buffer` is a subclass of `Uint8Array` (which is a typed array). So there is no need to explicitly convert to typed array. Just use the buffer as a `Uint8Array`. + +### convert blob to buffer + +Use [`blob-to-buffer`](https://www.npmjs.com/package/blob-to-buffer) to convert a `Blob` to a `Buffer`. + +### convert buffer to blob + +To convert a `Buffer` to a `Blob`, use the `Blob` constructor: + +```js +var blob = new Blob([ buffer ]) +``` + +Optionally, specify a mimetype: + +```js +var blob = new Blob([ buffer ], { type: 'text/html' }) +``` + +### convert arraybuffer to buffer + +To convert an `ArrayBuffer` to a `Buffer`, use the `Buffer.from` function. Does not perform a copy, so it's super fast. + +```js +var buffer = Buffer.from(arrayBuffer) +``` + +### convert buffer to arraybuffer + +To convert a `Buffer` to an `ArrayBuffer`, use the `.buffer` property (which is present on all `Uint8Array` objects): + +```js +var arrayBuffer = buffer.buffer.slice( + buffer.byteOffset, buffer.byteOffset + buffer.byteLength +) +``` + +Alternatively, use the [`to-arraybuffer`](https://www.npmjs.com/package/to-arraybuffer) module. + +## performance + +See perf tests in `/perf`. + +`BrowserBuffer` is the browser `buffer` module (this repo). `Uint8Array` is included as a +sanity check (since `BrowserBuffer` uses `Uint8Array` under the hood, `Uint8Array` will +always be at least a bit faster). Finally, `NodeBuffer` is the node.js buffer module, +which is included to compare against. + +NOTE: Performance has improved since these benchmarks were taken. PR welcome to update the README. 
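Before the benchmark tables, a small hedged illustration of the "how does it work?" section above; it is not part of the vendored README, and only uses the explicit `require('buffer/')` form shown earlier:

```js
// Minimal sketch (not part of this package): instances are Uint8Arrays whose
// prototype has been swapped to Buffer.prototype, as described above.
var Buffer = require('buffer/').Buffer // trailing slash selects the npm module

var buf = Buffer.from('abc')

console.log(buf instanceof Uint8Array)                        // true  -- Buffer subclasses Uint8Array
console.log(Object.getPrototypeOf(buf) === Buffer.prototype)  // true  -- prototype was swapped
console.log(buf[1])                                           // 98    -- square brackets return a single octet
console.log(new Uint8Array(1) instanceof Buffer)              // false -- Uint8Array.prototype stays unmodified
```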
+ +### Chrome 38 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 11,457,464 ops/sec | ±0.86% | 66 | ✓ | +| Uint8Array#bracket-notation | 10,824,332 ops/sec | ±0.74% | 65 | | +| | | | | +| BrowserBuffer#concat | 450,532 ops/sec | ±0.76% | 68 | | +| Uint8Array#concat | 1,368,911 ops/sec | ±1.50% | 62 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 903,001 ops/sec | ±0.96% | 67 | | +| Uint8Array#copy(16000) | 1,422,441 ops/sec | ±1.04% | 66 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 11,431,358 ops/sec | ±0.46% | 69 | | +| Uint8Array#copy(16) | 13,944,163 ops/sec | ±1.12% | 68 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 106,329 ops/sec | ±6.70% | 44 | | +| Uint8Array#new(16000) | 131,001 ops/sec | ±2.85% | 31 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,554,491 ops/sec | ±1.60% | 65 | | +| Uint8Array#new(16) | 6,623,930 ops/sec | ±1.66% | 65 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 112,830 ops/sec | ±0.51% | 69 | ✓ | +| DataView#getFloat64 | 93,500 ops/sec | ±0.57% | 68 | | +| | | | | +| BrowserBuffer#readFloatBE | 146,678 ops/sec | ±0.95% | 68 | ✓ | +| DataView#getFloat32 | 99,311 ops/sec | ±0.41% | 67 | | +| | | | | +| BrowserBuffer#readUInt32LE | 843,214 ops/sec | ±0.70% | 69 | ✓ | +| DataView#getUint32 | 103,024 ops/sec | ±0.64% | 67 | | +| | | | | +| BrowserBuffer#slice | 1,013,941 ops/sec | ±0.75% | 67 | | +| Uint8Array#subarray | 1,903,928 ops/sec | ±0.53% | 67 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 61,387 ops/sec | ±0.90% | 67 | | +| DataView#setFloat32 | 141,249 ops/sec | ±0.40% | 66 | ✓ | + + +### Firefox 33 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 20,800,421 ops/sec | ±1.84% | 60 | | +| Uint8Array#bracket-notation | 20,826,235 ops/sec | ±2.02% | 61 | ✓ | +| | | | | +| BrowserBuffer#concat | 153,076 ops/sec | ±2.32% | 61 | | +| Uint8Array#concat | 1,255,674 ops/sec | ±8.65% | 52 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 1,105,312 ops/sec | ±1.16% | 63 | | +| Uint8Array#copy(16000) | 1,615,911 ops/sec | ±0.55% | 66 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 16,357,599 ops/sec | ±0.73% | 68 | | +| Uint8Array#copy(16) | 31,436,281 ops/sec | ±1.05% | 68 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 52,995 ops/sec | ±6.01% | 35 | | +| Uint8Array#new(16000) | 87,686 ops/sec | ±5.68% | 45 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 252,031 ops/sec | ±1.61% | 66 | | +| Uint8Array#new(16) | 8,477,026 ops/sec | ±0.49% | 68 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 99,871 ops/sec | ±0.41% | 69 | | +| DataView#getFloat64 | 285,663 ops/sec | ±0.70% | 68 | ✓ | +| | | | | +| BrowserBuffer#readFloatBE | 115,540 ops/sec | ±0.42% | 69 | | +| DataView#getFloat32 | 288,722 ops/sec | ±0.82% | 68 | ✓ | +| | | | | +| BrowserBuffer#readUInt32LE | 633,926 ops/sec | ±1.08% | 67 | ✓ | +| DataView#getUint32 | 294,808 ops/sec | ±0.79% | 64 | | +| | | | | +| BrowserBuffer#slice | 349,425 ops/sec | ±0.46% | 69 | | +| Uint8Array#subarray | 5,965,819 ops/sec | ±0.60% | 65 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 59,980 ops/sec | ±0.41% | 67 | | +| DataView#setFloat32 | 317,634 ops/sec | ±0.63% | 68 | ✓ | + +### Safari 8 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,279,729 ops/sec | ±2.25% | 56 | ✓ | +| Uint8Array#bracket-notation | 10,030,767 ops/sec | 
±2.23% | 59 | | +| | | | | +| BrowserBuffer#concat | 144,138 ops/sec | ±1.38% | 65 | | +| Uint8Array#concat | 4,950,764 ops/sec | ±1.70% | 63 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 1,058,548 ops/sec | ±1.51% | 64 | | +| Uint8Array#copy(16000) | 1,409,666 ops/sec | ±1.17% | 65 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 6,282,529 ops/sec | ±1.88% | 58 | | +| Uint8Array#copy(16) | 11,907,128 ops/sec | ±2.87% | 58 | ✓ | +| | | | | +| BrowserBuffer#new(16000) | 101,663 ops/sec | ±3.89% | 57 | | +| Uint8Array#new(16000) | 22,050,818 ops/sec | ±6.51% | 46 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 176,072 ops/sec | ±2.13% | 64 | | +| Uint8Array#new(16) | 24,385,731 ops/sec | ±5.01% | 51 | ✓ | +| | | | | +| BrowserBuffer#readDoubleBE | 41,341 ops/sec | ±1.06% | 67 | | +| DataView#getFloat64 | 322,280 ops/sec | ±0.84% | 68 | ✓ | +| | | | | +| BrowserBuffer#readFloatBE | 46,141 ops/sec | ±1.06% | 65 | | +| DataView#getFloat32 | 337,025 ops/sec | ±0.43% | 69 | ✓ | +| | | | | +| BrowserBuffer#readUInt32LE | 151,551 ops/sec | ±1.02% | 66 | | +| DataView#getUint32 | 308,278 ops/sec | ±0.94% | 67 | ✓ | +| | | | | +| BrowserBuffer#slice | 197,365 ops/sec | ±0.95% | 66 | | +| Uint8Array#subarray | 9,558,024 ops/sec | ±3.08% | 58 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 17,518 ops/sec | ±1.03% | 63 | | +| DataView#setFloat32 | 319,751 ops/sec | ±0.48% | 68 | ✓ | + + +### Node 0.11.14 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,489,828 ops/sec | ±3.25% | 90 | | +| Uint8Array#bracket-notation | 10,534,884 ops/sec | ±0.81% | 92 | ✓ | +| NodeBuffer#bracket-notation | 10,389,910 ops/sec | ±0.97% | 87 | | +| | | | | +| BrowserBuffer#concat | 487,830 ops/sec | ±2.58% | 88 | | +| Uint8Array#concat | 1,814,327 ops/sec | ±1.28% | 88 | ✓ | +| NodeBuffer#concat | 1,636,523 ops/sec | ±1.88% | 73 | | +| | | | | +| BrowserBuffer#copy(16000) | 1,073,665 ops/sec | ±0.77% | 90 | | +| Uint8Array#copy(16000) | 1,348,517 ops/sec | ±0.84% | 89 | ✓ | +| NodeBuffer#copy(16000) | 1,289,533 ops/sec | ±0.82% | 93 | | +| | | | | +| BrowserBuffer#copy(16) | 12,782,706 ops/sec | ±0.74% | 85 | | +| Uint8Array#copy(16) | 14,180,427 ops/sec | ±0.93% | 92 | ✓ | +| NodeBuffer#copy(16) | 11,083,134 ops/sec | ±1.06% | 89 | | +| | | | | +| BrowserBuffer#new(16000) | 141,678 ops/sec | ±3.30% | 67 | | +| Uint8Array#new(16000) | 161,491 ops/sec | ±2.96% | 60 | | +| NodeBuffer#new(16000) | 292,699 ops/sec | ±3.20% | 55 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,655,466 ops/sec | ±2.41% | 82 | | +| Uint8Array#new(16) | 14,399,926 ops/sec | ±0.91% | 94 | ✓ | +| NodeBuffer#new(16) | 3,894,696 ops/sec | ±0.88% | 92 | | +| | | | | +| BrowserBuffer#readDoubleBE | 109,582 ops/sec | ±0.75% | 93 | ✓ | +| DataView#getFloat64 | 91,235 ops/sec | ±0.81% | 90 | | +| NodeBuffer#readDoubleBE | 88,593 ops/sec | ±0.96% | 81 | | +| | | | | +| BrowserBuffer#readFloatBE | 139,854 ops/sec | ±1.03% | 85 | ✓ | +| DataView#getFloat32 | 98,744 ops/sec | ±0.80% | 89 | | +| NodeBuffer#readFloatBE | 92,769 ops/sec | ±0.94% | 93 | | +| | | | | +| BrowserBuffer#readUInt32LE | 710,861 ops/sec | ±0.82% | 92 | | +| DataView#getUint32 | 117,893 ops/sec | ±0.84% | 91 | | +| NodeBuffer#readUInt32LE | 851,412 ops/sec | ±0.72% | 93 | ✓ | +| | | | | +| BrowserBuffer#slice | 1,673,877 ops/sec | ±0.73% | 94 | | +| Uint8Array#subarray | 6,919,243 ops/sec | ±0.67% | 90 | ✓ | +| NodeBuffer#slice | 4,617,604 ops/sec | ±0.79% | 93 | | +| | | | | +| 
BrowserBuffer#writeFloatBE | 66,011 ops/sec | ±0.75% | 93 | | +| DataView#setFloat32 | 127,760 ops/sec | ±0.72% | 93 | ✓ | +| NodeBuffer#writeFloatBE | 103,352 ops/sec | ±0.83% | 93 | | + +### iojs 1.8.1 + +| Method | Operations | Accuracy | Sampled | Fastest | +|:-------|:-----------|:---------|:--------|:-------:| +| BrowserBuffer#bracket-notation | 10,990,488 ops/sec | ±1.11% | 91 | | +| Uint8Array#bracket-notation | 11,268,757 ops/sec | ±0.65% | 97 | | +| NodeBuffer#bracket-notation | 11,353,260 ops/sec | ±0.83% | 94 | ✓ | +| | | | | +| BrowserBuffer#concat | 378,954 ops/sec | ±0.74% | 94 | | +| Uint8Array#concat | 1,358,288 ops/sec | ±0.97% | 87 | | +| NodeBuffer#concat | 1,934,050 ops/sec | ±1.11% | 78 | ✓ | +| | | | | +| BrowserBuffer#copy(16000) | 894,538 ops/sec | ±0.56% | 84 | | +| Uint8Array#copy(16000) | 1,442,656 ops/sec | ±0.71% | 96 | | +| NodeBuffer#copy(16000) | 1,457,898 ops/sec | ±0.53% | 92 | ✓ | +| | | | | +| BrowserBuffer#copy(16) | 12,870,457 ops/sec | ±0.67% | 95 | | +| Uint8Array#copy(16) | 16,643,989 ops/sec | ±0.61% | 93 | ✓ | +| NodeBuffer#copy(16) | 14,885,848 ops/sec | ±0.74% | 94 | | +| | | | | +| BrowserBuffer#new(16000) | 109,264 ops/sec | ±4.21% | 63 | | +| Uint8Array#new(16000) | 138,916 ops/sec | ±1.87% | 61 | | +| NodeBuffer#new(16000) | 281,449 ops/sec | ±3.58% | 51 | ✓ | +| | | | | +| BrowserBuffer#new(16) | 1,362,935 ops/sec | ±0.56% | 99 | | +| Uint8Array#new(16) | 6,193,090 ops/sec | ±0.64% | 95 | ✓ | +| NodeBuffer#new(16) | 4,745,425 ops/sec | ±1.56% | 90 | | +| | | | | +| BrowserBuffer#readDoubleBE | 118,127 ops/sec | ±0.59% | 93 | ✓ | +| DataView#getFloat64 | 107,332 ops/sec | ±0.65% | 91 | | +| NodeBuffer#readDoubleBE | 116,274 ops/sec | ±0.94% | 95 | | +| | | | | +| BrowserBuffer#readFloatBE | 150,326 ops/sec | ±0.58% | 95 | ✓ | +| DataView#getFloat32 | 110,541 ops/sec | ±0.57% | 98 | | +| NodeBuffer#readFloatBE | 121,599 ops/sec | ±0.60% | 87 | | +| | | | | +| BrowserBuffer#readUInt32LE | 814,147 ops/sec | ±0.62% | 93 | | +| DataView#getUint32 | 137,592 ops/sec | ±0.64% | 90 | | +| NodeBuffer#readUInt32LE | 931,650 ops/sec | ±0.71% | 96 | ✓ | +| | | | | +| BrowserBuffer#slice | 878,590 ops/sec | ±0.68% | 93 | | +| Uint8Array#subarray | 2,843,308 ops/sec | ±1.02% | 90 | | +| NodeBuffer#slice | 4,998,316 ops/sec | ±0.68% | 90 | ✓ | +| | | | | +| BrowserBuffer#writeFloatBE | 65,927 ops/sec | ±0.74% | 93 | | +| DataView#setFloat32 | 139,823 ops/sec | ±0.97% | 89 | ✓ | +| NodeBuffer#writeFloatBE | 135,763 ops/sec | ±0.65% | 96 | | +| | | | | + +## Testing the project + +First, install the project: + + npm install + +Then, to run tests in Node.js, run: + + npm run test-node + +To test locally in a browser, you can run: + + npm run test-browser-es5-local # For ES5 browsers that don't support ES6 + npm run test-browser-es6-local # For ES6 compliant browsers + +This will print out a URL that you can then open in a browser to run the tests, using [airtap](https://www.npmjs.com/package/airtap). + +To run automated browser tests using Saucelabs, ensure that your `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables are set, then run: + + npm test + +This is what's run in Travis, to check against various browsers. The list of browsers is kept in the `bin/airtap-es5.yml` and `bin/airtap-es6.yml` files. + +## JavaScript Standard Style + +This module uses [JavaScript Standard Style](https://github.com/feross/standard). 
+ +[![JavaScript Style Guide](https://cdn.rawgit.com/feross/standard/master/badge.svg)](https://github.com/feross/standard) + +To test that the code conforms to the style, `npm install` and run: + + ./node_modules/.bin/standard + +## credit + +This was originally forked from [buffer-browserify](https://github.com/toots/buffer-browserify). + +## Security Policies and Procedures + +The `buffer` team and community take all security bugs in `buffer` seriously. Please see our [security policies and procedures](https://github.com/feross/security) document to learn how to report issues. + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org), and other contributors. Originally forked from an MIT-licensed module by Romain Beauxis. diff --git a/miniprogram/node_modules/buffer/index.d.ts b/miniprogram/node_modules/buffer/index.d.ts new file mode 100644 index 00000000..5d1a804e --- /dev/null +++ b/miniprogram/node_modules/buffer/index.d.ts @@ -0,0 +1,186 @@ +export class Buffer extends Uint8Array { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + reverse(): this; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, 
offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer | Uint8Array): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. 
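+   * Illustrative note (added here, not from the upstream docs): Buffer.isEncoding('utf8') and
+   * Buffer.isEncoding('hex') would return true, while Buffer.isEncoding('utf7') would return false.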
+ */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initializing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; +} diff --git a/miniprogram/node_modules/buffer/index.js b/miniprogram/node_modules/buffer/index.js new file mode 100644 index 00000000..609cf311 --- /dev/null +++ b/miniprogram/node_modules/buffer/index.js @@ -0,0 +1,1817 @@ +/*! + * The buffer module from node.js, for the browser. + * + * @author Feross Aboukhadijeh + * @license MIT + */ +/* eslint-disable no-proto */ + +'use strict' + +var base64 = require('base64-js') +var ieee754 = require('ieee754') +var customInspectSymbol = + (typeof Symbol === 'function' && typeof Symbol['for'] === 'function') // eslint-disable-line dot-notation + ? Symbol['for']('nodejs.util.inspect.custom') // eslint-disable-line dot-notation + : null + +exports.Buffer = Buffer +exports.SlowBuffer = SlowBuffer +exports.INSPECT_MAX_BYTES = 50 + +var K_MAX_LENGTH = 0x7fffffff +exports.kMaxLength = K_MAX_LENGTH + +/** + * If `Buffer.TYPED_ARRAY_SUPPORT`: + * === true Use Uint8Array implementation (fastest) + * === false Print warning and recommend using `buffer` v4.x which has an Object + * implementation (most compatible, even IE6) + * + * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, + * Opera 11.6+, iOS 4.2+. + * + * We report that the browser does not support typed arrays if the are not subclassable + * using __proto__. 
Firefox 4-29 lacks support for adding new properties to `Uint8Array` + * (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support + * for __proto__ and has a buggy typed array implementation. + */ +Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport() + +if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' && + typeof console.error === 'function') { + console.error( + 'This browser lacks typed array (Uint8Array) support which is required by ' + + '`buffer` v5.x. Use `buffer` v4.x if you require old browser support.' + ) +} + +function typedArraySupport () { + // Can typed array instances can be augmented? + try { + var arr = new Uint8Array(1) + var proto = { foo: function () { return 42 } } + Object.setPrototypeOf(proto, Uint8Array.prototype) + Object.setPrototypeOf(arr, proto) + return arr.foo() === 42 + } catch (e) { + return false + } +} + +Object.defineProperty(Buffer.prototype, 'parent', { + enumerable: true, + get: function () { + if (!Buffer.isBuffer(this)) return undefined + return this.buffer + } +}) + +Object.defineProperty(Buffer.prototype, 'offset', { + enumerable: true, + get: function () { + if (!Buffer.isBuffer(this)) return undefined + return this.byteOffset + } +}) + +function createBuffer (length) { + if (length > K_MAX_LENGTH) { + throw new RangeError('The value "' + length + '" is invalid for option "size"') + } + // Return an augmented `Uint8Array` instance + var buf = new Uint8Array(length) + Object.setPrototypeOf(buf, Buffer.prototype) + return buf +} + +/** + * The Buffer constructor returns instances of `Uint8Array` that have their + * prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of + * `Uint8Array`, so the returned instances will have all the node `Buffer` methods + * and the `Uint8Array` methods. Square bracket notation works as expected -- it + * returns a single octet. + * + * The `Uint8Array` prototype remains unmodified. + */ + +function Buffer (arg, encodingOrOffset, length) { + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new TypeError( + 'The "string" argument must be of type string. Received type number' + ) + } + return allocUnsafe(arg) + } + return from(arg, encodingOrOffset, length) +} + +Buffer.poolSize = 8192 // not used by this implementation + +function from (value, encodingOrOffset, length) { + if (typeof value === 'string') { + return fromString(value, encodingOrOffset) + } + + if (ArrayBuffer.isView(value)) { + return fromArrayView(value) + } + + if (value == null) { + throw new TypeError( + 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + + 'or Array-like Object. Received type ' + (typeof value) + ) + } + + if (isInstance(value, ArrayBuffer) || + (value && isInstance(value.buffer, ArrayBuffer))) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof SharedArrayBuffer !== 'undefined' && + (isInstance(value, SharedArrayBuffer) || + (value && isInstance(value.buffer, SharedArrayBuffer)))) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof value === 'number') { + throw new TypeError( + 'The "value" argument must not be of type number. 
Received type number' + ) + } + + var valueOf = value.valueOf && value.valueOf() + if (valueOf != null && valueOf !== value) { + return Buffer.from(valueOf, encodingOrOffset, length) + } + + var b = fromObject(value) + if (b) return b + + if (typeof Symbol !== 'undefined' && Symbol.toPrimitive != null && + typeof value[Symbol.toPrimitive] === 'function') { + return Buffer.from( + value[Symbol.toPrimitive]('string'), encodingOrOffset, length + ) + } + + throw new TypeError( + 'The first argument must be one of type string, Buffer, ArrayBuffer, Array, ' + + 'or Array-like Object. Received type ' + (typeof value) + ) +} + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. + * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + **/ +Buffer.from = function (value, encodingOrOffset, length) { + return from(value, encodingOrOffset, length) +} + +// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug: +// https://github.com/feross/buffer/pull/148 +Object.setPrototypeOf(Buffer.prototype, Uint8Array.prototype) +Object.setPrototypeOf(Buffer, Uint8Array) + +function assertSize (size) { + if (typeof size !== 'number') { + throw new TypeError('"size" argument must be of type number') + } else if (size < 0) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } +} + +function alloc (size, fill, encoding) { + assertSize(size) + if (size <= 0) { + return createBuffer(size) + } + if (fill !== undefined) { + // Only pay attention to encoding if it's a string. This + // prevents accidentally sending in a number that would + // be interpreted as a start offset. + return typeof encoding === 'string' + ? createBuffer(size).fill(fill, encoding) + : createBuffer(size).fill(fill) + } + return createBuffer(size) +} + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + **/ +Buffer.alloc = function (size, fill, encoding) { + return alloc(size, fill, encoding) +} + +function allocUnsafe (size) { + assertSize(size) + return createBuffer(size < 0 ? 0 : checked(size) | 0) +} + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. + * */ +Buffer.allocUnsafe = function (size) { + return allocUnsafe(size) +} +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. + */ +Buffer.allocUnsafeSlow = function (size) { + return allocUnsafe(size) +} + +function fromString (string, encoding) { + if (typeof encoding !== 'string' || encoding === '') { + encoding = 'utf8' + } + + if (!Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + + var length = byteLength(string, encoding) | 0 + var buf = createBuffer(length) + + var actual = buf.write(string, encoding) + + if (actual !== length) { + // Writing a hex string, for example, that contains invalid characters will + // cause everything after the first invalid character to be ignored. (e.g. + // 'abxxcd' will be treated as 'ab') + buf = buf.slice(0, actual) + } + + return buf +} + +function fromArrayLike (array) { + var length = array.length < 0 ? 
0 : checked(array.length) | 0 + var buf = createBuffer(length) + for (var i = 0; i < length; i += 1) { + buf[i] = array[i] & 255 + } + return buf +} + +function fromArrayView (arrayView) { + if (isInstance(arrayView, Uint8Array)) { + var copy = new Uint8Array(arrayView) + return fromArrayBuffer(copy.buffer, copy.byteOffset, copy.byteLength) + } + return fromArrayLike(arrayView) +} + +function fromArrayBuffer (array, byteOffset, length) { + if (byteOffset < 0 || array.byteLength < byteOffset) { + throw new RangeError('"offset" is outside of buffer bounds') + } + + if (array.byteLength < byteOffset + (length || 0)) { + throw new RangeError('"length" is outside of buffer bounds') + } + + var buf + if (byteOffset === undefined && length === undefined) { + buf = new Uint8Array(array) + } else if (length === undefined) { + buf = new Uint8Array(array, byteOffset) + } else { + buf = new Uint8Array(array, byteOffset, length) + } + + // Return an augmented `Uint8Array` instance + Object.setPrototypeOf(buf, Buffer.prototype) + + return buf +} + +function fromObject (obj) { + if (Buffer.isBuffer(obj)) { + var len = checked(obj.length) | 0 + var buf = createBuffer(len) + + if (buf.length === 0) { + return buf + } + + obj.copy(buf, 0, 0, len) + return buf + } + + if (obj.length !== undefined) { + if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) { + return createBuffer(0) + } + return fromArrayLike(obj) + } + + if (obj.type === 'Buffer' && Array.isArray(obj.data)) { + return fromArrayLike(obj.data) + } +} + +function checked (length) { + // Note: cannot use `length < K_MAX_LENGTH` here because that fails when + // length is NaN (which is otherwise coerced to zero.) + if (length >= K_MAX_LENGTH) { + throw new RangeError('Attempt to allocate Buffer larger than maximum ' + + 'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes') + } + return length | 0 +} + +function SlowBuffer (length) { + if (+length != length) { // eslint-disable-line eqeqeq + length = 0 + } + return Buffer.alloc(+length) +} + +Buffer.isBuffer = function isBuffer (b) { + return b != null && b._isBuffer === true && + b !== Buffer.prototype // so Buffer.isBuffer(Buffer.prototype) will be false +} + +Buffer.compare = function compare (a, b) { + if (isInstance(a, Uint8Array)) a = Buffer.from(a, a.offset, a.byteLength) + if (isInstance(b, Uint8Array)) b = Buffer.from(b, b.offset, b.byteLength) + if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { + throw new TypeError( + 'The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array' + ) + } + + if (a === b) return 0 + + var x = a.length + var y = b.length + + for (var i = 0, len = Math.min(x, y); i < len; ++i) { + if (a[i] !== b[i]) { + x = a[i] + y = b[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +Buffer.isEncoding = function isEncoding (encoding) { + switch (String(encoding).toLowerCase()) { + case 'hex': + case 'utf8': + case 'utf-8': + case 'ascii': + case 'latin1': + case 'binary': + case 'base64': + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return true + default: + return false + } +} + +Buffer.concat = function concat (list, length) { + if (!Array.isArray(list)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } + + if (list.length === 0) { + return Buffer.alloc(0) + } + + var i + if (length === undefined) { + length = 0 + for (i = 0; i < list.length; ++i) { + length += list[i].length + } + } + + var buffer = Buffer.allocUnsafe(length) + var pos = 0 + for (i = 0; i < list.length; 
++i) { + var buf = list[i] + if (isInstance(buf, Uint8Array)) { + if (pos + buf.length > buffer.length) { + Buffer.from(buf).copy(buffer, pos) + } else { + Uint8Array.prototype.set.call( + buffer, + buf, + pos + ) + } + } else if (!Buffer.isBuffer(buf)) { + throw new TypeError('"list" argument must be an Array of Buffers') + } else { + buf.copy(buffer, pos) + } + pos += buf.length + } + return buffer +} + +function byteLength (string, encoding) { + if (Buffer.isBuffer(string)) { + return string.length + } + if (ArrayBuffer.isView(string) || isInstance(string, ArrayBuffer)) { + return string.byteLength + } + if (typeof string !== 'string') { + throw new TypeError( + 'The "string" argument must be one of type string, Buffer, or ArrayBuffer. ' + + 'Received type ' + typeof string + ) + } + + var len = string.length + var mustMatch = (arguments.length > 2 && arguments[2] === true) + if (!mustMatch && len === 0) return 0 + + // Use a for loop to avoid recursion + var loweredCase = false + for (;;) { + switch (encoding) { + case 'ascii': + case 'latin1': + case 'binary': + return len + case 'utf8': + case 'utf-8': + return utf8ToBytes(string).length + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return len * 2 + case 'hex': + return len >>> 1 + case 'base64': + return base64ToBytes(string).length + default: + if (loweredCase) { + return mustMatch ? -1 : utf8ToBytes(string).length // assume utf8 + } + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} +Buffer.byteLength = byteLength + +function slowToString (encoding, start, end) { + var loweredCase = false + + // No need to verify that "this.length <= MAX_UINT32" since it's a read-only + // property of a typed array. + + // This behaves neither like String nor Uint8Array in that we set start/end + // to their upper/lower bounds if the value passed is out of range. + // undefined is handled specially as per ECMA-262 6th Edition, + // Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization. + if (start === undefined || start < 0) { + start = 0 + } + // Return early if start > this.length. Done here to prevent potential uint32 + // coercion fail below. + if (start > this.length) { + return '' + } + + if (end === undefined || end > this.length) { + end = this.length + } + + if (end <= 0) { + return '' + } + + // Force coercion to uint32. This will also coerce falsey/NaN values to 0. + end >>>= 0 + start >>>= 0 + + if (end <= start) { + return '' + } + + if (!encoding) encoding = 'utf8' + + while (true) { + switch (encoding) { + case 'hex': + return hexSlice(this, start, end) + + case 'utf8': + case 'utf-8': + return utf8Slice(this, start, end) + + case 'ascii': + return asciiSlice(this, start, end) + + case 'latin1': + case 'binary': + return latin1Slice(this, start, end) + + case 'base64': + return base64Slice(this, start, end) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return utf16leSlice(this, start, end) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = (encoding + '').toLowerCase() + loweredCase = true + } + } +} + +// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package) +// to detect a Buffer instance. It's not possible to use `instanceof Buffer` +// reliably in a browserify context because there could be multiple different +// copies of the 'buffer' package in use. This method works even for Buffer +// instances that were created from another copy of the `buffer` package. 
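+// Illustrative note (added, not upstream): a Buffer produced by one bundled copy of this
+// module still satisfies Buffer.isBuffer() from another copy, because both copies check for
+// the same `_isBuffer` marker instead of relying on `instanceof`.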
+// See: https://github.com/feross/buffer/issues/154 +Buffer.prototype._isBuffer = true + +function swap (b, n, m) { + var i = b[n] + b[n] = b[m] + b[m] = i +} + +Buffer.prototype.swap16 = function swap16 () { + var len = this.length + if (len % 2 !== 0) { + throw new RangeError('Buffer size must be a multiple of 16-bits') + } + for (var i = 0; i < len; i += 2) { + swap(this, i, i + 1) + } + return this +} + +Buffer.prototype.swap32 = function swap32 () { + var len = this.length + if (len % 4 !== 0) { + throw new RangeError('Buffer size must be a multiple of 32-bits') + } + for (var i = 0; i < len; i += 4) { + swap(this, i, i + 3) + swap(this, i + 1, i + 2) + } + return this +} + +Buffer.prototype.swap64 = function swap64 () { + var len = this.length + if (len % 8 !== 0) { + throw new RangeError('Buffer size must be a multiple of 64-bits') + } + for (var i = 0; i < len; i += 8) { + swap(this, i, i + 7) + swap(this, i + 1, i + 6) + swap(this, i + 2, i + 5) + swap(this, i + 3, i + 4) + } + return this +} + +Buffer.prototype.toString = function toString () { + var length = this.length + if (length === 0) return '' + if (arguments.length === 0) return utf8Slice(this, 0, length) + return slowToString.apply(this, arguments) +} + +Buffer.prototype.toLocaleString = Buffer.prototype.toString + +Buffer.prototype.equals = function equals (b) { + if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') + if (this === b) return true + return Buffer.compare(this, b) === 0 +} + +Buffer.prototype.inspect = function inspect () { + var str = '' + var max = exports.INSPECT_MAX_BYTES + str = this.toString('hex', 0, max).replace(/(.{2})/g, '$1 ').trim() + if (this.length > max) str += ' ... ' + return '' +} +if (customInspectSymbol) { + Buffer.prototype[customInspectSymbol] = Buffer.prototype.inspect +} + +Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) { + if (isInstance(target, Uint8Array)) { + target = Buffer.from(target, target.offset, target.byteLength) + } + if (!Buffer.isBuffer(target)) { + throw new TypeError( + 'The "target" argument must be one of type Buffer or Uint8Array. ' + + 'Received type ' + (typeof target) + ) + } + + if (start === undefined) { + start = 0 + } + if (end === undefined) { + end = target ? target.length : 0 + } + if (thisStart === undefined) { + thisStart = 0 + } + if (thisEnd === undefined) { + thisEnd = this.length + } + + if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) { + throw new RangeError('out of range index') + } + + if (thisStart >= thisEnd && start >= end) { + return 0 + } + if (thisStart >= thisEnd) { + return -1 + } + if (start >= end) { + return 1 + } + + start >>>= 0 + end >>>= 0 + thisStart >>>= 0 + thisEnd >>>= 0 + + if (this === target) return 0 + + var x = thisEnd - thisStart + var y = end - start + var len = Math.min(x, y) + + var thisCopy = this.slice(thisStart, thisEnd) + var targetCopy = target.slice(start, end) + + for (var i = 0; i < len; ++i) { + if (thisCopy[i] !== targetCopy[i]) { + x = thisCopy[i] + y = targetCopy[i] + break + } + } + + if (x < y) return -1 + if (y < x) return 1 + return 0 +} + +// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, +// OR the last index of `val` in `buffer` at offset <= `byteOffset`. 
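+// Illustrative behaviour (added note): for Buffer.from('abcabc'), indexOf('b') resolves to 1
+// and lastIndexOf('b') to 4; both of those calls are routed through this helper.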
+// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant is val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { + // Empty buffer means no match + if (buffer.length === 0) return -1 + + // Normalize byteOffset + if (typeof byteOffset === 'string') { + encoding = byteOffset + byteOffset = 0 + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000 + } + byteOffset = +byteOffset // Coerce to Number. + if (numberIsNaN(byteOffset)) { + // byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer + byteOffset = dir ? 0 : (buffer.length - 1) + } + + // Normalize byteOffset: negative offsets start from the end of the buffer + if (byteOffset < 0) byteOffset = buffer.length + byteOffset + if (byteOffset >= buffer.length) { + if (dir) return -1 + else byteOffset = buffer.length - 1 + } else if (byteOffset < 0) { + if (dir) byteOffset = 0 + else return -1 + } + + // Normalize val + if (typeof val === 'string') { + val = Buffer.from(val, encoding) + } + + // Finally, search either indexOf (if dir is true) or lastIndexOf + if (Buffer.isBuffer(val)) { + // Special case: looking for empty string/buffer always fails + if (val.length === 0) { + return -1 + } + return arrayIndexOf(buffer, val, byteOffset, encoding, dir) + } else if (typeof val === 'number') { + val = val & 0xFF // Search for a byte value [0-255] + if (typeof Uint8Array.prototype.indexOf === 'function') { + if (dir) { + return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) + } else { + return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) + } + } + return arrayIndexOf(buffer, [val], byteOffset, encoding, dir) + } + + throw new TypeError('val must be string, number or Buffer') +} + +function arrayIndexOf (arr, val, byteOffset, encoding, dir) { + var indexSize = 1 + var arrLength = arr.length + var valLength = val.length + + if (encoding !== undefined) { + encoding = String(encoding).toLowerCase() + if (encoding === 'ucs2' || encoding === 'ucs-2' || + encoding === 'utf16le' || encoding === 'utf-16le') { + if (arr.length < 2 || val.length < 2) { + return -1 + } + indexSize = 2 + arrLength /= 2 + valLength /= 2 + byteOffset /= 2 + } + } + + function read (buf, i) { + if (indexSize === 1) { + return buf[i] + } else { + return buf.readUInt16BE(i * indexSize) + } + } + + var i + if (dir) { + var foundIndex = -1 + for (i = byteOffset; i < arrLength; i++) { + if (read(arr, i) === read(val, foundIndex === -1 ? 
0 : i - foundIndex)) { + if (foundIndex === -1) foundIndex = i + if (i - foundIndex + 1 === valLength) return foundIndex * indexSize + } else { + if (foundIndex !== -1) i -= i - foundIndex + foundIndex = -1 + } + } + } else { + if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength + for (i = byteOffset; i >= 0; i--) { + var found = true + for (var j = 0; j < valLength; j++) { + if (read(arr, i + j) !== read(val, j)) { + found = false + break + } + } + if (found) return i + } + } + + return -1 +} + +Buffer.prototype.includes = function includes (val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1 +} + +Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true) +} + +Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false) +} + +function hexWrite (buf, string, offset, length) { + offset = Number(offset) || 0 + var remaining = buf.length - offset + if (!length) { + length = remaining + } else { + length = Number(length) + if (length > remaining) { + length = remaining + } + } + + var strLen = string.length + + if (length > strLen / 2) { + length = strLen / 2 + } + for (var i = 0; i < length; ++i) { + var parsed = parseInt(string.substr(i * 2, 2), 16) + if (numberIsNaN(parsed)) return i + buf[offset + i] = parsed + } + return i +} + +function utf8Write (buf, string, offset, length) { + return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) +} + +function asciiWrite (buf, string, offset, length) { + return blitBuffer(asciiToBytes(string), buf, offset, length) +} + +function base64Write (buf, string, offset, length) { + return blitBuffer(base64ToBytes(string), buf, offset, length) +} + +function ucs2Write (buf, string, offset, length) { + return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) +} + +Buffer.prototype.write = function write (string, offset, length, encoding) { + // Buffer#write(string) + if (offset === undefined) { + encoding = 'utf8' + length = this.length + offset = 0 + // Buffer#write(string, encoding) + } else if (length === undefined && typeof offset === 'string') { + encoding = offset + length = this.length + offset = 0 + // Buffer#write(string, offset[, length][, encoding]) + } else if (isFinite(offset)) { + offset = offset >>> 0 + if (isFinite(length)) { + length = length >>> 0 + if (encoding === undefined) encoding = 'utf8' + } else { + encoding = length + length = undefined + } + } else { + throw new Error( + 'Buffer.write(string, encoding, offset[, length]) is no longer supported' + ) + } + + var remaining = this.length - offset + if (length === undefined || length > remaining) length = remaining + + if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { + throw new RangeError('Attempt to write outside buffer bounds') + } + + if (!encoding) encoding = 'utf8' + + var loweredCase = false + for (;;) { + switch (encoding) { + case 'hex': + return hexWrite(this, string, offset, length) + + case 'utf8': + case 'utf-8': + return utf8Write(this, string, offset, length) + + case 'ascii': + case 'latin1': + case 'binary': + return asciiWrite(this, string, offset, length) + + case 'base64': + // Warning: maxLength not taken into account in base64Write + return base64Write(this, string, offset, length) + + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + 
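+        // All four aliases fall through to the UTF-16LE writer below.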
return ucs2Write(this, string, offset, length) + + default: + if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) + encoding = ('' + encoding).toLowerCase() + loweredCase = true + } + } +} + +Buffer.prototype.toJSON = function toJSON () { + return { + type: 'Buffer', + data: Array.prototype.slice.call(this._arr || this, 0) + } +} + +function base64Slice (buf, start, end) { + if (start === 0 && end === buf.length) { + return base64.fromByteArray(buf) + } else { + return base64.fromByteArray(buf.slice(start, end)) + } +} + +function utf8Slice (buf, start, end) { + end = Math.min(buf.length, end) + var res = [] + + var i = start + while (i < end) { + var firstByte = buf[i] + var codePoint = null + var bytesPerSequence = (firstByte > 0xEF) + ? 4 + : (firstByte > 0xDF) + ? 3 + : (firstByte > 0xBF) + ? 2 + : 1 + + if (i + bytesPerSequence <= end) { + var secondByte, thirdByte, fourthByte, tempCodePoint + + switch (bytesPerSequence) { + case 1: + if (firstByte < 0x80) { + codePoint = firstByte + } + break + case 2: + secondByte = buf[i + 1] + if ((secondByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) + if (tempCodePoint > 0x7F) { + codePoint = tempCodePoint + } + } + break + case 3: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) + if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { + codePoint = tempCodePoint + } + } + break + case 4: + secondByte = buf[i + 1] + thirdByte = buf[i + 2] + fourthByte = buf[i + 3] + if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { + tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) + if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { + codePoint = tempCodePoint + } + } + } + } + + if (codePoint === null) { + // we did not generate a valid codePoint so insert a + // replacement char (U+FFFD) and advance only 1 byte + codePoint = 0xFFFD + bytesPerSequence = 1 + } else if (codePoint > 0xFFFF) { + // encode to utf16 (surrogate pair dance) + codePoint -= 0x10000 + res.push(codePoint >>> 10 & 0x3FF | 0xD800) + codePoint = 0xDC00 | codePoint & 0x3FF + } + + res.push(codePoint) + i += bytesPerSequence + } + + return decodeCodePointsArray(res) +} + +// Based on http://stackoverflow.com/a/22747272/680742, the browser with +// the lowest limit is Chrome, with 0x10000 args. +// We go 1 magnitude less, for safety +var MAX_ARGUMENTS_LENGTH = 0x1000 + +function decodeCodePointsArray (codePoints) { + var len = codePoints.length + if (len <= MAX_ARGUMENTS_LENGTH) { + return String.fromCharCode.apply(String, codePoints) // avoid extra slice() + } + + // Decode in chunks to avoid "call stack size exceeded". 
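+  // (Each chunk is spread into String.fromCharCode.apply as an argument list, so the chunk
+  // size is kept well below engines' argument-count limits via MAX_ARGUMENTS_LENGTH.)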
+ var res = '' + var i = 0 + while (i < len) { + res += String.fromCharCode.apply( + String, + codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) + ) + } + return res +} + +function asciiSlice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i] & 0x7F) + } + return ret +} + +function latin1Slice (buf, start, end) { + var ret = '' + end = Math.min(buf.length, end) + + for (var i = start; i < end; ++i) { + ret += String.fromCharCode(buf[i]) + } + return ret +} + +function hexSlice (buf, start, end) { + var len = buf.length + + if (!start || start < 0) start = 0 + if (!end || end < 0 || end > len) end = len + + var out = '' + for (var i = start; i < end; ++i) { + out += hexSliceLookupTable[buf[i]] + } + return out +} + +function utf16leSlice (buf, start, end) { + var bytes = buf.slice(start, end) + var res = '' + // If bytes.length is odd, the last 8 bits must be ignored (same as node.js) + for (var i = 0; i < bytes.length - 1; i += 2) { + res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256)) + } + return res +} + +Buffer.prototype.slice = function slice (start, end) { + var len = this.length + start = ~~start + end = end === undefined ? len : ~~end + + if (start < 0) { + start += len + if (start < 0) start = 0 + } else if (start > len) { + start = len + } + + if (end < 0) { + end += len + if (end < 0) end = 0 + } else if (end > len) { + end = len + } + + if (end < start) end = start + + var newBuf = this.subarray(start, end) + // Return an augmented `Uint8Array` instance + Object.setPrototypeOf(newBuf, Buffer.prototype) + + return newBuf +} + +/* + * Need to make sure that buffer isn't trying to write out of bounds. + */ +function checkOffset (offset, ext, length) { + if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') + if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') +} + +Buffer.prototype.readUintLE = +Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + + return val +} + +Buffer.prototype.readUintBE = +Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + checkOffset(offset, byteLength, this.length) + } + + var val = this[offset + --byteLength] + var mul = 1 + while (byteLength > 0 && (mul *= 0x100)) { + val += this[offset + --byteLength] * mul + } + + return val +} + +Buffer.prototype.readUint8 = +Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 1, this.length) + return this[offset] +} + +Buffer.prototype.readUint16LE = +Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + return this[offset] | (this[offset + 1] << 8) +} + +Buffer.prototype.readUint16BE = +Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + return (this[offset] << 8) | this[offset + 1] +} + +Buffer.prototype.readUint32LE = +Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) 
{ + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return ((this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16)) + + (this[offset + 3] * 0x1000000) +} + +Buffer.prototype.readUint32BE = +Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] * 0x1000000) + + ((this[offset + 1] << 16) | + (this[offset + 2] << 8) | + this[offset + 3]) +} + +Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var val = this[offset] + var mul = 1 + var i = 0 + while (++i < byteLength && (mul *= 0x100)) { + val += this[offset + i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) checkOffset(offset, byteLength, this.length) + + var i = byteLength + var mul = 1 + var val = this[offset + --i] + while (i > 0 && (mul *= 0x100)) { + val += this[offset + --i] * mul + } + mul *= 0x80 + + if (val >= mul) val -= Math.pow(2, 8 * byteLength) + + return val +} + +Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 1, this.length) + if (!(this[offset] & 0x80)) return (this[offset]) + return ((0xff - this[offset] + 1) * -1) +} + +Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset] | (this[offset + 1] << 8) + return (val & 0x8000) ? val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 2, this.length) + var val = this[offset + 1] | (this[offset] << 8) + return (val & 0x8000) ? 
val | 0xFFFF0000 : val +} + +Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset]) | + (this[offset + 1] << 8) | + (this[offset + 2] << 16) | + (this[offset + 3] << 24) +} + +Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + + return (this[offset] << 24) | + (this[offset + 1] << 16) | + (this[offset + 2] << 8) | + (this[offset + 3]) +} + +Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, true, 23, 4) +} + +Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 4, this.length) + return ieee754.read(this, offset, false, 23, 4) +} + +Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, true, 52, 8) +} + +Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { + offset = offset >>> 0 + if (!noAssert) checkOffset(offset, 8, this.length) + return ieee754.read(this, offset, false, 52, 8) +} + +function checkInt (buf, value, offset, ext, max, min) { + if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') + if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') + if (offset + ext > buf.length) throw new RangeError('Index out of range') +} + +Buffer.prototype.writeUintLE = +Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var mul = 1 + var i = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUintBE = +Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + byteLength = byteLength >>> 0 + if (!noAssert) { + var maxBytes = Math.pow(2, 8 * byteLength) - 1 + checkInt(this, value, offset, byteLength, maxBytes, 0) + } + + var i = byteLength - 1 + var mul = 1 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + this[offset + i] = (value / mul) & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeUint8 = +Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeUint16LE = +Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + return offset + 2 +} + +Buffer.prototype.writeUint16BE = +Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) 
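+  // Big-endian layout: most significant byte first, then the least significant byte.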
+ this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + return offset + 2 +} + +Buffer.prototype.writeUint32LE = +Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + this[offset + 3] = (value >>> 24) + this[offset + 2] = (value >>> 16) + this[offset + 1] = (value >>> 8) + this[offset] = (value & 0xff) + return offset + 4 +} + +Buffer.prototype.writeUint32BE = +Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) + this[offset + 3] = (value & 0xff) + return offset + 4 +} + +Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + var limit = Math.pow(2, (8 * byteLength) - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = 0 + var mul = 1 + var sub = 0 + this[offset] = value & 0xFF + while (++i < byteLength && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + var limit = Math.pow(2, (8 * byteLength) - 1) + + checkInt(this, value, offset, byteLength, limit - 1, -limit) + } + + var i = byteLength - 1 + var mul = 1 + var sub = 0 + this[offset + i] = value & 0xFF + while (--i >= 0 && (mul *= 0x100)) { + if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { + sub = 1 + } + this[offset + i] = ((value / mul) >> 0) - sub & 0xFF + } + + return offset + byteLength +} + +Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) + if (value < 0) value = 0xff + value + 1 + this[offset] = (value & 0xff) + return offset + 1 +} + +Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + return offset + 2 +} + +Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) + this[offset] = (value >>> 8) + this[offset + 1] = (value & 0xff) + return offset + 2 +} + +Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + this[offset] = (value & 0xff) + this[offset + 1] = (value >>> 8) + this[offset + 2] = (value >>> 16) + this[offset + 3] = (value >>> 24) + return offset + 4 +} + +Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) + if (value < 0) value = 0xffffffff + value + 1 + this[offset] = (value >>> 24) + this[offset + 1] = (value >>> 16) + this[offset + 2] = (value >>> 8) 
+ this[offset + 3] = (value & 0xff) + return offset + 4 +} + +function checkIEEE754 (buf, value, offset, ext, max, min) { + if (offset + ext > buf.length) throw new RangeError('Index out of range') + if (offset < 0) throw new RangeError('Index out of range') +} + +function writeFloat (buf, value, offset, littleEndian, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) + } + ieee754.write(buf, value, offset, littleEndian, 23, 4) + return offset + 4 +} + +Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { + return writeFloat(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { + return writeFloat(this, value, offset, false, noAssert) +} + +function writeDouble (buf, value, offset, littleEndian, noAssert) { + value = +value + offset = offset >>> 0 + if (!noAssert) { + checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) + } + ieee754.write(buf, value, offset, littleEndian, 52, 8) + return offset + 8 +} + +Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { + return writeDouble(this, value, offset, true, noAssert) +} + +Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { + return writeDouble(this, value, offset, false, noAssert) +} + +// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) +Buffer.prototype.copy = function copy (target, targetStart, start, end) { + if (!Buffer.isBuffer(target)) throw new TypeError('argument should be a Buffer') + if (!start) start = 0 + if (!end && end !== 0) end = this.length + if (targetStart >= target.length) targetStart = target.length + if (!targetStart) targetStart = 0 + if (end > 0 && end < start) end = start + + // Copy 0 bytes; we're done + if (end === start) return 0 + if (target.length === 0 || this.length === 0) return 0 + + // Fatal error conditions + if (targetStart < 0) { + throw new RangeError('targetStart out of bounds') + } + if (start < 0 || start >= this.length) throw new RangeError('Index out of range') + if (end < 0) throw new RangeError('sourceEnd out of bounds') + + // Are we oob? 
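+  // Clamp the source end and the effective length so the copy never reads past `this`
+  // or writes past `target`.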
+ if (end > this.length) end = this.length + if (target.length - targetStart < end - start) { + end = target.length - targetStart + start + } + + var len = end - start + + if (this === target && typeof Uint8Array.prototype.copyWithin === 'function') { + // Use built-in when available, missing from IE11 + this.copyWithin(targetStart, start, end) + } else { + Uint8Array.prototype.set.call( + target, + this.subarray(start, end), + targetStart + ) + } + + return len +} + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill (val, start, end, encoding) { + // Handle string cases: + if (typeof val === 'string') { + if (typeof start === 'string') { + encoding = start + start = 0 + end = this.length + } else if (typeof end === 'string') { + encoding = end + end = this.length + } + if (encoding !== undefined && typeof encoding !== 'string') { + throw new TypeError('encoding must be a string') + } + if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { + throw new TypeError('Unknown encoding: ' + encoding) + } + if (val.length === 1) { + var code = val.charCodeAt(0) + if ((encoding === 'utf8' && code < 128) || + encoding === 'latin1') { + // Fast path: If `val` fits into a single byte, use that numeric value. + val = code + } + } + } else if (typeof val === 'number') { + val = val & 255 + } else if (typeof val === 'boolean') { + val = Number(val) + } + + // Invalid ranges are not set to a default, so can range check early. + if (start < 0 || this.length < start || this.length < end) { + throw new RangeError('Out of range index') + } + + if (end <= start) { + return this + } + + start = start >>> 0 + end = end === undefined ? this.length : end >>> 0 + + if (!val) val = 0 + + var i + if (typeof val === 'number') { + for (i = start; i < end; ++i) { + this[i] = val + } + } else { + var bytes = Buffer.isBuffer(val) + ? 
val + : Buffer.from(val, encoding) + var len = bytes.length + if (len === 0) { + throw new TypeError('The value "' + val + + '" is invalid for argument "value"') + } + for (i = 0; i < end - start; ++i) { + this[i + start] = bytes[i % len] + } + } + + return this +} + +// HELPER FUNCTIONS +// ================ + +var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g + +function base64clean (str) { + // Node takes equal signs as end of the Base64 encoding + str = str.split('=')[0] + // Node strips out invalid characters like \n and \t from the string, base64-js does not + str = str.trim().replace(INVALID_BASE64_RE, '') + // Node converts strings with length < 2 to '' + if (str.length < 2) return '' + // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not + while (str.length % 4 !== 0) { + str = str + '=' + } + return str +} + +function utf8ToBytes (string, units) { + units = units || Infinity + var codePoint + var length = string.length + var leadSurrogate = null + var bytes = [] + + for (var i = 0; i < length; ++i) { + codePoint = string.charCodeAt(i) + + // is surrogate component + if (codePoint > 0xD7FF && codePoint < 0xE000) { + // last char was a lead + if (!leadSurrogate) { + // no lead yet + if (codePoint > 0xDBFF) { + // unexpected trail + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } else if (i + 1 === length) { + // unpaired lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + continue + } + + // valid lead + leadSurrogate = codePoint + + continue + } + + // 2 leads in a row + if (codePoint < 0xDC00) { + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + leadSurrogate = codePoint + continue + } + + // valid surrogate pair + codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 + } else if (leadSurrogate) { + // valid bmp char, but last char was a lead + if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) + } + + leadSurrogate = null + + // encode utf8 + if (codePoint < 0x80) { + if ((units -= 1) < 0) break + bytes.push(codePoint) + } else if (codePoint < 0x800) { + if ((units -= 2) < 0) break + bytes.push( + codePoint >> 0x6 | 0xC0, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x10000) { + if ((units -= 3) < 0) break + bytes.push( + codePoint >> 0xC | 0xE0, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else if (codePoint < 0x110000) { + if ((units -= 4) < 0) break + bytes.push( + codePoint >> 0x12 | 0xF0, + codePoint >> 0xC & 0x3F | 0x80, + codePoint >> 0x6 & 0x3F | 0x80, + codePoint & 0x3F | 0x80 + ) + } else { + throw new Error('Invalid code point') + } + } + + return bytes +} + +function asciiToBytes (str) { + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + // Node's code seems to be doing this and not & 0x7F.. + byteArray.push(str.charCodeAt(i) & 0xFF) + } + return byteArray +} + +function utf16leToBytes (str, units) { + var c, hi, lo + var byteArray = [] + for (var i = 0; i < str.length; ++i) { + if ((units -= 2) < 0) break + + c = str.charCodeAt(i) + hi = c >> 8 + lo = c % 256 + byteArray.push(lo) + byteArray.push(hi) + } + + return byteArray +} + +function base64ToBytes (str) { + return base64.toByteArray(base64clean(str)) +} + +function blitBuffer (src, dst, offset, length) { + for (var i = 0; i < length; ++i) { + if ((i + offset >= dst.length) || (i >= src.length)) break + dst[i + offset] = src[i] + } + return i +} + +// ArrayBuffer or Uint8Array objects from other contexts (i.e. 
iframes) do not pass +// the `instanceof` check but they should be treated as of that type. +// See: https://github.com/feross/buffer/issues/166 +function isInstance (obj, type) { + return obj instanceof type || + (obj != null && obj.constructor != null && obj.constructor.name != null && + obj.constructor.name === type.name) +} +function numberIsNaN (obj) { + // For IE11 support + return obj !== obj // eslint-disable-line no-self-compare +} + +// Create lookup table for `toString('hex')` +// See: https://github.com/feross/buffer/issues/219 +var hexSliceLookupTable = (function () { + var alphabet = '0123456789abcdef' + var table = new Array(256) + for (var i = 0; i < 16; ++i) { + var i16 = i * 16 + for (var j = 0; j < 16; ++j) { + table[i16 + j] = alphabet[i] + alphabet[j] + } + } + return table +})() diff --git a/miniprogram/node_modules/buffer/package.json b/miniprogram/node_modules/buffer/package.json new file mode 100644 index 00000000..3b1b4986 --- /dev/null +++ b/miniprogram/node_modules/buffer/package.json @@ -0,0 +1,96 @@ +{ + "name": "buffer", + "description": "Node.js Buffer API, for the browser", + "version": "5.7.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/buffer/issues" + }, + "contributors": [ + "Romain Beauxis ", + "James Halliday " + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + }, + "devDependencies": { + "airtap": "^3.0.0", + "benchmark": "^2.1.4", + "browserify": "^17.0.0", + "concat-stream": "^2.0.0", + "hyperquest": "^2.1.3", + "is-buffer": "^2.0.4", + "is-nan": "^1.3.0", + "split": "^1.0.1", + "standard": "*", + "tape": "^5.0.1", + "through2": "^4.0.2", + "uglify-js": "^3.11.3" + }, + "homepage": "https://github.com/feross/buffer", + "jspm": { + "map": { + "./index.js": { + "node": "@node/buffer" + } + } + }, + "keywords": [ + "arraybuffer", + "browser", + "browserify", + "buffer", + "compatible", + "dataview", + "uint8array" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/buffer.git" + }, + "scripts": { + "perf": "browserify --debug perf/bracket-notation.js > perf/bundle.js && open perf/index.html", + "perf-node": "node perf/bracket-notation.js && node perf/concat.js && node perf/copy-big.js && node perf/copy.js && node perf/new-big.js && node perf/new.js && node perf/readDoubleBE.js && node perf/readFloatBE.js && node perf/readUInt32LE.js && node perf/slice.js && node perf/writeFloatBE.js", + "size": "browserify -r ./ | uglifyjs -c -m | gzip | wc -c", + "test": "standard && node ./bin/test.js", + "test-browser-es5": "airtap -- test/*.js", + "test-browser-es5-local": "airtap --local -- test/*.js", + "test-browser-es6": "airtap -- test/*.js test/node/*.js", + "test-browser-es6-local": "airtap --local -- test/*.js test/node/*.js", + "test-node": "tape test/*.js test/node/*.js", + "update-authors": "./bin/update-authors.sh" + }, + "standard": { + "ignore": [ + "test/node/**/*.js", + "test/common.js", + "test/_polyfill.js", + "perf/**/*.js" + ], + "globals": [ + "SharedArrayBuffer" + ] + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/miniprogram/node_modules/canvas/Readme.md b/miniprogram/node_modules/canvas/Readme.md new file 
mode 100644 index 00000000..d0429c52 --- /dev/null +++ b/miniprogram/node_modules/canvas/Readme.md @@ -0,0 +1,657 @@ +# node-canvas + +![Test](https://github.com/Automattic/node-canvas/workflows/Test/badge.svg) +[![NPM version](https://badge.fury.io/js/canvas.svg)](http://badge.fury.io/js/canvas) + +node-canvas is a [Cairo](http://cairographics.org/)-backed Canvas implementation for [Node.js](http://nodejs.org). + +## Installation + +```bash +$ npm install canvas +``` + +By default, pre-built binaries will be downloaded if you're on one of the following platforms: +- macOS x86/64 +- macOS aarch64 (aka Apple silicon) +- Linux x86/64 (glibc only) +- Windows x86/64 + +If you want to build from source, use `npm install --build-from-source` and see the **Compiling** section below. + +The minimum version of Node.js required is **18.12.0**. + +### Compiling + +If you don't have a supported OS or processor architecture, or you use `--build-from-source`, the module will be compiled on your system. This requires several dependencies, including Cairo and Pango. + +For detailed installation information, see the [wiki](https://github.com/Automattic/node-canvas/wiki/_pages). One-line installation instructions for common OSes are below. Note that libgif/giflib, librsvg and libjpeg are optional and only required if you need GIF, SVG and JPEG support, respectively. Cairo v1.10.0 or later is required. + +OS | Command +----- | ----- +macOS | Using [Homebrew](https://brew.sh/):
`brew install pkg-config cairo pango libpng jpeg giflib librsvg pixman python-setuptools` +Ubuntu | `sudo apt-get install build-essential libcairo2-dev libpango1.0-dev libjpeg-dev libgif-dev librsvg2-dev` +Fedora | `sudo yum install gcc-c++ cairo-devel pango-devel libjpeg-turbo-devel giflib-devel` +Solaris | `pkgin install cairo pango pkg-config xproto renderproto kbproto xextproto` +OpenBSD | `doas pkg_add cairo pango png jpeg giflib` +Windows | See the [wiki](https://github.com/Automattic/node-canvas/wiki/Installation:-Windows) +Others | See the [wiki](https://github.com/Automattic/node-canvas/wiki) + +**Mac OS X v10.11+:** If you have recently updated to Mac OS X v10.11+ and are experiencing trouble when compiling, run the following command: `xcode-select --install`. Read more about the problem [on Stack Overflow](http://stackoverflow.com/a/32929012/148072). +If you have xcode 10.0 or higher installed, in order to build from source you need NPM 6.4.1 or higher. + +## Quick Example + +```javascript +const { createCanvas, loadImage } = require('canvas') +const canvas = createCanvas(200, 200) +const ctx = canvas.getContext('2d') + +// Write "Awesome!" +ctx.font = '30px Impact' +ctx.rotate(0.1) +ctx.fillText('Awesome!', 50, 100) + +// Draw line under text +var text = ctx.measureText('Awesome!') +ctx.strokeStyle = 'rgba(0,0,0,0.5)' +ctx.beginPath() +ctx.lineTo(50, 102) +ctx.lineTo(50 + text.width, 102) +ctx.stroke() + +// Draw cat with lime helmet +loadImage('examples/images/lime-cat.jpg').then((image) => { + ctx.drawImage(image, 50, 0, 70, 70) + + console.log('') +}) +``` + +## Upgrading from 1.x to 2.x + +See the [changelog](https://github.com/Automattic/node-canvas/blob/master/CHANGELOG.md) for a guide to upgrading from 1.x to 2.x. + +For version 1.x documentation, see [the v1.x branch](https://github.com/Automattic/node-canvas/tree/v1.x). + +## Documentation + +This project is an implementation of the Web Canvas API and implements that API as closely as possible. For API documentation, please visit [Mozilla Web Canvas API](https://developer.mozilla.org/en-US/docs/Web/API/Canvas_API). (See [Compatibility Status](https://github.com/Automattic/node-canvas/wiki/Compatibility-Status) for the current API compliance.) All utility methods and non-standard APIs are documented below. + +### Utility methods + +* [createCanvas()](#createcanvas) +* [createImageData()](#createimagedata) +* [loadImage()](#loadimage) +* [registerFont()](#registerfont) +* [deregisterAllFonts()](#deregisterAllFonts) + + +### Non-standard APIs + +* [Image#src](#imagesrc) +* [Image#dataMode](#imagedatamode) +* [Canvas#toBuffer()](#canvastobuffer) +* [Canvas#createPNGStream()](#canvascreatepngstream) +* [Canvas#createJPEGStream()](#canvascreatejpegstream) +* [Canvas#createPDFStream()](#canvascreatepdfstream) +* [Canvas#toDataURL()](#canvastodataurl) +* [CanvasRenderingContext2D#patternQuality](#canvasrenderingcontext2dpatternquality) +* [CanvasRenderingContext2D#quality](#canvasrenderingcontext2dquality) +* [CanvasRenderingContext2D#textDrawingMode](#canvasrenderingcontext2dtextdrawingmode) +* [CanvasRenderingContext2D#globalCompositeOperation = 'saturate'](#canvasrenderingcontext2dglobalcompositeoperation--saturate) +* [CanvasRenderingContext2D#antialias](#canvasrenderingcontext2dantialias) + +### createCanvas() + +> ```ts +> createCanvas(width: number, height: number, type?: 'PDF'|'SVG') => Canvas +> ``` + +Creates a Canvas instance. This method works in both Node.js and Web browsers, where there is no Canvas constructor. 
(See `browser.js` for the implementation that runs in browsers.) + +```js +const { createCanvas } = require('canvas') +const mycanvas = createCanvas(200, 200) +const myPDFcanvas = createCanvas(600, 800, 'pdf') // see "PDF Support" section +``` + +### createImageData() + +> ```ts +> createImageData(width: number, height: number) => ImageData +> createImageData(data: Uint8ClampedArray, width: number, height?: number) => ImageData +> // for alternative pixel formats: +> createImageData(data: Uint16Array, width: number, height?: number) => ImageData +> ``` + +Creates an ImageData instance. This method works in both Node.js and Web browsers. + +```js +const { createImageData } = require('canvas') +const width = 20, height = 20 +const arraySize = width * height * 4 +const mydata = createImageData(new Uint8ClampedArray(arraySize), width) +``` + +### loadImage() + +> ```ts +> loadImage() => Promise +> ``` + +Convenience method for loading images. This method works in both Node.js and Web browsers. + +```js +const { loadImage } = require('canvas') +const myimg = loadImage('http://server.com/image.png') + +myimg.then(() => { + // do something with image +}).catch(err => { + console.log('oh no!', err) +}) + +// or with async/await: +const myimg = await loadImage('http://server.com/image.png') +// do something with image +``` + +### registerFont() + +> ```ts +> registerFont(path: string, { family: string, weight?: string, style?: string }) => void +> ``` + +To use a font file that is not installed as a system font, use `registerFont()` to register the font with Canvas. + +```js +const { registerFont, createCanvas } = require('canvas') +registerFont('comicsans.ttf', { family: 'Comic Sans' }) + +const canvas = createCanvas(500, 500) +const ctx = canvas.getContext('2d') + +ctx.font = '12px "Comic Sans"' +ctx.fillText('Everyone hates this font :(', 250, 10) +``` + +The second argument is an object with properties that resemble the CSS properties that are specified in `@font-face` rules. You must specify at least `family`. `weight`, and `style` are optional and default to `'normal'`. + +### deregisterAllFonts() + +> ```ts +> deregisterAllFonts() => void +> ``` + +Use `deregisterAllFonts` to unregister all fonts that have been previously registered. This method is useful when you want to remove all registered fonts, such as when using the canvas in tests + +```ts +const { registerFont, createCanvas, deregisterAllFonts } = require('canvas') + +describe('text rendering', () => { + afterEach(() => { + deregisterAllFonts(); + }) + it('should render text with Comic Sans', () => { + registerFont('comicsans.ttf', { family: 'Comic Sans' }) + + const canvas = createCanvas(500, 500) + const ctx = canvas.getContext('2d') + + ctx.font = '12px "Comic Sans"' + ctx.fillText('Everyone loves this font :)', 250, 10) + + // assertScreenshot() + }) +}) +``` + +### Image#src + +> ```ts +> img.src: string|Buffer +> ``` + +As in browsers, `img.src` can be set to a `data:` URI or a remote URL. In addition, node-canvas allows setting `src` to a local file path or `Buffer` instance. 
+ +```javascript +const { Image } = require('canvas') + +// From a buffer: +fs.readFile('images/squid.png', (err, squid) => { + if (err) throw err + const img = new Image() + img.onload = () => ctx.drawImage(img, 0, 0) + img.onerror = err => { throw err } + img.src = squid +}) + +// From a local file path: +const img = new Image() +img.onload = () => ctx.drawImage(img, 0, 0) +img.onerror = err => { throw err } +img.src = 'images/squid.png' + +// From a remote URL: +img.src = 'http://picsum.photos/200/300' +// ... as above + +// From a `data:` URI: +img.src = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg==' +// ... as above +``` + +*Note: In some cases, `img.src=` is currently synchronous. However, you should always use `img.onload` and `img.onerror`, as we intend to make `img.src=` always asynchronous as it is in browsers. See https://github.com/Automattic/node-canvas/issues/1007.* + +### Image#dataMode + +> ```ts +> img.dataMode: number +> ``` + +Applies to JPEG images drawn to PDF canvases only. + +Setting `img.dataMode = Image.MODE_MIME` or `Image.MODE_MIME|Image.MODE_IMAGE` enables MIME data tracking of images. When MIME data is tracked, PDF canvases can embed JPEGs directly into the output, rather than re-encoding into PNG. This can drastically reduce filesize and speed up rendering. + +```javascript +const { Image, createCanvas } = require('canvas') +const canvas = createCanvas(w, h, 'pdf') +const img = new Image() +img.dataMode = Image.MODE_IMAGE // Only image data tracked +img.dataMode = Image.MODE_MIME // Only mime data tracked +img.dataMode = Image.MODE_MIME | Image.MODE_IMAGE // Both are tracked +``` + +If working with a non-PDF canvas, image data *must* be tracked; otherwise the output will be junk. + +Enabling mime data tracking has no benefits (only a slow down) unless you are generating a PDF. + +### Canvas#toBuffer() + +> ```ts +> canvas.toBuffer((err: Error|null, result: Buffer) => void, mimeType?: string, config?: any) => void +> canvas.toBuffer(mimeType?: string, config?: any) => Buffer +> ``` + +Creates a [`Buffer`](https://nodejs.org/api/buffer.html) object representing the image contained in the canvas. + +* **callback** If provided, the buffer will be provided in the callback instead of being returned by the function. Invoked with an error as the first argument if encoding failed, or the resulting buffer as the second argument if it succeeded. Not supported for mimeType `raw` or for PDF or SVG canvases. +* **mimeType** A string indicating the image format. Valid options are `image/png`, `image/jpeg` (if node-canvas was built with JPEG support), `raw` (unencoded data in BGRA order on little-endian (most) systems, ARGB on big-endian systems; top-to-bottom), `application/pdf` (for PDF canvases) and `image/svg+xml` (for SVG canvases). Defaults to `image/png` for image canvases, or the corresponding type for PDF or SVG canvas. +* **config** + * For `image/jpeg`, an object specifying the quality (0 to 1), if progressive compression should be used and/or if chroma subsampling should be used: `{quality: 0.75, progressive: false, chromaSubsampling: true}`. All properties are optional. 
+ + * For `image/png`, an object specifying the ZLIB compression level (between 0 and 9), the compression filter(s), the palette (indexed PNGs only), the background palette index (indexed PNGs only) and/or the resolution (ppi): `{compressionLevel: 6, filters: canvas.PNG_ALL_FILTERS, palette: undefined, backgroundIndex: 0, resolution: undefined}`. All properties are optional. + + Note that the PNG format encodes the resolution in pixels per meter, so if you specify `96`, the file will encode 3780 ppm (~96.01 ppi). The resolution is undefined by default to match common browser behavior. + + * For `application/pdf`, an object specifying optional document metadata: `{title: string, author: string, subject: string, keywords: string, creator: string, creationDate: Date, modDate: Date}`. All properties are optional and default to `undefined`, except for `creationDate`, which defaults to the current date. *Adding metadata requires Cairo 1.16.0 or later.* + + For a description of these properties, see page 550 of [PDF 32000-1:2008](https://www.adobe.com/content/dam/acom/en/devnet/acrobat/pdfs/PDF32000_2008.pdf). + + Note that there is no standard separator for `keywords`. A space is recommended because it is in common use by other applications, and Cairo will enclose the list of keywords in quotes if a comma or semicolon is used. + +**Return value** + +If no callback is provided, a [`Buffer`](https://nodejs.org/api/buffer.html). If a callback is provided, none. + +#### Examples + +```js +// Default: buf contains a PNG-encoded image +const buf = canvas.toBuffer() + +// PNG-encoded, zlib compression level 3 for faster compression but bigger files, no filtering +const buf2 = canvas.toBuffer('image/png', { compressionLevel: 3, filters: canvas.PNG_FILTER_NONE }) + +// JPEG-encoded, 50% quality +const buf3 = canvas.toBuffer('image/jpeg', { quality: 0.5 }) + +// Asynchronous PNG +canvas.toBuffer((err, buf) => { + if (err) throw err // encoding failed + // buf is PNG-encoded image +}) + +canvas.toBuffer((err, buf) => { + if (err) throw err // encoding failed + // buf is JPEG-encoded image at 95% quality +}, 'image/jpeg', { quality: 0.95 }) + +// BGRA pixel values, native-endian +const buf4 = canvas.toBuffer('raw') +const { stride, width } = canvas +// In memory, this is `canvas.height * canvas.stride` bytes long. +// The top row of pixels, in BGRA order on little-endian hardware, +// left-to-right, is: +const topPixelsBGRALeftToRight = buf4.slice(0, width * 4) +// And the third row is: +const row3 = buf4.slice(2 * stride, 2 * stride + width * 4) + +// SVG and PDF canvases +const myCanvas = createCanvas(w, h, 'pdf') +myCanvas.toBuffer() // returns a buffer containing a PDF-encoded canvas +// With optional metadata: +myCanvas.toBuffer('application/pdf', { + title: 'my picture', + keywords: 'node.js demo cairo', + creationDate: new Date() +}) +``` + +### Canvas#createPNGStream() + +> ```ts +> canvas.createPNGStream(config?: any) => ReadableStream +> ``` + +Creates a [`ReadableStream`](https://nodejs.org/api/stream.html#stream_class_stream_readable) that emits PNG-encoded data. + +* `config` An object specifying the ZLIB compression level (between 0 and 9), the compression filter(s), the palette (indexed PNGs only) and/or the background palette index (indexed PNGs only): `{compressionLevel: 6, filters: canvas.PNG_ALL_FILTERS, palette: undefined, backgroundIndex: 0, resolution: undefined}`. All properties are optional. 
+ +#### Examples + +```javascript +const fs = require('fs') +const out = fs.createWriteStream(__dirname + '/test.png') +const stream = canvas.createPNGStream() +stream.pipe(out) +out.on('finish', () => console.log('The PNG file was created.')) +``` + +To encode indexed PNGs from canvases with `pixelFormat: 'A8'` or `'A1'`, provide an options object: + +```js +const palette = new Uint8ClampedArray([ + //r g b a + 0, 50, 50, 255, // index 1 + 10, 90, 90, 255, // index 2 + 127, 127, 255, 255 + // ... +]) +canvas.createPNGStream({ + palette: palette, + backgroundIndex: 0 // optional, defaults to 0 +}) +``` + +### Canvas#createJPEGStream() + +> ```ts +> canvas.createJPEGStream(config?: any) => ReadableStream +> ``` + +Creates a [`ReadableStream`](https://nodejs.org/api/stream.html#stream_class_stream_readable) that emits JPEG-encoded data. + +*Note: At the moment, `createJPEGStream()` is synchronous under the hood. That is, it runs in the main thread, not in the libuv threadpool.* + +* `config` an object specifying the quality (0 to 1), if progressive compression should be used and/or if chroma subsampling should be used: `{quality: 0.75, progressive: false, chromaSubsampling: true}`. All properties are optional. + +#### Examples + +```javascript +const fs = require('fs') +const out = fs.createWriteStream(__dirname + '/test.jpeg') +const stream = canvas.createJPEGStream() +stream.pipe(out) +out.on('finish', () => console.log('The JPEG file was created.')) + +// Disable 2x2 chromaSubsampling for deeper colors and use a higher quality +const stream = canvas.createJPEGStream({ + quality: 0.95, + chromaSubsampling: false +}) +``` + +### Canvas#createPDFStream() + +> ```ts +> canvas.createPDFStream(config?: any) => ReadableStream +> ``` + +* `config` an object specifying optional document metadata: `{title: string, author: string, subject: string, keywords: string, creator: string, creationDate: Date, modDate: Date}`. See `toBuffer()` for more information. *Adding metadata requires Cairo 1.16.0 or later.* + +Applies to PDF canvases only. Creates a [`ReadableStream`](https://nodejs.org/api/stream.html#stream_class_stream_readable) that emits the encoded PDF. `canvas.toBuffer()` also produces an encoded PDF, but `createPDFStream()` can be used to reduce memory usage. + +### Canvas#toDataURL() + +This is a standard API, but several non-standard calls are supported. The full list of supported calls is: + +```js +dataUrl = canvas.toDataURL() // defaults to PNG +dataUrl = canvas.toDataURL('image/png') +dataUrl = canvas.toDataURL('image/jpeg') +dataUrl = canvas.toDataURL('image/jpeg', quality) // quality from 0 to 1 +canvas.toDataURL((err, png) => { }) // defaults to PNG +canvas.toDataURL('image/png', (err, png) => { }) +canvas.toDataURL('image/jpeg', (err, jpeg) => { }) // sync JPEG is not supported +canvas.toDataURL('image/jpeg', {...opts}, (err, jpeg) => { }) // see Canvas#createJPEGStream for valid options +canvas.toDataURL('image/jpeg', quality, (err, jpeg) => { }) // spec-following; quality from 0 to 1 +``` + +### CanvasRenderingContext2D#patternQuality + +> ```ts +> context.patternQuality: 'fast'|'good'|'best'|'nearest'|'bilinear' +> ``` + +Defaults to `'good'`. Affects pattern (gradient, image, etc.) rendering quality. + +### CanvasRenderingContext2D#quality + +> ```ts +> context.quality: 'fast'|'good'|'best'|'nearest'|'bilinear' +> ``` + +Defaults to `'good'`. Like `patternQuality`, but applies to transformations affecting more than just patterns. 
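+
+A minimal sketch of setting both quality properties before drawing (the canvas size, image path, and scaling below are illustrative, not from the original documentation):
+
+```js
+const { createCanvas, loadImage } = require('canvas')
+
+const canvas = createCanvas(400, 400)
+const ctx = canvas.getContext('2d')
+
+// Prefer slower, higher-quality filtering for patterns and for
+// transformed (e.g. scaled) drawing operations.
+ctx.patternQuality = 'best'
+ctx.quality = 'best'
+
+loadImage('examples/images/lime-cat.jpg').then((image) => {
+  // Scaling an image is where the quality setting is most visible.
+  ctx.drawImage(image, 0, 0, 400, 400)
+})
+```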
+ +### CanvasRenderingContext2D#textDrawingMode + +> ```ts +> context.textDrawingMode: 'path'|'glyph' +> ``` + +Defaults to `'path'`. The effect depends on the canvas type: + +* **Standard (image)** `glyph` and `path` both result in rasterized text. Glyph mode is faster than `path`, but may result in lower-quality text, especially when rotated or translated. + +* **PDF** `glyph` will embed text instead of paths into the PDF. This is faster to encode, faster to open with PDF viewers, yields a smaller file size and makes the text selectable. The subset of the font needed to render the glyphs will be embedded in the PDF. This is usually the mode you want to use with PDF canvases. + +* **SVG** `glyph` does *not* cause `<text>` elements to be produced as one might expect ([cairo bug](https://gitlab.freedesktop.org/cairo/cairo/issues/253)). Rather, `glyph` will create a `<defs>` section with a `<symbol>` for each glyph; those glyphs are then reused via `<use>` elements. `path` mode creates a `<path>` element for each text string. `glyph` mode is faster and yields a smaller file size. + +In `glyph` mode, `ctx.strokeText()` and `ctx.fillText()` behave the same (aside from using the stroke and fill style, respectively). + +This property is tracked as part of the canvas state in save/restore. + +### CanvasRenderingContext2D#globalCompositeOperation = 'saturate' + +In addition to all of the standard global composite operations defined by the Canvas specification, the ['saturate'](https://www.cairographics.org/operators/#saturate) operation is also available. + +### CanvasRenderingContext2D#antialias + +> ```ts +> context.antialias: 'default'|'none'|'gray'|'subpixel' +> ``` + +Sets the anti-aliasing mode. + +## PDF Output Support + +node-canvas can create PDF documents instead of images. The canvas type must be set when creating the canvas as follows: + +```js +const canvas = createCanvas(200, 500, 'pdf') +``` + +An additional method `.addPage()` is then available to create multiple page PDFs: + +```js +// On first page +ctx.font = '22px Helvetica' +ctx.fillText('Hello World', 50, 80) + +ctx.addPage() +// Now on second page +ctx.font = '22px Helvetica' +ctx.fillText('Hello World 2', 50, 80) + +canvas.toBuffer() // returns a PDF file +canvas.createPDFStream() // returns a ReadableStream that emits a PDF +// With optional document metadata (requires Cairo 1.16.0): +canvas.toBuffer('application/pdf', { + title: 'my picture', + keywords: 'node.js demo cairo', + creationDate: new Date() +}) +``` + +It is also possible to create pages with different sizes by passing `width` and `height` to the `.addPage()` method: + +```js +ctx.font = '22px Helvetica' +ctx.fillText('Hello World', 50, 80) +ctx.addPage(400, 800) + +ctx.fillText('Hello World 2', 50, 80) +``` + +It is possible to add hyperlinks using `.beginTag()` and `.endTag()`: + +```js +ctx.beginTag('Link', "uri='https://google.com'") +ctx.font = '22px Helvetica' +ctx.fillText('Hello World', 50, 80) +ctx.endTag('Link') +``` + +Or with a defined rectangle: + +```js +ctx.beginTag('Link', "uri='https://google.com' rect=[50 80 100 20]") +ctx.endTag('Link') +``` + +Note that the syntax for attributes is unique to Cairo. See [cairo_tag_begin](https://www.cairographics.org/manual/cairo-Tags-and-Links.html#cairo-tag-begin) for the full documentation. + +You can create areas on the canvas using the "cairo.dest" tag, and then link to them using the "Link" tag with the `dest=` attribute (see the sketch below). You can also define PDF structure for accessibility by using tag names like "P", "H1", and "TABLE". 
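+
+A rough sketch of destination linking (the destination name, rectangle, and drawing calls are illustrative; the attribute syntax follows the Cairo tag documentation linked above):
+
+```js
+// Mark a named destination around the target content:
+ctx.beginTag('cairo.dest', "name='chapter-2'")
+ctx.fillText('Chapter 2', 50, 80)
+ctx.endTag('cairo.dest')
+
+// Elsewhere in the document, link a rectangle to that destination:
+ctx.beginTag('Link', "dest='chapter-2' rect=[50 80 100 20]")
+ctx.endTag('Link')
+```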
The standard tags are defined in §14.8.4 of the [PDF 1.7](https://opensource.adobe.com/dc-acrobat-sdk-docs/pdfstandards/PDF32000_2008.pdf) specification. + +See also: + +* [Image#dataMode](#imagedatamode) for embedding JPEGs in PDFs +* [Canvas#createPDFStream()](#canvascreatepdfstream) for creating PDF streams +* [CanvasRenderingContext2D#textDrawingMode](#canvasrenderingcontext2dtextdrawingmode) + for embedding text instead of paths + +## SVG Output Support + +node-canvas can create SVG documents instead of images. The canvas type must be set when creating the canvas as follows: + +```js +const canvas = createCanvas(200, 500, 'svg') +// Use the normal primitives. +fs.writeFileSync('out.svg', canvas.toBuffer()) +``` + +## SVG Image Support + +If librsvg is available when node-canvas is installed, node-canvas can render SVG images to your canvas context. This currently works by rasterizing the SVG image (i.e. drawing an SVG image to an SVG canvas will not preserve the SVG data). + +```js +const img = new Image() +img.onload = () => ctx.drawImage(img, 0, 0) +img.onerror = err => { throw err } +img.src = './example.svg' +``` + +## Image pixel formats (experimental) + +node-canvas has experimental support for additional pixel formats, roughly following the [Canvas color space proposal](https://github.com/WICG/canvas-color-space/blob/master/CanvasColorSpaceProposal.md). + +```js +const canvas = createCanvas(200, 200) +const ctx = canvas.getContext('2d', { pixelFormat: 'A8' }) +``` + +By default, canvases are created in the `RGBA32` format, which corresponds to the native HTML Canvas behavior. Each pixel is 32 bits. The JavaScript APIs that involve pixel data (`getImageData`, `putImageData`) store the colors in the order {red, green, blue, alpha} without alpha pre-multiplication. (The C++ API stores the colors in the order {alpha, red, green, blue} in native-[endian](https://en.wikipedia.org/wiki/Endianness) ordering, with alpha pre-multiplication.) + +These additional pixel formats have experimental support: + +* `RGB24` Like `RGBA32`, but the 8 alpha bits are always opaque. This format is always used if the `alpha` context attribute is set to false (i.e. `canvas.getContext('2d', {alpha: false})`). This format can be faster than `RGBA32` because transparency does not need to be calculated. +* `A8` Each pixel is 8 bits. This format can either be used for creating grayscale images (treating each byte as an alpha value), or for creating indexed PNGs (treating each byte as a palette index) (see [the example using alpha values with `fillStyle`](examples/indexed-png-alpha.js) and [the example using `imageData`](examples/indexed-png-image-data.js)). +* `RGB16_565` Each pixel is 16 bits, with red in the upper 5 bits, green in the middle 6 bits, and blue in the lower 5 bits, in native platform endianness. Some hardware devices and frame buffers use this format. Note that PNG does not support this format; when creating a PNG, the image will be converted to 24-bit RGB. This format is thus suboptimal for generating PNGs. `ImageData` instances for this mode use a `Uint16Array` instead of a `Uint8ClampedArray`. +* `A1` Each pixel is 1 bit, and pixels are packed together into 32-bit quantities. The ordering of the bits matches the endianness of the + platform: on a little-endian machine, the first pixel is the least-significant bit. This format can be used for creating single-color images. 
*Support for this format is incomplete, see note below.* +* `RGB30` Each pixel is 30 bits, with red in the upper 10, green in the middle 10, and blue in the lower 10. (Requires Cairo 1.12 or later.) *Support for this format is incomplete, see note below.* + +Notes and caveats: + +* Using a non-default format can affect the behavior of APIs that involve pixel data: + + * `context2d.createImageData` The size of the array returned depends on the number of bits per pixel for the underlying image data format, per the above descriptions. + * `context2d.getImageData` The format of the array returned depends on the underlying image mode, per the above descriptions. Be aware of platform endianness, which can be determined using node.js's [`os.endianness()`](https://nodejs.org/api/os.html#os_os_endianness) + function. + * `context2d.putImageData` As above. + +* `A1` and `RGB30` do not yet support `getImageData` or `putImageData`. Have a use case and/or opinion on working with these formats? Open an issue and let us know! (See #935.) + +* `A1`, `A8`, `RGB30` and `RGB16_565` with shadow blurs may crash or not render properly. + +* The `ImageData(width, height)` and `ImageData(Uint8ClampedArray, width)` constructors assume 4 bytes per pixel. To create an `ImageData` instance with a different number of bytes per pixel, use `new ImageData(new Uint8ClampedArray(size), width, height)` or `new ImageData(new Uint16Array(size), width, height)`. + +## Testing + +First make sure you've built the latest version. Get all the deps you need (see [compiling](#compiling) above), and run: + +``` +npm install --build-from-source +``` + +For visual tests: `npm run test-server` and point your browser to http://localhost:4000. + +For unit tests: `npm run test`. + +## Benchmarks + +Benchmarks live in the `benchmarks` directory. + +## Examples + +Examples live in the `examples` directory. Most produce a png image of the same name, and others such as *live-clock.js* launch an HTTP server to be viewed in the browser. + +## Original Authors + + - TJ Holowaychuk ([tj](http://github.com/tj)) + - Nathan Rajlich ([TooTallNate](http://github.com/TooTallNate)) + - Rod Vagg ([rvagg](http://github.com/rvagg)) + - Juriy Zaytsev ([kangax](http://github.com/kangax)) + +## License + +### node-canvas + +(The MIT License) + +Copyright (c) 2010 LearnBoost, and contributors &lt;dev@learnboost.com&gt; + +Copyright (c) 2014 Automattic, Inc and contributors &lt;dev@automattic.com&gt; + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the 'Software'), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +### BMP parser + +See [license](src/bmp/LICENSE.md) diff --git a/miniprogram/node_modules/canvas/binding.gyp b/miniprogram/node_modules/canvas/binding.gyp new file mode 100644 index 00000000..bf647f7d --- /dev/null +++ b/miniprogram/node_modules/canvas/binding.gyp @@ -0,0 +1,234 @@ +{ + 'conditions': [ + ['OS=="win"', { + 'variables': { + 'GTK_Root%': 'C:/GTK', # Set the location of GTK all-in-one bundle + 'with_jpeg%': 'false', + 'with_gif%': 'false', + 'with_rsvg%': 'false', + 'variables': { # Nest jpeg_root to evaluate it before with_jpeg + 'jpeg_root%': ' + + + + Debug + x64 + + + Release + x64 + + + + {90D75E7A-41A0-8814-61A2-B5859FC0E033} + Win32Proj + canvas + true + x64 + 10.0.26100.0 + + + + DynamicLibrary + + + v143 + + + + + + + + + + $(ExecutablePath);$(MSBuildProjectDirectory)\..\bin\;$(MSBuildProjectDirectory)\..\bin\ + true + $(Configuration)\obj\$(ProjectName)\ + false + true + $(SolutionDir)$(Configuration)\ + .node + .node + .node + .node + $(ProjectName) + $(OutDir)\$(ProjectName).node + + + + C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\include\node;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\src;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\openssl\config;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\openssl\openssl\include;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\uv\include;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\zlib;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\v8\include;..\node_modules\node-addon-api;D:\a\_temp\msys64\ucrt64\include;D:\a\_temp\msys64\ucrt64\include\harfbuzz;D:\a\_temp\msys64\ucrt64\include\pango-1.0;D:\a\_temp\msys64\ucrt64\include\cairo;D:\a\_temp\msys64\ucrt64\include\libpng16;D:\a\_temp\msys64\ucrt64\include\glib-2.0;D:\a\_temp\msys64\ucrt64\lib\glib-2.0\include;D:\a\_temp\msys64\ucrt64\include\pixman-1;D:\a\_temp\msys64\ucrt64\include\freetype2;D:\a\_temp\msys64\ucrt64\include\fontconfig;D:\a\_temp\msys64\ucrt64\include\librsvg-2.0;D:\a\_temp\msys64\ucrt64\include\gdk-pixbuf-2.0;D:\a\_temp\msys64\ucrt64\include\libgsf-1;%(AdditionalIncludeDirectories) + /Zc:__cplusplus -std:c++17 %(AdditionalOptions) + EnableFastChecks + true + OldStyle + 4100;4127;4201;4244;4267;4506;4611;4714;4512;4351;4355;4800;4251;4275;4244;4267;%(DisableSpecificWarnings) + Sync + false + true + false + Disabled + NotUsing + NODE_GYP_MODULE_NAME=canvas;USING_UV_SHARED=1;USING_V8_SHARED=1;V8_DEPRECATION_WARNINGS=1;_GLIBCXX_USE_CXX11_ABI=1;WIN32;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_HAS_EXCEPTIONS=0;OPENSSL_NO_PINSHARED;OPENSSL_THREADS;HAVE_GIF;HAVE_JPEG;HAVE_RSVG;HAVE_BOOLEAN;_USE_MATH_DEFINES;NOMINMAX;NAPI_DISABLE_CPP_EXCEPTIONS;NODE_ADDON_API_ENABLE_MAYBE;BUILDING_NODE_EXTENSION;HOST_BINARY="node.exe";DEBUG;_DEBUG;%(PreprocessorDefinitions) + MultiThreadedDebug + true + true + false + Level4 + true + + + /LTCG:INCREMENTAL %(AdditionalOptions) + + + 
kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;DelayImp.lib;"C:\\Users\\runneradmin\\AppData\\Local\\node-gyp\\Cache\\21.7.3\\x64\\node.lib";D:\a\_temp\msys64\ucrt64\lib\libcairo-2.lib;D:\a\_temp\msys64\ucrt64\lib\libpng16-16.lib;D:\a\_temp\msys64\ucrt64\lib\libjpeg-8.lib;D:\a\_temp\msys64\ucrt64\lib\libpango-1.0-0.lib;D:\a\_temp\msys64\ucrt64\lib\libpangocairo-1.0-0.lib;D:\a\_temp\msys64\ucrt64\lib\libgobject-2.0-0.lib;D:\a\_temp\msys64\ucrt64\lib\libglib-2.0-0.lib;D:\a\_temp\msys64\ucrt64\lib\libturbojpeg.lib;D:\a\_temp\msys64\ucrt64\lib\libgif-7.lib;D:\a\_temp\msys64\ucrt64\lib\libfreetype-6.lib;D:\a\_temp\msys64\ucrt64\lib\librsvg-2-2.lib + /LTCG:INCREMENTAL /ignore:4199 %(AdditionalOptions) + node.exe;%(DelayLoadDLLs) + true + true + true + $(OutDir)$(ProjectName).node + true + .node + MachineX64 + + + C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\include\node;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\src;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\openssl\config;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\openssl\openssl\include;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\uv\include;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\zlib;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\v8\include;..\node_modules\node-addon-api;D:\a\_temp\msys64\ucrt64\include;D:\a\_temp\msys64\ucrt64\include\harfbuzz;D:\a\_temp\msys64\ucrt64\include\pango-1.0;D:\a\_temp\msys64\ucrt64\include\cairo;D:\a\_temp\msys64\ucrt64\include\libpng16;D:\a\_temp\msys64\ucrt64\include\glib-2.0;D:\a\_temp\msys64\ucrt64\lib\glib-2.0\include;D:\a\_temp\msys64\ucrt64\include\pixman-1;D:\a\_temp\msys64\ucrt64\include\freetype2;D:\a\_temp\msys64\ucrt64\include\fontconfig;D:\a\_temp\msys64\ucrt64\include\librsvg-2.0;D:\a\_temp\msys64\ucrt64\include\gdk-pixbuf-2.0;D:\a\_temp\msys64\ucrt64\include\libgsf-1;%(AdditionalIncludeDirectories) + NODE_GYP_MODULE_NAME=canvas;USING_UV_SHARED=1;USING_V8_SHARED=1;V8_DEPRECATION_WARNINGS=1;_GLIBCXX_USE_CXX11_ABI=1;WIN32;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_HAS_EXCEPTIONS=0;OPENSSL_NO_PINSHARED;OPENSSL_THREADS;HAVE_GIF;HAVE_JPEG;HAVE_RSVG;HAVE_BOOLEAN;_USE_MATH_DEFINES;NOMINMAX;NAPI_DISABLE_CPP_EXCEPTIONS;NODE_ADDON_API_ENABLE_MAYBE;BUILDING_NODE_EXTENSION;HOST_BINARY="node.exe";DEBUG;_DEBUG;%(PreprocessorDefinitions);%(PreprocessorDefinitions) + + + + + 
C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\include\node;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\src;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\openssl\config;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\openssl\openssl\include;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\uv\include;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\zlib;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\v8\include;..\node_modules\node-addon-api;D:\a\_temp\msys64\ucrt64\include;D:\a\_temp\msys64\ucrt64\include\harfbuzz;D:\a\_temp\msys64\ucrt64\include\pango-1.0;D:\a\_temp\msys64\ucrt64\include\cairo;D:\a\_temp\msys64\ucrt64\include\libpng16;D:\a\_temp\msys64\ucrt64\include\glib-2.0;D:\a\_temp\msys64\ucrt64\lib\glib-2.0\include;D:\a\_temp\msys64\ucrt64\include\pixman-1;D:\a\_temp\msys64\ucrt64\include\freetype2;D:\a\_temp\msys64\ucrt64\include\fontconfig;D:\a\_temp\msys64\ucrt64\include\librsvg-2.0;D:\a\_temp\msys64\ucrt64\include\gdk-pixbuf-2.0;D:\a\_temp\msys64\ucrt64\include\libgsf-1;%(AdditionalIncludeDirectories) + /Zc:__cplusplus -std:c++17 %(AdditionalOptions) + true + OldStyle + 4100;4127;4201;4244;4267;4506;4611;4714;4512;4351;4355;4800;4251;4275;4244;4267;%(DisableSpecificWarnings) + Sync + Speed + true + AnySuitable + true + true + true + Full + NotUsing + NODE_GYP_MODULE_NAME=canvas;USING_UV_SHARED=1;USING_V8_SHARED=1;V8_DEPRECATION_WARNINGS=1;_GLIBCXX_USE_CXX11_ABI=1;WIN32;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_HAS_EXCEPTIONS=0;OPENSSL_NO_PINSHARED;OPENSSL_THREADS;HAVE_GIF;HAVE_JPEG;HAVE_RSVG;HAVE_BOOLEAN;_USE_MATH_DEFINES;NOMINMAX;NAPI_DISABLE_CPP_EXCEPTIONS;NODE_ADDON_API_ENABLE_MAYBE;BUILDING_NODE_EXTENSION;HOST_BINARY="node.exe";%(PreprocessorDefinitions) + MultiThreaded + false + true + true + false + Level4 + true + + + /LTCG:INCREMENTAL %(AdditionalOptions) + + + kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;DelayImp.lib;"C:\\Users\\runneradmin\\AppData\\Local\\node-gyp\\Cache\\21.7.3\\x64\\node.lib";D:\a\_temp\msys64\ucrt64\lib\libcairo-2.lib;D:\a\_temp\msys64\ucrt64\lib\libpng16-16.lib;D:\a\_temp\msys64\ucrt64\lib\libjpeg-8.lib;D:\a\_temp\msys64\ucrt64\lib\libpango-1.0-0.lib;D:\a\_temp\msys64\ucrt64\lib\libpangocairo-1.0-0.lib;D:\a\_temp\msys64\ucrt64\lib\libgobject-2.0-0.lib;D:\a\_temp\msys64\ucrt64\lib\libglib-2.0-0.lib;D:\a\_temp\msys64\ucrt64\lib\libturbojpeg.lib;D:\a\_temp\msys64\ucrt64\lib\libgif-7.lib;D:\a\_temp\msys64\ucrt64\lib\libfreetype-6.lib;D:\a\_temp\msys64\ucrt64\lib\librsvg-2-2.lib + /LTCG:INCREMENTAL /ignore:4199 %(AdditionalOptions) + node.exe;%(DelayLoadDLLs) + true + true + true + $(OutDir)$(ProjectName).node + true + .node + MachineX64 + + + 
C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\include\node;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\src;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\openssl\config;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\openssl\openssl\include;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\uv\include;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\zlib;C:\Users\runneradmin\AppData\Local\node-gyp\Cache\21.7.3\deps\v8\include;..\node_modules\node-addon-api;D:\a\_temp\msys64\ucrt64\include;D:\a\_temp\msys64\ucrt64\include\harfbuzz;D:\a\_temp\msys64\ucrt64\include\pango-1.0;D:\a\_temp\msys64\ucrt64\include\cairo;D:\a\_temp\msys64\ucrt64\include\libpng16;D:\a\_temp\msys64\ucrt64\include\glib-2.0;D:\a\_temp\msys64\ucrt64\lib\glib-2.0\include;D:\a\_temp\msys64\ucrt64\include\pixman-1;D:\a\_temp\msys64\ucrt64\include\freetype2;D:\a\_temp\msys64\ucrt64\include\fontconfig;D:\a\_temp\msys64\ucrt64\include\librsvg-2.0;D:\a\_temp\msys64\ucrt64\include\gdk-pixbuf-2.0;D:\a\_temp\msys64\ucrt64\include\libgsf-1;%(AdditionalIncludeDirectories) + NODE_GYP_MODULE_NAME=canvas;USING_UV_SHARED=1;USING_V8_SHARED=1;V8_DEPRECATION_WARNINGS=1;_GLIBCXX_USE_CXX11_ABI=1;WIN32;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;_HAS_EXCEPTIONS=0;OPENSSL_NO_PINSHARED;OPENSSL_THREADS;HAVE_GIF;HAVE_JPEG;HAVE_RSVG;HAVE_BOOLEAN;_USE_MATH_DEFINES;NOMINMAX;NAPI_DISABLE_CPP_EXCEPTIONS;NODE_ADDON_API_ENABLE_MAYBE;BUILDING_NODE_EXTENSION;HOST_BINARY="node.exe";%(PreprocessorDefinitions);%(PreprocessorDefinitions) + + + + + + + + $(IntDir)\src\backend\Backend.obj + + + $(IntDir)\src\backend\ImageBackend.obj + + + $(IntDir)\src\backend\PdfBackend.obj + + + $(IntDir)\src\backend\SvgBackend.obj + + + $(IntDir)\src\bmp\BMPParser.obj + + + $(IntDir)\src\Backends.obj + + + $(IntDir)\src\Canvas.obj + + + $(IntDir)\src\CanvasGradient.obj + + + $(IntDir)\src\CanvasPattern.obj + + + $(IntDir)\src\CanvasRenderingContext2d.obj + + + $(IntDir)\src\closure.obj + + + $(IntDir)\src\color.obj + + + $(IntDir)\src\Image.obj + + + $(IntDir)\src\ImageData.obj + + + $(IntDir)\src\init.obj + + + $(IntDir)\src\register_font.obj + + + $(IntDir)\src\FontParser.obj + + + + + + + diff --git a/miniprogram/node_modules/canvas/build/canvas.vcxproj.filters b/miniprogram/node_modules/canvas/build/canvas.vcxproj.filters new file mode 100644 index 00000000..125bbf7b --- /dev/null +++ b/miniprogram/node_modules/canvas/build/canvas.vcxproj.filters @@ -0,0 +1,217 @@ + + + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {0601BD18-2FE3-2D4A-0C05-611A0F36D709} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {0601BD18-2FE3-2D4A-0C05-611A0F36D709} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {0601BD18-2FE3-2D4A-0C05-611A0F36D709} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {0601BD18-2FE3-2D4A-0C05-611A0F36D709} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {C08C95BF-9646-DB44-5C81-9CB5B5F652A5} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + 
{739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {7B735499-E5DD-1C2B-6C26-70023832A1CF} + + + {296B63E6-8BC4-B79B-77CC-9C615B0D2B0F} + + + {C1450D01-C033-76F3-3763-6DE88AF48A77} + + + {A49AD564-6B22-6A46-08E5-B5A7F4427839} + + + {1C63F1C8-0353-A369-E968-394FCDA23886} + + + {E075064C-529C-A4E7-0810-FB88D599C3BE} + + + {56DF7A98-063D-FB9D-485C-089023B4C16A} + + + {741E0E76-39B2-B1AB-9FA1-F1A20B16F295} + + + {56DF7A98-063D-FB9D-485C-089023B4C16A} + + + {77348C0E-2034-7791-74D5-63C077DF5A3B} + + + {8CDEE807-BC53-E450-C8B8-4DEBB66742D4} + + + {739DB09A-CC57-A953-A6CF-F64FA08E4FA7} + + + + + ..\src\backend + + + ..\src\backend + + + ..\src\backend + + + ..\src\backend + + + ..\src\bmp + + + ..\src + + + ..\src + + + ..\src + + + ..\src + + + ..\src + + + ..\src + + + ..\src + + + ..\src + + + ..\src + + + ..\src + + + ..\src + + + ..\src + + + C:\hostedtoolcache\windows\node\21.7.3\x64\node_modules\npm\node_modules\node-gyp\src + + + .. + + + diff --git a/miniprogram/node_modules/canvas/build/config.gypi b/miniprogram/node_modules/canvas/build/config.gypi new file mode 100644 index 00000000..c76dc4a6 --- /dev/null +++ b/miniprogram/node_modules/canvas/build/config.gypi @@ -0,0 +1,433 @@ +# Do not edit. 
File was generated by node-gyp's "configure" step +{ + "target_defaults": { + "cflags": [], + "default_configuration": "Release", + "defines": [], + "include_dirs": [], + "libraries": [], + "msbuild_toolset": "v143", + "msvs_windows_target_platform_version": "10.0.26100.0" + }, + "variables": { + "asan": 0, + "coverage": "false", + "dcheck_always_on": 0, + "debug_nghttp2": "false", + "debug_node": "false", + "enable_lto": "false", + "enable_pgo_generate": "false", + "enable_pgo_use": "false", + "error_on_warn": "false", + "force_dynamic_crt": 0, + "host_arch": "x64", + "icu_data_in": "..\\..\\deps\\icu-tmp\\icudt74l.dat", + "icu_endianness": "l", + "icu_gyp_path": "tools/icu/icu-generic.gyp", + "icu_path": "deps/icu-small", + "icu_small": "false", + "icu_ver_major": "74", + "is_debug": 0, + "libdir": "lib", + "llvm_version": "0.0", + "napi_build_version": "9", + "nasm_version": "2.16", + "node_builtin_shareable_builtins": [ + "deps/cjs-module-lexer/lexer.js", + "deps/cjs-module-lexer/dist/lexer.js", + "deps/undici/undici.js" + ], + "node_byteorder": "little", + "node_debug_lib": "false", + "node_enable_d8": "false", + "node_enable_v8_vtunejit": "false", + "node_fipsinstall": "false", + "node_install_corepack": "true", + "node_install_npm": "true", + "node_library_files": [ + "lib/_http_agent.js", + "lib/_http_client.js", + "lib/_http_common.js", + "lib/_http_incoming.js", + "lib/_http_outgoing.js", + "lib/_http_server.js", + "lib/_stream_duplex.js", + "lib/_stream_passthrough.js", + "lib/_stream_readable.js", + "lib/_stream_transform.js", + "lib/_stream_wrap.js", + "lib/_stream_writable.js", + "lib/_tls_common.js", + "lib/_tls_wrap.js", + "lib/assert.js", + "lib/assert/strict.js", + "lib/async_hooks.js", + "lib/buffer.js", + "lib/child_process.js", + "lib/cluster.js", + "lib/console.js", + "lib/constants.js", + "lib/crypto.js", + "lib/dgram.js", + "lib/diagnostics_channel.js", + "lib/dns.js", + "lib/dns/promises.js", + "lib/domain.js", + "lib/events.js", + "lib/fs.js", + "lib/fs/promises.js", + "lib/http.js", + "lib/http2.js", + "lib/https.js", + "lib/inspector.js", + "lib/inspector/promises.js", + "lib/internal/abort_controller.js", + "lib/internal/assert.js", + "lib/internal/assert/assertion_error.js", + "lib/internal/assert/calltracker.js", + "lib/internal/async_hooks.js", + "lib/internal/blob.js", + "lib/internal/blocklist.js", + "lib/internal/bootstrap/node.js", + "lib/internal/bootstrap/realm.js", + "lib/internal/bootstrap/shadow_realm.js", + "lib/internal/bootstrap/switches/does_not_own_process_state.js", + "lib/internal/bootstrap/switches/does_own_process_state.js", + "lib/internal/bootstrap/switches/is_main_thread.js", + "lib/internal/bootstrap/switches/is_not_main_thread.js", + "lib/internal/bootstrap/web/exposed-wildcard.js", + "lib/internal/bootstrap/web/exposed-window-or-worker.js", + "lib/internal/buffer.js", + "lib/internal/child_process.js", + "lib/internal/child_process/serialization.js", + "lib/internal/cli_table.js", + "lib/internal/cluster/child.js", + "lib/internal/cluster/primary.js", + "lib/internal/cluster/round_robin_handle.js", + "lib/internal/cluster/shared_handle.js", + "lib/internal/cluster/utils.js", + "lib/internal/cluster/worker.js", + "lib/internal/console/constructor.js", + "lib/internal/console/global.js", + "lib/internal/constants.js", + "lib/internal/crypto/aes.js", + "lib/internal/crypto/certificate.js", + "lib/internal/crypto/cfrg.js", + "lib/internal/crypto/cipher.js", + "lib/internal/crypto/diffiehellman.js", + "lib/internal/crypto/ec.js", + 
"lib/internal/crypto/hash.js", + "lib/internal/crypto/hashnames.js", + "lib/internal/crypto/hkdf.js", + "lib/internal/crypto/keygen.js", + "lib/internal/crypto/keys.js", + "lib/internal/crypto/mac.js", + "lib/internal/crypto/pbkdf2.js", + "lib/internal/crypto/random.js", + "lib/internal/crypto/rsa.js", + "lib/internal/crypto/scrypt.js", + "lib/internal/crypto/sig.js", + "lib/internal/crypto/util.js", + "lib/internal/crypto/webcrypto.js", + "lib/internal/crypto/webidl.js", + "lib/internal/crypto/x509.js", + "lib/internal/debugger/inspect.js", + "lib/internal/debugger/inspect_client.js", + "lib/internal/debugger/inspect_repl.js", + "lib/internal/dgram.js", + "lib/internal/dns/callback_resolver.js", + "lib/internal/dns/promises.js", + "lib/internal/dns/utils.js", + "lib/internal/encoding.js", + "lib/internal/error_serdes.js", + "lib/internal/errors.js", + "lib/internal/event_target.js", + "lib/internal/events/symbols.js", + "lib/internal/file.js", + "lib/internal/fixed_queue.js", + "lib/internal/freelist.js", + "lib/internal/freeze_intrinsics.js", + "lib/internal/fs/cp/cp-sync.js", + "lib/internal/fs/cp/cp.js", + "lib/internal/fs/dir.js", + "lib/internal/fs/glob.js", + "lib/internal/fs/promises.js", + "lib/internal/fs/read/context.js", + "lib/internal/fs/recursive_watch.js", + "lib/internal/fs/rimraf.js", + "lib/internal/fs/streams.js", + "lib/internal/fs/sync_write_stream.js", + "lib/internal/fs/utils.js", + "lib/internal/fs/watchers.js", + "lib/internal/heap_utils.js", + "lib/internal/histogram.js", + "lib/internal/http.js", + "lib/internal/http2/compat.js", + "lib/internal/http2/core.js", + "lib/internal/http2/util.js", + "lib/internal/idna.js", + "lib/internal/inspector_async_hook.js", + "lib/internal/js_stream_socket.js", + "lib/internal/legacy/processbinding.js", + "lib/internal/linkedlist.js", + "lib/internal/main/check_syntax.js", + "lib/internal/main/embedding.js", + "lib/internal/main/eval_stdin.js", + "lib/internal/main/eval_string.js", + "lib/internal/main/inspect.js", + "lib/internal/main/mksnapshot.js", + "lib/internal/main/print_help.js", + "lib/internal/main/prof_process.js", + "lib/internal/main/repl.js", + "lib/internal/main/run_main_module.js", + "lib/internal/main/test_runner.js", + "lib/internal/main/watch_mode.js", + "lib/internal/main/worker_thread.js", + "lib/internal/mime.js", + "lib/internal/modules/cjs/loader.js", + "lib/internal/modules/esm/assert.js", + "lib/internal/modules/esm/create_dynamic_module.js", + "lib/internal/modules/esm/fetch_module.js", + "lib/internal/modules/esm/formats.js", + "lib/internal/modules/esm/get_format.js", + "lib/internal/modules/esm/handle_process_exit.js", + "lib/internal/modules/esm/hooks.js", + "lib/internal/modules/esm/initialize_import_meta.js", + "lib/internal/modules/esm/load.js", + "lib/internal/modules/esm/loader.js", + "lib/internal/modules/esm/module_job.js", + "lib/internal/modules/esm/module_map.js", + "lib/internal/modules/esm/resolve.js", + "lib/internal/modules/esm/shared_constants.js", + "lib/internal/modules/esm/translators.js", + "lib/internal/modules/esm/utils.js", + "lib/internal/modules/esm/worker.js", + "lib/internal/modules/helpers.js", + "lib/internal/modules/package_json_reader.js", + "lib/internal/modules/run_main.js", + "lib/internal/navigator.js", + "lib/internal/net.js", + "lib/internal/options.js", + "lib/internal/per_context/domexception.js", + "lib/internal/per_context/messageport.js", + "lib/internal/per_context/primordials.js", + "lib/internal/perf/event_loop_delay.js", + 
"lib/internal/perf/event_loop_utilization.js", + "lib/internal/perf/nodetiming.js", + "lib/internal/perf/observe.js", + "lib/internal/perf/performance.js", + "lib/internal/perf/performance_entry.js", + "lib/internal/perf/resource_timing.js", + "lib/internal/perf/timerify.js", + "lib/internal/perf/usertiming.js", + "lib/internal/perf/utils.js", + "lib/internal/policy/manifest.js", + "lib/internal/policy/sri.js", + "lib/internal/priority_queue.js", + "lib/internal/process/esm_loader.js", + "lib/internal/process/execution.js", + "lib/internal/process/per_thread.js", + "lib/internal/process/permission.js", + "lib/internal/process/policy.js", + "lib/internal/process/pre_execution.js", + "lib/internal/process/promises.js", + "lib/internal/process/report.js", + "lib/internal/process/signal.js", + "lib/internal/process/task_queues.js", + "lib/internal/process/warning.js", + "lib/internal/process/worker_thread_only.js", + "lib/internal/promise_hooks.js", + "lib/internal/querystring.js", + "lib/internal/readline/callbacks.js", + "lib/internal/readline/emitKeypressEvents.js", + "lib/internal/readline/interface.js", + "lib/internal/readline/promises.js", + "lib/internal/readline/utils.js", + "lib/internal/repl.js", + "lib/internal/repl/await.js", + "lib/internal/repl/history.js", + "lib/internal/repl/utils.js", + "lib/internal/socket_list.js", + "lib/internal/socketaddress.js", + "lib/internal/source_map/prepare_stack_trace.js", + "lib/internal/source_map/source_map.js", + "lib/internal/source_map/source_map_cache.js", + "lib/internal/stream_base_commons.js", + "lib/internal/streams/add-abort-signal.js", + "lib/internal/streams/compose.js", + "lib/internal/streams/destroy.js", + "lib/internal/streams/duplex.js", + "lib/internal/streams/duplexify.js", + "lib/internal/streams/end-of-stream.js", + "lib/internal/streams/from.js", + "lib/internal/streams/lazy_transform.js", + "lib/internal/streams/legacy.js", + "lib/internal/streams/operators.js", + "lib/internal/streams/passthrough.js", + "lib/internal/streams/pipeline.js", + "lib/internal/streams/readable.js", + "lib/internal/streams/state.js", + "lib/internal/streams/transform.js", + "lib/internal/streams/utils.js", + "lib/internal/streams/writable.js", + "lib/internal/test/binding.js", + "lib/internal/test/transfer.js", + "lib/internal/test_runner/coverage.js", + "lib/internal/test_runner/harness.js", + "lib/internal/test_runner/mock/mock.js", + "lib/internal/test_runner/mock/mock_timers.js", + "lib/internal/test_runner/reporter/dot.js", + "lib/internal/test_runner/reporter/junit.js", + "lib/internal/test_runner/reporter/lcov.js", + "lib/internal/test_runner/reporter/spec.js", + "lib/internal/test_runner/reporter/tap.js", + "lib/internal/test_runner/reporter/v8-serializer.js", + "lib/internal/test_runner/runner.js", + "lib/internal/test_runner/test.js", + "lib/internal/test_runner/tests_stream.js", + "lib/internal/test_runner/utils.js", + "lib/internal/timers.js", + "lib/internal/tls/secure-context.js", + "lib/internal/tls/secure-pair.js", + "lib/internal/trace_events_async_hooks.js", + "lib/internal/tty.js", + "lib/internal/url.js", + "lib/internal/util.js", + "lib/internal/util/colors.js", + "lib/internal/util/comparisons.js", + "lib/internal/util/debuglog.js", + "lib/internal/util/embedding.js", + "lib/internal/util/inspect.js", + "lib/internal/util/inspector.js", + "lib/internal/util/iterable_weak_map.js", + "lib/internal/util/parse_args/parse_args.js", + "lib/internal/util/parse_args/utils.js", + "lib/internal/util/types.js", + 
"lib/internal/v8/startup_snapshot.js", + "lib/internal/v8_prof_polyfill.js", + "lib/internal/v8_prof_processor.js", + "lib/internal/validators.js", + "lib/internal/vm.js", + "lib/internal/vm/module.js", + "lib/internal/wasm_web_api.js", + "lib/internal/watch_mode/files_watcher.js", + "lib/internal/watchdog.js", + "lib/internal/webidl.js", + "lib/internal/webstreams/adapters.js", + "lib/internal/webstreams/compression.js", + "lib/internal/webstreams/encoding.js", + "lib/internal/webstreams/queuingstrategies.js", + "lib/internal/webstreams/readablestream.js", + "lib/internal/webstreams/transfer.js", + "lib/internal/webstreams/transformstream.js", + "lib/internal/webstreams/util.js", + "lib/internal/webstreams/writablestream.js", + "lib/internal/worker.js", + "lib/internal/worker/io.js", + "lib/internal/worker/js_transferable.js", + "lib/module.js", + "lib/net.js", + "lib/os.js", + "lib/path.js", + "lib/path/posix.js", + "lib/path/win32.js", + "lib/perf_hooks.js", + "lib/process.js", + "lib/punycode.js", + "lib/querystring.js", + "lib/readline.js", + "lib/readline/promises.js", + "lib/repl.js", + "lib/sea.js", + "lib/stream.js", + "lib/stream/consumers.js", + "lib/stream/promises.js", + "lib/stream/web.js", + "lib/string_decoder.js", + "lib/sys.js", + "lib/test.js", + "lib/test/reporters.js", + "lib/timers.js", + "lib/timers/promises.js", + "lib/tls.js", + "lib/trace_events.js", + "lib/tty.js", + "lib/url.js", + "lib/util.js", + "lib/util/types.js", + "lib/v8.js", + "lib/vm.js", + "lib/wasi.js", + "lib/worker_threads.js", + "lib/zlib.js" + ], + "node_module_version": 120, + "node_no_browser_globals": "false", + "node_prefix": "\\usr\\local", + "node_release_urlbase": "https://nodejs.org/download/release/", + "node_shared": "false", + "node_shared_brotli": "false", + "node_shared_cares": "false", + "node_shared_http_parser": "false", + "node_shared_libuv": "false", + "node_shared_nghttp2": "false", + "node_shared_nghttp3": "false", + "node_shared_ngtcp2": "false", + "node_shared_openssl": "false", + "node_shared_zlib": "false", + "node_tag": "", + "node_target_type": "executable", + "node_use_bundled_v8": "true", + "node_use_node_code_cache": "true", + "node_use_node_snapshot": "true", + "node_use_openssl": "true", + "node_use_v8_platform": "true", + "node_with_ltcg": "true", + "node_without_node_options": "false", + "node_write_snapshot_as_array_literals": "true", + "openssl_is_fips": "false", + "openssl_quic": "true", + "ossfuzz": "false", + "shlib_suffix": "so.120", + "single_executable_application": "true", + "target_arch": "x64", + "use_prefix_to_find_headers": "false", + "v8_enable_31bit_smis_on_64bit_arch": 0, + "v8_enable_extensible_ro_snapshot": 0, + "v8_enable_gdbjit": 0, + "v8_enable_hugepage": 0, + "v8_enable_i18n_support": 1, + "v8_enable_inspector": 1, + "v8_enable_javascript_promise_hooks": 1, + "v8_enable_lite_mode": 0, + "v8_enable_maglev": 0, + "v8_enable_object_print": 1, + "v8_enable_pointer_compression": 0, + "v8_enable_shared_ro_heap": 1, + "v8_enable_short_builtin_calls": 1, + "v8_enable_v8_checks": 0, + "v8_enable_webassembly": 1, + "v8_no_strict_aliasing": 1, + "v8_optimized_debug": 1, + "v8_promise_internal_field_count": 1, + "v8_random_seed": 0, + "v8_trace_maps": 0, + "v8_use_siphash": 1, + "want_separate_host_toolset": 0, + "nodedir": "C:\\Users\\runneradmin\\AppData\\Local\\node-gyp\\Cache\\21.7.3", + "python": "C:\\hostedtoolcache\\windows\\Python\\3.14.2\\x64\\python.exe", + "standalone_static_library": 1, + "msbuild_path": "C:\\Program Files\\Microsoft Visual 
Studio\\2022\\Enterprise\\MSBuild\\Current\\Bin\\MSBuild.exe", + "build_from_source": "true", + "cache": "C:\\npm\\cache", + "globalconfig": "C:\\npm\\prefix\\etc\\npmrc", + "global_prefix": "C:\\npm\\prefix", + "init_module": "C:\\Users\\runneradmin\\.npm-init.js", + "local_prefix": "D:\\a\\node-canvas\\node-canvas", + "node_gyp": "C:\\hostedtoolcache\\windows\\node\\21.7.3\\x64\\node_modules\\npm\\node_modules\\node-gyp\\bin\\node-gyp.js", + "npm_version": "10.5.0", + "prefix": "C:\\npm\\prefix", + "userconfig": "C:\\Users\\runneradmin\\.npmrc", + "user_agent": "npm/10.5.0 node/v21.7.3 win32 x64 workspaces/false ci/github-actions" + } +} diff --git a/miniprogram/node_modules/canvas/index.d.ts b/miniprogram/node_modules/canvas/index.d.ts new file mode 100644 index 00000000..27ab0c34 --- /dev/null +++ b/miniprogram/node_modules/canvas/index.d.ts @@ -0,0 +1,507 @@ +// TypeScript Version: 3.0 + +import { Readable } from 'stream' + +export interface PngConfig { + /** Specifies the ZLIB compression level. Defaults to 6. */ + compressionLevel?: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 + /** + * Any bitwise combination of `PNG_FILTER_NONE`, `PNG_FILTER_SUB`, + * `PNG_FILTER_UP`, `PNG_FILTER_AVG` and `PNG_FILTER_PAETH`; or one of + * `PNG_ALL_FILTERS` or `PNG_NO_FILTERS` (all are properties of the canvas + * instance). These specify which filters *may* be used by libpng. During + * encoding, libpng will select the best filter from this list of allowed + * filters. Defaults to `canvas.PNG_ALL_FILTERS`. + */ + filters?: number + /** + * _For creating indexed PNGs._ The palette of colors. Entries should be in + * RGBA order. + */ + palette?: Uint8ClampedArray + /** + * _For creating indexed PNGs._ The index of the background color. Defaults + * to 0. + */ + backgroundIndex?: number + /** pixels per inch */ + resolution?: number +} + +export interface JpegConfig { + /** Specifies the quality, between 0 and 1. Defaults to 0.75. */ + quality?: number + /** Enables progressive encoding. Defaults to `false`. */ + progressive?: boolean + /** Enables 2x2 chroma subsampling. Defaults to `true`. */ + chromaSubsampling?: boolean +} + +export interface PdfConfig { + title?: string + author?: string + subject?: string + keywords?: string + creator?: string + creationDate?: Date + modDate?: Date +} + +export interface NodeCanvasRenderingContext2DSettings { + alpha?: boolean + pixelFormat?: 'RGBA32' | 'RGB24' | 'A8' | 'RGB16_565' | 'A1' | 'RGB30' +} + +export class Canvas { + width: number + height: number + + /** _Non standard._ The type of the canvas. */ + readonly type: 'image'|'pdf'|'svg' + + /** _Non standard._ Getter. The stride used by the canvas. */ + readonly stride: number; + + /** Constant used in PNG encoding methods. */ + static readonly PNG_NO_FILTERS: number + /** Constant used in PNG encoding methods. */ + static readonly PNG_ALL_FILTERS: number + /** Constant used in PNG encoding methods. */ + static readonly PNG_FILTER_NONE: number + /** Constant used in PNG encoding methods. */ + static readonly PNG_FILTER_SUB: number + /** Constant used in PNG encoding methods. */ + static readonly PNG_FILTER_UP: number + /** Constant used in PNG encoding methods. */ + static readonly PNG_FILTER_AVG: number + /** Constant used in PNG encoding methods. 
*/ + static readonly PNG_FILTER_PAETH: number + + constructor(width: number, height: number, type?: 'image'|'pdf'|'svg') + + getContext(contextId: '2d', contextAttributes?: NodeCanvasRenderingContext2DSettings): CanvasRenderingContext2D + + /** + * For image canvases, encodes the canvas as a PNG. For PDF canvases, + * encodes the canvas as a PDF. For SVG canvases, encodes the canvas as an + * SVG. + */ + toBuffer(cb: (err: Error|null, result: Buffer) => void): void + toBuffer(cb: (err: Error|null, result: Buffer) => void, mimeType: 'image/png', config?: PngConfig): void + toBuffer(cb: (err: Error|null, result: Buffer) => void, mimeType: 'image/jpeg', config?: JpegConfig): void + + /** + * For image canvases, encodes the canvas as a PNG. For PDF canvases, + * encodes the canvas as a PDF. For SVG canvases, encodes the canvas as an + * SVG. + */ + toBuffer(): Buffer + toBuffer(mimeType: 'image/png', config?: PngConfig): Buffer + toBuffer(mimeType: 'image/jpeg', config?: JpegConfig): Buffer + toBuffer(mimeType: 'application/pdf', config?: PdfConfig): Buffer + + /** + * Returns the unencoded pixel data, top-to-bottom. On little-endian (most) + * systems, the array will be ordered BGRA; on big-endian systems, it will + * be ARGB. + */ + toBuffer(mimeType: 'raw'): Buffer + + createPNGStream(config?: PngConfig): PNGStream + createJPEGStream(config?: JpegConfig): JPEGStream + createPDFStream(config?: PdfConfig): PDFStream + + /** Defaults to PNG image. */ + toDataURL(): string + toDataURL(mimeType: 'image/png'): string + toDataURL(mimeType: 'image/jpeg', quality?: number): string + /** _Non-standard._ Defaults to PNG image. */ + toDataURL(cb: (err: Error|null, result: string) => void): void + /** _Non-standard._ */ + toDataURL(mimeType: 'image/png', cb: (err: Error|null, result: string) => void): void + /** _Non-standard._ */ + toDataURL(mimeType: 'image/jpeg', cb: (err: Error|null, result: string) => void): void + /** _Non-standard._ */ + toDataURL(mimeType: 'image/jpeg', config: JpegConfig, cb: (err: Error|null, result: string) => void): void + /** _Non-standard._ */ + toDataURL(mimeType: 'image/jpeg', quality: number, cb: (err: Error|null, result: string) => void): void +} + +export interface TextMetrics { + readonly alphabeticBaseline: number; + readonly actualBoundingBoxAscent: number; + readonly actualBoundingBoxDescent: number; + readonly actualBoundingBoxLeft: number; + readonly actualBoundingBoxRight: number; + readonly emHeightAscent: number; + readonly emHeightDescent: number; + readonly fontBoundingBoxAscent: number; + readonly fontBoundingBoxDescent: number; + readonly width: number; +} + +export type CanvasFillRule = 'evenodd' | 'nonzero'; + +export type GlobalCompositeOperation = + | 'clear' + | 'copy' + | 'destination' + | 'source-over' + | 'destination-over' + | 'source-in' + | 'destination-in' + | 'source-out' + | 'destination-out' + | 'source-atop' + | 'destination-atop' + | 'xor' + | 'lighter' + | 'normal' + | 'multiply' + | 'screen' + | 'overlay' + | 'darken' + | 'lighten' + | 'color-dodge' + | 'color-burn' + | 'hard-light' + | 'soft-light' + | 'difference' + | 'exclusion' + | 'hue' + | 'saturation' + | 'color' + | 'luminosity' + | 'saturate'; + +export type CanvasLineCap = 'butt' | 'round' | 'square'; + +export type CanvasLineJoin = 'bevel' | 'miter' | 'round'; + +export type CanvasTextBaseline = 'alphabetic' | 'bottom' | 'hanging' | 'ideographic' | 'middle' | 'top'; + +export type CanvasTextAlign = 'center' | 'end' | 'left' | 'right' | 'start'; + +export class 
CanvasRenderingContext2D { + drawImage(image: Canvas|Image, dx: number, dy: number): void + drawImage(image: Canvas|Image, dx: number, dy: number, dw: number, dh: number): void + drawImage(image: Canvas|Image, sx: number, sy: number, sw: number, sh: number, dx: number, dy: number, dw: number, dh: number): void + putImageData(imagedata: ImageData, dx: number, dy: number): void; + putImageData(imagedata: ImageData, dx: number, dy: number, dirtyX: number, dirtyY: number, dirtyWidth: number, dirtyHeight: number): void; + getImageData(sx: number, sy: number, sw: number, sh: number): ImageData; + createImageData(sw: number, sh: number): ImageData; + createImageData(imagedata: ImageData): ImageData; + /** + * For PDF canvases, adds another page. If width and/or height are omitted, + * the canvas's initial size is used. + */ + addPage(width?: number, height?: number): void + save(): void; + restore(): void; + rotate(angle: number): void; + translate(x: number, y: number): void; + transform(a: number, b: number, c: number, d: number, e: number, f: number): void; + getTransform(): DOMMatrix; + resetTransform(): void; + setTransform(transform?: DOMMatrix): void; + setTransform(a: number, b: number, c: number, d: number, e: number, f: number): void; + isPointInPath(x: number, y: number, fillRule?: CanvasFillRule): boolean; + scale(x: number, y: number): void; + clip(fillRule?: CanvasFillRule): void; + fill(fillRule?: CanvasFillRule): void; + stroke(): void; + fillText(text: string, x: number, y: number, maxWidth?: number): void; + strokeText(text: string, x: number, y: number, maxWidth?: number): void; + fillRect(x: number, y: number, w: number, h: number): void; + strokeRect(x: number, y: number, w: number, h: number): void; + clearRect(x: number, y: number, w: number, h: number): void; + rect(x: number, y: number, w: number, h: number): void; + roundRect(x: number, y: number, w: number, h: number, radii?: number | number[]): void; + measureText(text: string): TextMetrics; + moveTo(x: number, y: number): void; + lineTo(x: number, y: number): void; + bezierCurveTo(cp1x: number, cp1y: number, cp2x: number, cp2y: number, x: number, y: number): void; + quadraticCurveTo(cpx: number, cpy: number, x: number, y: number): void; + beginPath(): void; + closePath(): void; + arc(x: number, y: number, radius: number, startAngle: number, endAngle: number, counterclockwise?: boolean): void; + arcTo(x1: number, y1: number, x2: number, y2: number, radius: number): void; + ellipse(x: number, y: number, radiusX: number, radiusY: number, rotation: number, startAngle: number, endAngle: number, counterclockwise?: boolean): void; + setLineDash(segments: number[]): void; + getLineDash(): number[]; + createPattern(image: Canvas|Image, repetition: 'repeat' | 'repeat-x' | 'repeat-y' | 'no-repeat' | '' | null): CanvasPattern + createLinearGradient(x0: number, y0: number, x1: number, y1: number): CanvasGradient; + createRadialGradient(x0: number, y0: number, r0: number, x1: number, y1: number, r1: number): CanvasGradient; + beginTag(tagName: string, attributes?: string): void; + endTag(tagName: string): void; + /** + * _Non-standard_. Defaults to 'good'. Affects pattern (gradient, image, + * etc.) rendering quality. 
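+   * For example (illustrative): setting `ctx.patternQuality = 'best'` before drawing a scaled image trades speed for output quality.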
+ */ + patternQuality: 'fast' | 'good' | 'best' | 'nearest' | 'bilinear' + imageSmoothingEnabled: boolean; + globalCompositeOperation: GlobalCompositeOperation; + globalAlpha: number; + shadowColor: string; + miterLimit: number; + lineWidth: number; + lineCap: CanvasLineCap; + lineJoin: CanvasLineJoin; + lineDashOffset: number; + shadowOffsetX: number; + shadowOffsetY: number; + shadowBlur: number; + /** _Non-standard_. Sets the antialiasing mode. */ + antialias: 'default' | 'gray' | 'none' | 'subpixel' + /** + * Defaults to 'path'. The effect depends on the canvas type: + * + * * **Standard (image)** `'glyph'` and `'path'` both result in rasterized + * text. Glyph mode is faster than path, but may result in lower-quality + * text, especially when rotated or translated. + * + * * **PDF** `'glyph'` will embed text instead of paths into the PDF. This + * is faster to encode, faster to open with PDF viewers, yields a smaller + * file size and makes the text selectable. The subset of the font needed + * to render the glyphs will be embedded in the PDF. This is usually the + * mode you want to use with PDF canvases. + * + * * **SVG** glyph does not cause `<text>` elements to be produced as one + * might expect ([cairo bug](https://gitlab.freedesktop.org/cairo/cairo/issues/253)). + * Rather, glyph will create a `<defs>` section with a `<symbol>` for each + * glyph, then those glyphs will be reused via `<use>` elements. `'path'` mode + * creates a `<path>` element for each text string. glyph mode is faster + * and yields a smaller file size. + * + * In glyph mode, `ctx.strokeText()` and `ctx.fillText()` behave the same + * (aside from using the stroke and fill style, respectively). + */ + textDrawingMode: 'path' | 'glyph' + /** + * _Non-standard_. Defaults to 'good'. Like `patternQuality`, but applies to + * transformations affecting more than just patterns. + */ + quality: 'fast' | 'good' | 'best' | 'nearest' | 'bilinear' + /** Returns or sets a `DOMMatrix` for the current transformation matrix. */ + currentTransform: DOMMatrix + fillStyle: string | CanvasGradient | CanvasPattern; + strokeStyle: string | CanvasGradient | CanvasPattern; + font: string; + textBaseline: CanvasTextBaseline; + textAlign: CanvasTextAlign; + canvas: Canvas; + direction: 'ltr' | 'rtl'; + lang: string; +} + +export class CanvasGradient { + addColorStop(offset: number, color: string): void; +} + +export class CanvasPattern { + setTransform(transform?: DOMMatrix): void; +} + +// This does not extend HTMLImageElement because there are dozens of inherited +// methods and properties that we do not provide. +export class Image { + /** Track image data */ + static readonly MODE_IMAGE: number + /** Track MIME data */ + static readonly MODE_MIME: number + + /** + * The URL, `data:` URI or local file path of the image to be loaded, or a + * Buffer instance containing an encoded image. + */ + src: string | Buffer + /** Retrieves whether the object is fully loaded. */ + readonly complete: boolean + /** Sets or retrieves the height of the image. */ + height: number + /** Sets or retrieves the width of the image. */ + width: number + + /** The original height of the image resource before sizing. */ + readonly naturalHeight: number + /** The original width of the image resource before sizing. */ + readonly naturalWidth: number + /** + * Applies to JPEG images drawn to PDF canvases only. Setting + * `img.dataMode = Image.MODE_MIME` or `Image.MODE_MIME|Image.MODE_IMAGE` + * enables image MIME data tracking. 
When MIME data is tracked, PDF canvases + * can embed JPEGs directly into the output, rather than re-encoding into + * PNG. This can drastically reduce filesize and speed up rendering. + */ + dataMode: number + + onload: (() => void) | null; + onerror: ((err: Error) => void) | null; +} + +/** + * Creates a Canvas instance. This function works in both Node.js and Web + * browsers, where there is no Canvas constructor. + * @param type Optionally specify to create a PDF or SVG canvas. Defaults to an + * image canvas. + */ +export function createCanvas(width: number, height: number, type?: 'pdf'|'svg'): Canvas + +/** + * Creates an ImageData instance. This function works in both Node.js and Web + * browsers. + * @param data An array containing the pixel representation of the image. + * @param height If omitted, the height is calculated based on the array's size + * and `width`. + */ +export function createImageData(data: Uint8ClampedArray, width: number, height?: number): ImageData +/** + * _Non-standard._ Creates an ImageData instance for an alternative pixel + * format, such as RGB16_565 + * @param data An array containing the pixel representation of the image. + * @param height If omitted, the height is calculated based on the array's size + * and `width`. + */ +export function createImageData(data: Uint16Array, width: number, height?: number): ImageData +/** + * Creates an ImageData instance. This function works in both Node.js and Web + * browsers. + */ +export function createImageData(width: number, height: number): ImageData + +/** + * Convenience function for loading an image with a Promise interface. This + * function works in both Node.js and Web browsers; however, the `src` must be + * a string in Web browsers (it can only be a Buffer in Node.js). + * @param src URL, `data: ` URI or (Node.js only) a local file path or Buffer + * instance. + */ +export function loadImage(src: string|Buffer, options?: any): Promise<Image> + +/** + * Registers a font that is not installed as a system font. This must be used + * before creating Canvas instances. + * @param path Path to local font file. + * @param fontFace Description of the font face, corresponding to CSS properties + * used in `@font-face` rules. + */ +export function registerFont(path: string, fontFace: {family: string, weight?: string, style?: string}): void + +/** + * Unloads all fonts + */ +export function deregisterAllFonts(): void; + +/** This class must not be constructed directly; use `canvas.createPNGStream()`. */ +export class PNGStream extends Readable {} +/** This class must not be constructed directly; use `canvas.createJPEGStream()`. */ +export class JPEGStream extends Readable {} +/** This class must not be constructed directly; use `canvas.createPDFStream()`. */ +export class PDFStream extends Readable {} + +// TODO: this is wrong. 
See matrixTransform in lib/DOMMatrix.js +type DOMMatrixInit = DOMMatrix | string | number[]; + +interface DOMPointInit { + w?: number; + x?: number; + y?: number; + z?: number; +} + +export class DOMPoint { + w: number; + x: number; + y: number; + z: number; + matrixTransform(matrix?: DOMMatrixInit): DOMPoint; + toJSON(): any; + static fromPoint(other?: DOMPointInit): DOMPoint; +} + +export class DOMMatrix { + constructor(init?: string | number[]); + toString(): string; + multiply(other?: DOMMatrix): DOMMatrix; + multiplySelf(other?: DOMMatrix): DOMMatrix; + preMultiplySelf(other?: DOMMatrix): DOMMatrix; + translate(tx?: number, ty?: number, tz?: number): DOMMatrix; + translateSelf(tx?: number, ty?: number, tz?: number): DOMMatrix; + scale(scaleX?: number, scaleY?: number, scaleZ?: number, originX?: number, originY?: number, originZ?: number): DOMMatrix; + scale3d(scale?: number, originX?: number, originY?: number, originZ?: number): DOMMatrix; + scale3dSelf(scale?: number, originX?: number, originY?: number, originZ?: number): DOMMatrix; + scaleSelf(scaleX?: number, scaleY?: number, scaleZ?: number, originX?: number, originY?: number, originZ?: number): DOMMatrix; + /** + * @deprecated + */ + scaleNonUniform(scaleX?: number, scaleY?: number): DOMMatrix; + rotateFromVector(x?: number, y?: number): DOMMatrix; + rotateFromVectorSelf(x?: number, y?: number): DOMMatrix; + rotate(rotX?: number, rotY?: number, rotZ?: number): DOMMatrix; + rotateSelf(rotX?: number, rotY?: number, rotZ?: number): DOMMatrix; + rotateAxisAngle(x?: number, y?: number, z?: number, angle?: number): DOMMatrix; + rotateAxisAngleSelf(x?: number, y?: number, z?: number, angle?: number): DOMMatrix; + skewX(sx?: number): DOMMatrix; + skewXSelf(sx?: number): DOMMatrix; + skewY(sy?: number): DOMMatrix; + skewYSelf(sy?: number): DOMMatrix; + flipX(): DOMMatrix; + flipY(): DOMMatrix; + inverse(): DOMMatrix; + invertSelf(): DOMMatrix; + setMatrixValue(transformList: string): DOMMatrix; + transformPoint(point?: DOMPoint): DOMPoint; + toJSON(): any; + toFloat32Array(): Float32Array; + toFloat64Array(): Float64Array; + readonly is2D: boolean; + readonly isIdentity: boolean; + a: number; + b: number; + c: number; + d: number; + e: number; + f: number; + m11: number; + m12: number; + m13: number; + m14: number; + m21: number; + m22: number; + m23: number; + m24: number; + m31: number; + m32: number; + m33: number; + m34: number; + m41: number; + m42: number; + m43: number; + m44: number; + static fromMatrix(other: DOMMatrix): DOMMatrix; + static fromFloat32Array(a: Float32Array): DOMMatrix; + static fromFloat64Array(a: Float64Array): DOMMatrix; +} + +export class ImageData { + constructor(sw: number, sh: number); + constructor(data: Uint8ClampedArray, sw: number, sh?: number); + readonly data: Uint8ClampedArray; + readonly height: number; + readonly width: number; +} + +// Not documented: backends + +/** Library version. */ +export const version: string +/** Cairo version. */ +export const cairoVersion: string +/** jpeglib version, if built with JPEG support. */ +export const jpegVersion: string | undefined +/** giflib version, if built with GIF support. */ +export const gifVersion: string | undefined +/** freetype version. */ +export const freetypeVersion: string +/** rsvg version. 
*/ +export const rsvgVersion: string | undefined diff --git a/miniprogram/node_modules/canvas/index.js b/miniprogram/node_modules/canvas/index.js new file mode 100644 index 00000000..adde4da1 --- /dev/null +++ b/miniprogram/node_modules/canvas/index.js @@ -0,0 +1,94 @@ +const Canvas = require('./lib/canvas') +const Image = require('./lib/image') +const CanvasRenderingContext2D = require('./lib/context2d') +const CanvasPattern = require('./lib/pattern') +const packageJson = require('./package.json') +const bindings = require('./lib/bindings') +const fs = require('fs') +const PNGStream = require('./lib/pngstream') +const PDFStream = require('./lib/pdfstream') +const JPEGStream = require('./lib/jpegstream') +const { DOMPoint, DOMMatrix } = require('./lib/DOMMatrix') + +bindings.setDOMMatrix(DOMMatrix) + +function createCanvas (width, height, type) { + return new Canvas(width, height, type) +} + +function createImageData (array, width, height) { + return new bindings.ImageData(array, width, height) +} + +function loadImage (src) { + return new Promise((resolve, reject) => { + const image = new Image() + + function cleanup () { + image.onload = null + image.onerror = null + } + + image.onload = () => { cleanup(); resolve(image) } + image.onerror = (err) => { cleanup(); reject(err) } + + image.src = src + }) +} + +/** + * Resolve paths for registerFont. Must be called *before* creating a Canvas + * instance. + * @param src {string} Path to font file. + * @param fontFace {{family: string, weight?: string, style?: string}} Object + * specifying font information. `weight` and `style` default to `"normal"`. + */ +function registerFont (src, fontFace) { + // TODO this doesn't need to be on Canvas; it should just be a static method + // of `bindings`. + return Canvas._registerFont(fs.realpathSync(src), fontFace) +} + +/** + * Unload all fonts from pango to free up memory + */ +function deregisterAllFonts () { + return Canvas._deregisterAllFonts() +} + +exports.Canvas = Canvas +exports.Context2d = CanvasRenderingContext2D // Legacy/compat export +exports.CanvasRenderingContext2D = CanvasRenderingContext2D +exports.CanvasGradient = bindings.CanvasGradient +exports.CanvasPattern = CanvasPattern +exports.Image = Image +exports.ImageData = bindings.ImageData +exports.PNGStream = PNGStream +exports.PDFStream = PDFStream +exports.JPEGStream = JPEGStream +exports.DOMMatrix = DOMMatrix +exports.DOMPoint = DOMPoint + +exports.registerFont = registerFont +exports.deregisterAllFonts = deregisterAllFonts + +exports.createCanvas = createCanvas +exports.createImageData = createImageData +exports.loadImage = loadImage + +exports.backends = bindings.Backends + +/** Library version. */ +exports.version = packageJson.version +/** Cairo version. */ +exports.cairoVersion = bindings.cairoVersion +/** jpeglib version. */ +exports.jpegVersion = bindings.jpegVersion +/** gif_lib version. */ +exports.gifVersion = bindings.gifVersion ? bindings.gifVersion.replace(/[^.\d]/g, '') : undefined +/** freetype version. */ +exports.freetypeVersion = bindings.freetypeVersion +/** rsvg version. */ +exports.rsvgVersion = bindings.rsvgVersion +/** pango version. 
*/ +exports.pangoVersion = bindings.pangoVersion diff --git a/miniprogram/node_modules/canvas/lib/DOMMatrix.js b/miniprogram/node_modules/canvas/lib/DOMMatrix.js new file mode 100644 index 00000000..97015adc --- /dev/null +++ b/miniprogram/node_modules/canvas/lib/DOMMatrix.js @@ -0,0 +1,678 @@ +'use strict' + +const util = require('util') + +// DOMMatrix per https://drafts.fxtf.org/geometry/#DOMMatrix + +class DOMPoint { + constructor (x, y, z, w) { + if (typeof x === 'object' && x !== null) { + w = x.w + z = x.z + y = x.y + x = x.x + } + this.x = typeof x === 'number' ? x : 0 + this.y = typeof y === 'number' ? y : 0 + this.z = typeof z === 'number' ? z : 0 + this.w = typeof w === 'number' ? w : 1 + } + + matrixTransform(init) { + // TODO: this next line is wrong. matrixTransform is supposed to only take + // an object with the DOMMatrix properties called DOMMatrixInit + const m = init instanceof DOMMatrix ? init : new DOMMatrix(init) + return m.transformPoint(this) + } + + toJSON() { + return { + x: this.x, + y: this.y, + z: this.z, + w: this.w + } + } + + static fromPoint(other) { + return new this(other.x, other.y, other.z, other.w) + } +} + +// Constants to index into _values (col-major) +const M11 = 0; const M12 = 1; const M13 = 2; const M14 = 3 +const M21 = 4; const M22 = 5; const M23 = 6; const M24 = 7 +const M31 = 8; const M32 = 9; const M33 = 10; const M34 = 11 +const M41 = 12; const M42 = 13; const M43 = 14; const M44 = 15 + +const DEGREE_PER_RAD = 180 / Math.PI +const RAD_PER_DEGREE = Math.PI / 180 + +function parseMatrix (init) { + let parsed = init.replace('matrix(', '') + parsed = parsed.split(',', 7) // 6 + 1 to handle too many params + if (parsed.length !== 6) throw new Error(`Failed to parse ${init}`) + parsed = parsed.map(parseFloat) + return [ + parsed[0], parsed[1], 0, 0, + parsed[2], parsed[3], 0, 0, + 0, 0, 1, 0, + parsed[4], parsed[5], 0, 1 + ] +} + +function parseMatrix3d (init) { + let parsed = init.replace('matrix3d(', '') + parsed = parsed.split(',', 17) // 16 + 1 to handle too many params + if (parsed.length !== 16) throw new Error(`Failed to parse ${init}`) + return parsed.map(parseFloat) +} + +function parseTransform (tform) { + const type = tform.split('(', 1)[0] + switch (type) { + case 'matrix': + return parseMatrix(tform) + case 'matrix3d': + return parseMatrix3d(tform) + // TODO This is supposed to support any CSS transform value. 
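+    // Only matrix(...) and matrix3d(...) are recognized above; e.g. new DOMMatrix('matrix(1, 0, 0, 1, 10, 20)') parses, while new DOMMatrix('translate(10px, 20px)') falls through to the error below.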
+ default: + throw new Error(`${type} parsing not implemented`) + } +} + +class DOMMatrix { + constructor (init) { + this._is2D = true + this._values = new Float64Array([ + 1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1 + ]) + + let i + + if (typeof init === 'string') { // parse CSS transformList + if (init === '') return // default identity matrix + const tforms = init.split(/\)\s+/, 20).map(parseTransform) + if (tforms.length === 0) return + init = tforms[0] + for (i = 1; i < tforms.length; i++) init = multiply(tforms[i], init) + } + + i = 0 + if (init && init.length === 6) { + setNumber2D(this, M11, init[i++]) + setNumber2D(this, M12, init[i++]) + setNumber2D(this, M21, init[i++]) + setNumber2D(this, M22, init[i++]) + setNumber2D(this, M41, init[i++]) + setNumber2D(this, M42, init[i++]) + } else if (init && init.length === 16) { + setNumber2D(this, M11, init[i++]) + setNumber2D(this, M12, init[i++]) + setNumber3D(this, M13, init[i++]) + setNumber3D(this, M14, init[i++]) + setNumber2D(this, M21, init[i++]) + setNumber2D(this, M22, init[i++]) + setNumber3D(this, M23, init[i++]) + setNumber3D(this, M24, init[i++]) + setNumber3D(this, M31, init[i++]) + setNumber3D(this, M32, init[i++]) + setNumber3D(this, M33, init[i++]) + setNumber3D(this, M34, init[i++]) + setNumber2D(this, M41, init[i++]) + setNumber2D(this, M42, init[i++]) + setNumber3D(this, M43, init[i++]) + setNumber3D(this, M44, init[i]) + } else if (init !== undefined) { + throw new TypeError('Expected string or array.') + } + } + + toString () { + return this.is2D + ? `matrix(${this.a}, ${this.b}, ${this.c}, ${this.d}, ${this.e}, ${this.f})` + : `matrix3d(${this._values.join(', ')})` + } + + multiply (other) { + return newInstance(this._values).multiplySelf(other) + } + + multiplySelf (other) { + this._values = multiply(other._values, this._values) + if (!other.is2D) this._is2D = false + return this + } + + preMultiplySelf (other) { + this._values = multiply(this._values, other._values) + if (!other.is2D) this._is2D = false + return this + } + + translate (tx, ty, tz) { + return newInstance(this._values).translateSelf(tx, ty, tz) + } + + translateSelf (tx, ty, tz) { + if (typeof tx !== 'number') tx = 0 + if (typeof ty !== 'number') ty = 0 + if (typeof tz !== 'number') tz = 0 + this._values = multiply([ + 1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, 1, 0, + tx, ty, tz, 1 + ], this._values) + if (tz !== 0) this._is2D = false + return this + } + + scale (scaleX, scaleY, scaleZ, originX, originY, originZ) { + return newInstance(this._values).scaleSelf(scaleX, scaleY, scaleZ, originX, originY, originZ) + } + + scale3d (scale, originX, originY, originZ) { + return newInstance(this._values).scale3dSelf(scale, originX, originY, originZ) + } + + scale3dSelf (scale, originX, originY, originZ) { + return this.scaleSelf(scale, scale, scale, originX, originY, originZ) + } + + /** + * @deprecated + */ + scaleNonUniform(scaleX, scaleY) { + return this.scale(scaleX, scaleY) + } + + scaleSelf (scaleX, scaleY, scaleZ, originX, originY, originZ) { + // Not redundant with translate's checks because we need to negate the values later. 
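+    // scaleSelf scales about (originX, originY, originZ): translate to that origin, apply the scale, then translate back.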
+ if (typeof originX !== 'number') originX = 0 + if (typeof originY !== 'number') originY = 0 + if (typeof originZ !== 'number') originZ = 0 + this.translateSelf(originX, originY, originZ) + if (typeof scaleX !== 'number') scaleX = 1 + if (typeof scaleY !== 'number') scaleY = scaleX + if (typeof scaleZ !== 'number') scaleZ = 1 + this._values = multiply([ + scaleX, 0, 0, 0, + 0, scaleY, 0, 0, + 0, 0, scaleZ, 0, + 0, 0, 0, 1 + ], this._values) + this.translateSelf(-originX, -originY, -originZ) + if (scaleZ !== 1 || originZ !== 0) this._is2D = false + return this + } + + rotateFromVector (x, y) { + return newInstance(this._values).rotateFromVectorSelf(x, y) + } + + rotateFromVectorSelf (x, y) { + if (typeof x !== 'number') x = 0 + if (typeof y !== 'number') y = 0 + const theta = (x === 0 && y === 0) ? 0 : Math.atan2(y, x) * DEGREE_PER_RAD + return this.rotateSelf(theta) + } + + rotate (rotX, rotY, rotZ) { + return newInstance(this._values).rotateSelf(rotX, rotY, rotZ) + } + + rotateSelf (rotX, rotY, rotZ) { + if (rotY === undefined && rotZ === undefined) { + rotZ = rotX + rotX = rotY = 0 + } + if (typeof rotY !== 'number') rotY = 0 + if (typeof rotZ !== 'number') rotZ = 0 + if (rotX !== 0 || rotY !== 0) this._is2D = false + rotX *= RAD_PER_DEGREE + rotY *= RAD_PER_DEGREE + rotZ *= RAD_PER_DEGREE + let c, s + c = Math.cos(rotZ) + s = Math.sin(rotZ) + this._values = multiply([ + c, s, 0, 0, + -s, c, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1 + ], this._values) + c = Math.cos(rotY) + s = Math.sin(rotY) + this._values = multiply([ + c, 0, -s, 0, + 0, 1, 0, 0, + s, 0, c, 0, + 0, 0, 0, 1 + ], this._values) + c = Math.cos(rotX) + s = Math.sin(rotX) + this._values = multiply([ + 1, 0, 0, 0, + 0, c, s, 0, + 0, -s, c, 0, + 0, 0, 0, 1 + ], this._values) + return this + } + + rotateAxisAngle (x, y, z, angle) { + return newInstance(this._values).rotateAxisAngleSelf(x, y, z, angle) + } + + rotateAxisAngleSelf (x, y, z, angle) { + if (typeof x !== 'number') x = 0 + if (typeof y !== 'number') y = 0 + if (typeof z !== 'number') z = 0 + // Normalize axis + const length = Math.sqrt(x * x + y * y + z * z) + if (length === 0) return this + if (length !== 1) { + x /= length + y /= length + z /= length + } + angle *= RAD_PER_DEGREE + const c = Math.cos(angle) + const s = Math.sin(angle) + const t = 1 - c + const tx = t * x + const ty = t * y + // NB: This is the generic transform. If the axis is a major axis, there are + // faster transforms. 
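+    // The matrix below is the axis-angle (Rodrigues) rotation matrix, written in the same column-major layout used for _values.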
+ this._values = multiply([ + tx * x + c, tx * y + s * z, tx * z - s * y, 0, + tx * y - s * z, ty * y + c, ty * z + s * x, 0, + tx * z + s * y, ty * z - s * x, t * z * z + c, 0, + 0, 0, 0, 1 + ], this._values) + if (x !== 0 || y !== 0) this._is2D = false + return this + } + + skewX (sx) { + return newInstance(this._values).skewXSelf(sx) + } + + skewXSelf (sx) { + if (typeof sx !== 'number') return this + const t = Math.tan(sx * RAD_PER_DEGREE) + this._values = multiply([ + 1, 0, 0, 0, + t, 1, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1 + ], this._values) + return this + } + + skewY (sy) { + return newInstance(this._values).skewYSelf(sy) + } + + skewYSelf (sy) { + if (typeof sy !== 'number') return this + const t = Math.tan(sy * RAD_PER_DEGREE) + this._values = multiply([ + 1, t, 0, 0, + 0, 1, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1 + ], this._values) + return this + } + + flipX () { + return newInstance(multiply([ + -1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1 + ], this._values)) + } + + flipY () { + return newInstance(multiply([ + 1, 0, 0, 0, + 0, -1, 0, 0, + 0, 0, 1, 0, + 0, 0, 0, 1 + ], this._values)) + } + + inverse () { + return newInstance(this._values).invertSelf() + } + + invertSelf () { + const m = this._values + const inv = m.map(v => 0) + + inv[0] = m[5] * m[10] * m[15] - + m[5] * m[11] * m[14] - + m[9] * m[6] * m[15] + + m[9] * m[7] * m[14] + + m[13] * m[6] * m[11] - + m[13] * m[7] * m[10] + + inv[4] = -m[4] * m[10] * m[15] + + m[4] * m[11] * m[14] + + m[8] * m[6] * m[15] - + m[8] * m[7] * m[14] - + m[12] * m[6] * m[11] + + m[12] * m[7] * m[10] + + inv[8] = m[4] * m[9] * m[15] - + m[4] * m[11] * m[13] - + m[8] * m[5] * m[15] + + m[8] * m[7] * m[13] + + m[12] * m[5] * m[11] - + m[12] * m[7] * m[9] + + inv[12] = -m[4] * m[9] * m[14] + + m[4] * m[10] * m[13] + + m[8] * m[5] * m[14] - + m[8] * m[6] * m[13] - + m[12] * m[5] * m[10] + + m[12] * m[6] * m[9] + + // If the determinant is zero, this matrix cannot be inverted, and all + // values should be set to NaN, with the is2D flag set to false. 
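+    // The determinant is the cofactor expansion along the first row, reusing the cofactors already stored in inv[0], inv[4], inv[8] and inv[12].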
+ + const det = m[0] * inv[0] + m[1] * inv[4] + m[2] * inv[8] + m[3] * inv[12] + + if (det === 0) { + this._values = m.map(v => NaN) + this._is2D = false + return this + } + + inv[1] = -m[1] * m[10] * m[15] + + m[1] * m[11] * m[14] + + m[9] * m[2] * m[15] - + m[9] * m[3] * m[14] - + m[13] * m[2] * m[11] + + m[13] * m[3] * m[10] + + inv[5] = m[0] * m[10] * m[15] - + m[0] * m[11] * m[14] - + m[8] * m[2] * m[15] + + m[8] * m[3] * m[14] + + m[12] * m[2] * m[11] - + m[12] * m[3] * m[10] + + inv[9] = -m[0] * m[9] * m[15] + + m[0] * m[11] * m[13] + + m[8] * m[1] * m[15] - + m[8] * m[3] * m[13] - + m[12] * m[1] * m[11] + + m[12] * m[3] * m[9] + + inv[13] = m[0] * m[9] * m[14] - + m[0] * m[10] * m[13] - + m[8] * m[1] * m[14] + + m[8] * m[2] * m[13] + + m[12] * m[1] * m[10] - + m[12] * m[2] * m[9] + + inv[2] = m[1] * m[6] * m[15] - + m[1] * m[7] * m[14] - + m[5] * m[2] * m[15] + + m[5] * m[3] * m[14] + + m[13] * m[2] * m[7] - + m[13] * m[3] * m[6] + + inv[6] = -m[0] * m[6] * m[15] + + m[0] * m[7] * m[14] + + m[4] * m[2] * m[15] - + m[4] * m[3] * m[14] - + m[12] * m[2] * m[7] + + m[12] * m[3] * m[6] + + inv[10] = m[0] * m[5] * m[15] - + m[0] * m[7] * m[13] - + m[4] * m[1] * m[15] + + m[4] * m[3] * m[13] + + m[12] * m[1] * m[7] - + m[12] * m[3] * m[5] + + inv[14] = -m[0] * m[5] * m[14] + + m[0] * m[6] * m[13] + + m[4] * m[1] * m[14] - + m[4] * m[2] * m[13] - + m[12] * m[1] * m[6] + + m[12] * m[2] * m[5] + + inv[3] = -m[1] * m[6] * m[11] + + m[1] * m[7] * m[10] + + m[5] * m[2] * m[11] - + m[5] * m[3] * m[10] - + m[9] * m[2] * m[7] + + m[9] * m[3] * m[6] + + inv[7] = m[0] * m[6] * m[11] - + m[0] * m[7] * m[10] - + m[4] * m[2] * m[11] + + m[4] * m[3] * m[10] + + m[8] * m[2] * m[7] - + m[8] * m[3] * m[6] + + inv[11] = -m[0] * m[5] * m[11] + + m[0] * m[7] * m[9] + + m[4] * m[1] * m[11] - + m[4] * m[3] * m[9] - + m[8] * m[1] * m[7] + + m[8] * m[3] * m[5] + + inv[15] = m[0] * m[5] * m[10] - + m[0] * m[6] * m[9] - + m[4] * m[1] * m[10] + + m[4] * m[2] * m[9] + + m[8] * m[1] * m[6] - + m[8] * m[2] * m[5] + + inv.forEach((v, i) => { inv[i] = v / det }) + this._values = inv + return this + } + + setMatrixValue (transformList) { + const temp = new DOMMatrix(transformList) + this._values = temp._values + this._is2D = temp._is2D + return this + } + + transformPoint (point) { + point = new DOMPoint(point) + const x = point.x + const y = point.y + const z = point.z + const w = point.w + const values = this._values + const nx = values[M11] * x + values[M21] * y + values[M31] * z + values[M41] * w + const ny = values[M12] * x + values[M22] * y + values[M32] * z + values[M42] * w + const nz = values[M13] * x + values[M23] * y + values[M33] * z + values[M43] * w + const nw = values[M14] * x + values[M24] * y + values[M34] * z + values[M44] * w + return new DOMPoint(nx, ny, nz, nw) + } + + toFloat32Array () { + return Float32Array.from(this._values) + } + + toFloat64Array () { + return this._values.slice(0) + } + + static fromMatrix (init) { + if (!(init instanceof DOMMatrix)) throw new TypeError('Expected DOMMatrix') + return new DOMMatrix(init._values) + } + + static fromFloat32Array (init) { + if (!(init instanceof Float32Array)) throw new TypeError('Expected Float32Array') + return new DOMMatrix(init) + } + + static fromFloat64Array (init) { + if (!(init instanceof Float64Array)) throw new TypeError('Expected Float64Array') + return new DOMMatrix(init) + } + + [util.inspect.custom || 'inspect'] (depth, options) { + if (depth < 0) return '[DOMMatrix]' + + return `DOMMatrix [ + a: ${this.a} + b: ${this.b} + c: ${this.c} 
+ d: ${this.d} + e: ${this.e} + f: ${this.f} + m11: ${this.m11} + m12: ${this.m12} + m13: ${this.m13} + m14: ${this.m14} + m21: ${this.m21} + m22: ${this.m22} + m23: ${this.m23} + m23: ${this.m23} + m31: ${this.m31} + m32: ${this.m32} + m33: ${this.m33} + m34: ${this.m34} + m41: ${this.m41} + m42: ${this.m42} + m43: ${this.m43} + m44: ${this.m44} + is2D: ${this.is2D} + isIdentity: ${this.isIdentity} ]` + } +} + +/** + * Checks that `value` is a number and sets the value. + */ +function setNumber2D (receiver, index, value) { + if (typeof value !== 'number') throw new TypeError('Expected number') + return (receiver._values[index] = value) +} + +/** + * Checks that `value` is a number, sets `_is2D = false` if necessary and sets + * the value. + */ +function setNumber3D (receiver, index, value) { + if (typeof value !== 'number') throw new TypeError('Expected number') + if (index === M33 || index === M44) { + if (value !== 1) receiver._is2D = false + } else if (value !== 0) receiver._is2D = false + return (receiver._values[index] = value) +} + +Object.defineProperties(DOMMatrix.prototype, { + m11: { get () { return this._values[M11] }, set (v) { return setNumber2D(this, M11, v) } }, + m12: { get () { return this._values[M12] }, set (v) { return setNumber2D(this, M12, v) } }, + m13: { get () { return this._values[M13] }, set (v) { return setNumber3D(this, M13, v) } }, + m14: { get () { return this._values[M14] }, set (v) { return setNumber3D(this, M14, v) } }, + m21: { get () { return this._values[M21] }, set (v) { return setNumber2D(this, M21, v) } }, + m22: { get () { return this._values[M22] }, set (v) { return setNumber2D(this, M22, v) } }, + m23: { get () { return this._values[M23] }, set (v) { return setNumber3D(this, M23, v) } }, + m24: { get () { return this._values[M24] }, set (v) { return setNumber3D(this, M24, v) } }, + m31: { get () { return this._values[M31] }, set (v) { return setNumber3D(this, M31, v) } }, + m32: { get () { return this._values[M32] }, set (v) { return setNumber3D(this, M32, v) } }, + m33: { get () { return this._values[M33] }, set (v) { return setNumber3D(this, M33, v) } }, + m34: { get () { return this._values[M34] }, set (v) { return setNumber3D(this, M34, v) } }, + m41: { get () { return this._values[M41] }, set (v) { return setNumber2D(this, M41, v) } }, + m42: { get () { return this._values[M42] }, set (v) { return setNumber2D(this, M42, v) } }, + m43: { get () { return this._values[M43] }, set (v) { return setNumber3D(this, M43, v) } }, + m44: { get () { return this._values[M44] }, set (v) { return setNumber3D(this, M44, v) } }, + + a: { get () { return this.m11 }, set (v) { return (this.m11 = v) } }, + b: { get () { return this.m12 }, set (v) { return (this.m12 = v) } }, + c: { get () { return this.m21 }, set (v) { return (this.m21 = v) } }, + d: { get () { return this.m22 }, set (v) { return (this.m22 = v) } }, + e: { get () { return this.m41 }, set (v) { return (this.m41 = v) } }, + f: { get () { return this.m42 }, set (v) { return (this.m42 = v) } }, + + is2D: { get () { return this._is2D } }, // read-only + + isIdentity: { + get () { + const values = this._values + return (values[M11] === 1 && values[M12] === 0 && values[M13] === 0 && values[M14] === 0 && + values[M21] === 0 && values[M22] === 1 && values[M23] === 0 && values[M24] === 0 && + values[M31] === 0 && values[M32] === 0 && values[M33] === 1 && values[M34] === 0 && + values[M41] === 0 && values[M42] === 0 && values[M43] === 0 && values[M44] === 1) + } + }, + + toJSON: { + value() { + return { + 
a: this.a, + b: this.b, + c: this.c, + d: this.d, + e: this.e, + f: this.f, + m11: this.m11, + m12: this.m12, + m13: this.m13, + m14: this.m14, + m21: this.m21, + m22: this.m22, + m23: this.m23, + m23: this.m23, + m31: this.m31, + m32: this.m32, + m33: this.m33, + m34: this.m34, + m41: this.m41, + m42: this.m42, + m43: this.m43, + m44: this.m44, + is2D: this.is2D, + isIdentity: this.isIdentity, + } + } + } +}) + +/** + * Instantiates a DOMMatrix, bypassing the constructor. + * @param {Float64Array} values Value to assign to `_values`. This is assigned + * without copying (okay because all usages are followed by a multiply). + */ +function newInstance (values) { + const instance = Object.create(DOMMatrix.prototype) + instance.constructor = DOMMatrix + instance._is2D = true + instance._values = values + return instance +} + +function multiply (A, B) { + const dest = new Float64Array(16) + for (let i = 0; i < 4; i++) { + for (let j = 0; j < 4; j++) { + let sum = 0 + for (let k = 0; k < 4; k++) { + sum += A[i * 4 + k] * B[k * 4 + j] + } + dest[i * 4 + j] = sum + } + } + return dest +} + +module.exports = { DOMMatrix, DOMPoint } diff --git a/miniprogram/node_modules/canvas/lib/bindings.js b/miniprogram/node_modules/canvas/lib/bindings.js new file mode 100644 index 00000000..40cef3c6 --- /dev/null +++ b/miniprogram/node_modules/canvas/lib/bindings.js @@ -0,0 +1,43 @@ +'use strict' + +const bindings = require('../build/Release/canvas.node') + +module.exports = bindings + +Object.defineProperty(bindings.Canvas.prototype, Symbol.toStringTag, { + value: 'HTMLCanvasElement', + configurable: true +}) + +Object.defineProperty(bindings.Image.prototype, Symbol.toStringTag, { + value: 'HTMLImageElement', + configurable: true +}) + +bindings.ImageData.prototype.toString = function () { + return '[object ImageData]' +} + +Object.defineProperty(bindings.ImageData.prototype, Symbol.toStringTag, { + value: 'ImageData', + configurable: true +}) + +bindings.CanvasGradient.prototype.toString = function () { + return '[object CanvasGradient]' +} + +Object.defineProperty(bindings.CanvasGradient.prototype, Symbol.toStringTag, { + value: 'CanvasGradient', + configurable: true +}) + +Object.defineProperty(bindings.CanvasPattern.prototype, Symbol.toStringTag, { + value: 'CanvasPattern', + configurable: true +}) + +Object.defineProperty(bindings.CanvasRenderingContext2d.prototype, Symbol.toStringTag, { + value: 'CanvasRenderingContext2d', + configurable: true +}) diff --git a/miniprogram/node_modules/canvas/lib/canvas.js b/miniprogram/node_modules/canvas/lib/canvas.js new file mode 100644 index 00000000..03fa1a95 --- /dev/null +++ b/miniprogram/node_modules/canvas/lib/canvas.js @@ -0,0 +1,113 @@ +'use strict' + +/*! 
+ * Canvas + * Copyright (c) 2010 LearnBoost + * MIT Licensed + */ + +const bindings = require('./bindings') +const Canvas = module.exports = bindings.Canvas +const Context2d = require('./context2d') +const PNGStream = require('./pngstream') +const PDFStream = require('./pdfstream') +const JPEGStream = require('./jpegstream') +const FORMATS = ['image/png', 'image/jpeg'] +const util = require('util') + +// TODO || is for Node.js pre-v6.6.0 +Canvas.prototype[util.inspect.custom || 'inspect'] = function () { + return `[Canvas ${this.width}x${this.height}]` +} + +Canvas.prototype.getContext = function (contextType, contextAttributes) { + if (contextType == '2d') { + const ctx = this._context2d || (this._context2d = new Context2d(this, contextAttributes)) + this.context = ctx + ctx.canvas = this + return ctx + } +} + +Canvas.prototype.pngStream = +Canvas.prototype.createPNGStream = function (options) { + return new PNGStream(this, options) +} + +Canvas.prototype.pdfStream = +Canvas.prototype.createPDFStream = function (options) { + return new PDFStream(this, options) +} + +Canvas.prototype.jpegStream = +Canvas.prototype.createJPEGStream = function (options) { + return new JPEGStream(this, options) +} + +Canvas.prototype.toDataURL = function (a1, a2, a3) { + // valid arg patterns (args -> [type, opts, fn]): + // [] -> ['image/png', null, null] + // [qual] -> ['image/png', null, null] + // [undefined] -> ['image/png', null, null] + // ['image/png'] -> ['image/png', null, null] + // ['image/png', qual] -> ['image/png', null, null] + // [fn] -> ['image/png', null, fn] + // [type, fn] -> [type, null, fn] + // [undefined, fn] -> ['image/png', null, fn] + // ['image/png', qual, fn] -> ['image/png', null, fn] + // ['image/jpeg', fn] -> ['image/jpeg', null, fn] + // ['image/jpeg', opts, fn] -> ['image/jpeg', opts, fn] + // ['image/jpeg', qual, fn] -> ['image/jpeg', {quality: qual}, fn] + // ['image/jpeg', undefined, fn] -> ['image/jpeg', null, fn] + // ['image/jpeg'] -> ['image/jpeg', null, fn] + // ['image/jpeg', opts] -> ['image/jpeg', opts, fn] + // ['image/jpeg', qual] -> ['image/jpeg', {quality: qual}, fn] + + let type = 'image/png' + let opts = {} + let fn + + if (typeof a1 === 'function') { + fn = a1 + } else { + if (typeof a1 === 'string' && FORMATS.includes(a1.toLowerCase())) { + type = a1.toLowerCase() + } + + if (typeof a2 === 'function') { + fn = a2 + } else { + if (typeof a2 === 'object') { + opts = a2 + } else if (typeof a2 === 'number') { + opts = { quality: Math.max(0, Math.min(1, a2)) } + } + + if (typeof a3 === 'function') { + fn = a3 + } else if (undefined !== a3) { + throw new TypeError(`${typeof a3} is not a function`) + } + } + } + + if (this.width === 0 || this.height === 0) { + // Per spec, if the bitmap has no pixels, return this string: + const str = 'data:,' + if (fn) { + setTimeout(() => fn(null, str)) + return + } else { + return str + } + } + + if (fn) { + this.toBuffer((err, buf) => { + if (err) return fn(err) + fn(null, `data:${type};base64,${buf.toString('base64')}`) + }, type, opts) + } else { + return `data:${type};base64,${this.toBuffer(type, opts).toString('base64')}` + } +} diff --git a/miniprogram/node_modules/canvas/lib/context2d.js b/miniprogram/node_modules/canvas/lib/context2d.js new file mode 100644 index 00000000..103ec632 --- /dev/null +++ b/miniprogram/node_modules/canvas/lib/context2d.js @@ -0,0 +1,11 @@ +'use strict' + +/*! 
+ * Canvas - Context2d + * Copyright (c) 2010 LearnBoost + * MIT Licensed + */ + +const bindings = require('./bindings') + +module.exports = bindings.CanvasRenderingContext2d diff --git a/miniprogram/node_modules/canvas/lib/image.js b/miniprogram/node_modules/canvas/lib/image.js new file mode 100644 index 00000000..a6c81ba8 --- /dev/null +++ b/miniprogram/node_modules/canvas/lib/image.js @@ -0,0 +1,97 @@ +'use strict' + +/*! + * Canvas - Image + * Copyright (c) 2010 LearnBoost + * MIT Licensed + */ + +/** + * Module dependencies. + */ + +const bindings = require('./bindings') +const Image = module.exports = bindings.Image +const util = require('util') + +const { GetSource, SetSource } = bindings + +Object.defineProperty(Image.prototype, 'src', { + /** + * src setter. Valid values: + * * `data:` URI + * * Local file path + * * HTTP or HTTPS URL + * * Buffer containing image data (i.e. not a `data:` URI stored in a Buffer) + * + * @param {String|Buffer} val filename, buffer, data URI, URL + * @api public + */ + set (val) { + if (typeof val === 'string') { + if (/^\s*data:/.test(val)) { // data: URI + const commaI = val.indexOf(',') + // 'base64' must come before the comma + const isBase64 = val.lastIndexOf('base64', commaI) !== -1 + const content = val.slice(commaI + 1) + setSource(this, Buffer.from(content, isBase64 ? 'base64' : 'utf8'), val) + } else if (/^\s*https?:\/\//.test(val)) { // remote URL + const onerror = err => { + if (typeof this.onerror === 'function') { + this.onerror(err) + } else { + throw err + } + } + + fetch(val, { + method: 'GET', + headers: { 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36' } + }) + .then(res => { + if (!res.ok) { + throw new Error(`Server responded with ${res.status}`) + } + return res.arrayBuffer() + }) + .then(data => { + setSource(this, Buffer.from(data)) + }) + .catch(onerror) + } else { // local file path assumed + setSource(this, val) + } + } else if (Buffer.isBuffer(val)) { + setSource(this, val) + } else { + const err = new Error("Invalid image source") + if (typeof this.onerror === 'function') this.onerror(err) + else throw err + } + }, + + get () { + // TODO https://github.com/Automattic/node-canvas/issues/118 + return getSource(this) + }, + + configurable: true +}) + +// TODO || is for Node.js pre-v6.6.0 +Image.prototype[util.inspect.custom || 'inspect'] = function () { + return '[Image' + + (this.complete ? ':' + this.width + 'x' + this.height : '') + + (this.src ? ' ' + this.src : '') + + (this.complete ? ' complete' : '') + + ']' +} + +function getSource (img) { + return img._originalSource || GetSource.call(img) +} + +function setSource (img, src, origSrc) { + SetSource.call(img, src) + img._originalSource = origSrc +} diff --git a/miniprogram/node_modules/canvas/lib/jpegstream.js b/miniprogram/node_modules/canvas/lib/jpegstream.js new file mode 100644 index 00000000..701d2f87 --- /dev/null +++ b/miniprogram/node_modules/canvas/lib/jpegstream.js @@ -0,0 +1,41 @@ +'use strict' + +/*! 
+ * Canvas - JPEGStream + * Copyright (c) 2010 LearnBoost + * MIT Licensed + */ + +const { Readable } = require('stream') +function noop () {} + +class JPEGStream extends Readable { + constructor (canvas, options) { + super() + + if (canvas.streamJPEGSync === undefined) { + throw new Error('node-canvas was built without JPEG support.') + } + + this.options = options + this.canvas = canvas + } + + _read () { + // For now we're not controlling the c++ code's data emission, so we only + // call canvas.streamJPEGSync once and let it emit data at will. + this._read = noop + + this.canvas.streamJPEGSync(this.options, (err, chunk) => { + if (err) { + this.emit('error', err) + } else if (chunk) { + this.push(chunk) + } else { + this.push(null) + } + }) + } +}; + +module.exports = JPEGStream diff --git a/miniprogram/node_modules/canvas/lib/pattern.js b/miniprogram/node_modules/canvas/lib/pattern.js new file mode 100644 index 00000000..fe5bbc30 --- /dev/null +++ b/miniprogram/node_modules/canvas/lib/pattern.js @@ -0,0 +1,15 @@ +'use strict' + +/*! + * Canvas - CanvasPattern + * Copyright (c) 2010 LearnBoost + * MIT Licensed + */ + +const bindings = require('./bindings') + +module.exports = bindings.CanvasPattern + +bindings.CanvasPattern.prototype.toString = function () { + return '[object CanvasPattern]' +} diff --git a/miniprogram/node_modules/canvas/lib/pdfstream.js b/miniprogram/node_modules/canvas/lib/pdfstream.js new file mode 100644 index 00000000..8643af75 --- /dev/null +++ b/miniprogram/node_modules/canvas/lib/pdfstream.js @@ -0,0 +1,35 @@ +'use strict' + +/*! + * Canvas - PDFStream + */ + +const { Readable } = require('stream') +function noop () {} + +class PDFStream extends Readable { + constructor (canvas, options) { + super() + + this.canvas = canvas + this.options = options + } + + _read () { + // For now we're not controlling the c++ code's data emission, so we only + // call canvas.streamPDFSync once and let it emit data at will. + this._read = noop + + this.canvas.streamPDFSync((err, chunk, len) => { + if (err) { + this.emit('error', err) + } else if (len) { + this.push(chunk) + } else { + this.push(null) + } + }, this.options) + } +} + +module.exports = PDFStream diff --git a/miniprogram/node_modules/canvas/lib/pngstream.js b/miniprogram/node_modules/canvas/lib/pngstream.js new file mode 100644 index 00000000..db8fdb46 --- /dev/null +++ b/miniprogram/node_modules/canvas/lib/pngstream.js @@ -0,0 +1,42 @@ +'use strict' + +/*! + * Canvas - PNGStream + * Copyright (c) 2010 LearnBoost + * MIT Licensed + */ + +const { Readable } = require('stream') +function noop () {} + +class PNGStream extends Readable { + constructor (canvas, options) { + super() + + if (options && + options.palette instanceof Uint8ClampedArray && + options.palette.length % 4 !== 0) { + throw new Error('Palette length must be a multiple of 4.') + } + this.canvas = canvas + this.options = options || {} + } + + _read () { + // For now we're not controlling the c++ code's data emission, so we only + // call canvas.streamPNGSync once and let it emit data at will. 
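+    // Illustrative usage (not part of this module): canvas.createPNGStream(options).pipe(fs.createWriteStream('out.png'))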
+ this._read = noop + + this.canvas.streamPNGSync((err, chunk, len) => { + if (err) { + this.emit('error', err) + } else if (len) { + this.push(chunk) + } else { + this.push(null) + } + }, this.options) + } +} + +module.exports = PNGStream diff --git a/miniprogram/node_modules/canvas/package.json b/miniprogram/node_modules/canvas/package.json new file mode 100644 index 00000000..7ba228af --- /dev/null +++ b/miniprogram/node_modules/canvas/package.json @@ -0,0 +1,71 @@ +{ + "name": "canvas", + "description": "Canvas graphics API backed by Cairo", + "version": "3.2.1", + "author": "TJ Holowaychuk ", + "main": "index.js", + "browser": "browser.js", + "types": "index.d.ts", + "contributors": [ + "Nathan Rajlich ", + "Rod Vagg ", + "Juriy Zaytsev " + ], + "keywords": [ + "canvas", + "graphic", + "graphics", + "pixman", + "cairo", + "image", + "images", + "pdf" + ], + "homepage": "https://github.com/Automattic/node-canvas", + "repository": "git://github.com/Automattic/node-canvas.git", + "scripts": { + "prebenchmark": "node-gyp build", + "benchmark": "node benchmarks/run.js", + "lint": "standard examples/*.js test/server.js test/public/*.js benchmarks/run.js lib/context2d.js util/has_lib.js browser.js index.js", + "test": "mocha test/*.test.js", + "pretest-server": "node-gyp build", + "test-server": "node test/server.js", + "generate-wpt": "node ./test/wpt/generate.js", + "test-wpt": "mocha test/wpt/generated/*.js", + "install": "prebuild-install -r napi || node-gyp rebuild", + "tsd": "tsd" + }, + "files": [ + "binding.gyp", + "browser.js", + "index.d.ts", + "index.js", + "lib/", + "src/", + "util/" + ], + "dependencies": { + "node-addon-api": "^7.0.0", + "prebuild-install": "^7.1.3" + }, + "devDependencies": { + "@types/node": "^10.12.18", + "assert-rejects": "^1.0.0", + "express": "^4.16.3", + "js-yaml": "^4.1.0", + "mocha": "^5.2.0", + "pixelmatch": "^4.0.2", + "standard": "^12.0.1", + "tsd": "^0.29.0", + "typescript": "^4.2.2" + }, + "engines": { + "node": "^18.12.0 || >= 20.9.0" + }, + "binary": { + "napi_versions": [ + 7 + ] + }, + "license": "MIT" +} diff --git a/miniprogram/node_modules/canvas/src/Backends.cc b/miniprogram/node_modules/canvas/src/Backends.cc new file mode 100644 index 00000000..3a557669 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/Backends.cc @@ -0,0 +1,18 @@ +#include "Backends.h" + +#include "backend/ImageBackend.h" +#include "backend/PdfBackend.h" +#include "backend/SvgBackend.h" + +using namespace Napi; + +void +Backends::Initialize(Napi::Env env, Napi::Object exports) { + Napi::Object obj = Napi::Object::New(env); + + ImageBackend::Initialize(obj); + PdfBackend::Initialize(obj); + SvgBackend::Initialize(obj); + + exports.Set("Backends", obj); +} diff --git a/miniprogram/node_modules/canvas/src/Backends.h b/miniprogram/node_modules/canvas/src/Backends.h new file mode 100644 index 00000000..66a1c1db --- /dev/null +++ b/miniprogram/node_modules/canvas/src/Backends.h @@ -0,0 +1,9 @@ +#pragma once + +#include "backend/Backend.h" +#include <napi.h> + +class Backends : public Napi::ObjectWrap<Backends> { + public: + static void Initialize(Napi::Env env, Napi::Object exports); +}; diff --git a/miniprogram/node_modules/canvas/src/Canvas.cc b/miniprogram/node_modules/canvas/src/Canvas.cc new file mode 100644 index 00000000..bc790add --- /dev/null +++ b/miniprogram/node_modules/canvas/src/Canvas.cc @@ -0,0 +1,939 @@ +// Copyright (c) 2010 LearnBoost + +#include "Canvas.h" +#include "InstanceData.h" +#include <algorithm> // std::min +#include +#include +#include +#include 
"CanvasRenderingContext2d.h" +#include "closure.h" +#include +#include +#include +#include +#include "PNG.h" +#include "register_font.h" +#include +#include +#include +#include +#include "Util.h" +#include +#include "node_buffer.h" +#include "FontParser.h" + +#ifdef HAVE_JPEG +#include "JPEGStream.h" +#endif + +#include "backend/ImageBackend.h" +#include "backend/PdfBackend.h" +#include "backend/SvgBackend.h" + +#define GENERIC_FACE_ERROR \ + "The second argument to registerFont is required, and should be an object " \ + "with at least a family (string) and optionally weight (string/number) " \ + "and style (string)." + +using namespace std; + +std::vector Canvas::font_face_list; + +// Increases each time a font is (de)registered +int Canvas::fontSerial = 1; + +/* + * Initialize Canvas. + */ + +void +Canvas::Initialize(Napi::Env& env, Napi::Object& exports) { + Napi::HandleScope scope(env); + InstanceData* data = env.GetInstanceData(); + + // Constructor + Napi::Function ctor = DefineClass(env, "Canvas", { + InstanceMethod<&Canvas::ToBuffer>("toBuffer", napi_default_method), + InstanceMethod<&Canvas::StreamPNGSync>("streamPNGSync", napi_default_method), + InstanceMethod<&Canvas::StreamPDFSync>("streamPDFSync", napi_default_method), +#ifdef HAVE_JPEG + InstanceMethod<&Canvas::StreamJPEGSync>("streamJPEGSync", napi_default_method), +#endif + InstanceAccessor<&Canvas::GetType>("type", napi_default_jsproperty), + InstanceAccessor<&Canvas::GetStride>("stride", napi_default_jsproperty), + InstanceAccessor<&Canvas::GetWidth, &Canvas::SetWidth>("width", napi_default_jsproperty), + InstanceAccessor<&Canvas::GetHeight, &Canvas::SetHeight>("height", napi_default_jsproperty), + StaticValue("PNG_NO_FILTERS", Napi::Number::New(env, PNG_NO_FILTERS), napi_default_jsproperty), + StaticValue("PNG_FILTER_NONE", Napi::Number::New(env, PNG_FILTER_NONE), napi_default_jsproperty), + StaticValue("PNG_FILTER_SUB", Napi::Number::New(env, PNG_FILTER_SUB), napi_default_jsproperty), + StaticValue("PNG_FILTER_UP", Napi::Number::New(env, PNG_FILTER_UP), napi_default_jsproperty), + StaticValue("PNG_FILTER_AVG", Napi::Number::New(env, PNG_FILTER_AVG), napi_default_jsproperty), + StaticValue("PNG_FILTER_PAETH", Napi::Number::New(env, PNG_FILTER_PAETH), napi_default_jsproperty), + StaticValue("PNG_ALL_FILTERS", Napi::Number::New(env, PNG_ALL_FILTERS), napi_default_jsproperty), + StaticMethod<&Canvas::RegisterFont>("_registerFont", napi_default_method), + StaticMethod<&Canvas::DeregisterAllFonts>("_deregisterAllFonts", napi_default_method), + StaticMethod<&Canvas::ParseFont>("parseFont", napi_default_method) + }); + + data->CanvasCtor = Napi::Persistent(ctor); + exports.Set("Canvas", ctor); +} + +/* + * Initialize a Canvas with the given width and height. 
+ */ + +Canvas::Canvas(const Napi::CallbackInfo& info) : Napi::ObjectWrap(info), env(info.Env()) { + InstanceData* data = env.GetInstanceData(); + ctor = Napi::Persistent(data->CanvasCtor.Value()); + Backend* backend = NULL; + Napi::Object jsBackend; + + if (info[0].IsNumber()) { + Napi::Number width = info[0].As(); + Napi::Number height = Napi::Number::New(env, 0); + + if (info[1].IsNumber()) height = info[1].As(); + + if (info[2].IsString()) { + std::string str = info[2].As(); + if (str == "pdf") { + Napi::Maybe instance = data->PdfBackendCtor.New({ width, height }); + if (instance.IsJust()) backend = PdfBackend::Unwrap(jsBackend = instance.Unwrap()); + } else if (str == "svg") { + Napi::Maybe instance = data->SvgBackendCtor.New({ width, height }); + if (instance.IsJust()) backend = SvgBackend::Unwrap(jsBackend = instance.Unwrap()); + } else { + Napi::Maybe instance = data->ImageBackendCtor.New({ width, height }); + if (instance.IsJust()) backend = ImageBackend::Unwrap(jsBackend = instance.Unwrap()); + } + } else { + Napi::Maybe instance = data->ImageBackendCtor.New({ width, height }); + if (instance.IsJust()) backend = ImageBackend::Unwrap(jsBackend = instance.Unwrap()); + } + } else if (info[0].IsObject()) { + jsBackend = info[0].As(); + if (jsBackend.InstanceOf(data->ImageBackendCtor.Value()).UnwrapOr(false)) { + backend = ImageBackend::Unwrap(jsBackend); + } else if (jsBackend.InstanceOf(data->PdfBackendCtor.Value()).UnwrapOr(false)) { + backend = PdfBackend::Unwrap(jsBackend); + } else if (jsBackend.InstanceOf(data->SvgBackendCtor.Value()).UnwrapOr(false)) { + backend = SvgBackend::Unwrap(jsBackend); + } else { + Napi::TypeError::New(env, "Invalid arguments").ThrowAsJavaScriptException(); + return; + } + } else { + Napi::Number width = Napi::Number::New(env, 0); + Napi::Number height = Napi::Number::New(env, 0); + Napi::Maybe instance = data->ImageBackendCtor.New({ width, height }); + if (instance.IsJust()) backend = ImageBackend::Unwrap(jsBackend = instance.Unwrap()); + } + + backend->setCanvas(this); + + if (!backend->isSurfaceValid()) { + Napi::Error::New(env, backend->getError()).ThrowAsJavaScriptException(); + return; + } + + // Note: the backend gets destroyed when the jsBackend is GC'd. The cleaner + // way would be to only store the jsBackend and unwrap it when the c++ ref is + // needed, but that's slower and a burden. The _backend might be null if we + // returned early, but since an exception was thrown it gets destroyed soon. + _backend = backend; + _jsBackend = Napi::Persistent(jsBackend); +} + +/* + * Get type string. + */ + +Napi::Value +Canvas::GetType(const Napi::CallbackInfo& info) { + return Napi::String::New(env, backend()->getName()); +} + +/* + * Get stride. + */ +Napi::Value +Canvas::GetStride(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, stride()); +} + +/* + * Get width. + */ + +Napi::Value +Canvas::GetWidth(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, getWidth()); +} + +/* + * Set width. + */ + +void +Canvas::SetWidth(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (value.IsNumber()) { + backend()->setWidth(value.As().Uint32Value()); + resurface(info.This().As()); + } +} + +/* + * Get height. + */ + +Napi::Value +Canvas::GetHeight(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, getHeight()); +} + +/* + * Set height. 
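+ * Like width, resizing triggers resurface(), which rebuilds the backing
+ * surface and resets the attached 2d context (if one exists).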
+ */ + +void +Canvas::SetHeight(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (value.IsNumber()) { + backend()->setHeight(value.As().Uint32Value()); + resurface(info.This().As()); + } +} + +/* + * EIO toBuffer callback. + */ + +void +Canvas::ToPngBufferAsync(Closure* base) { + PngClosure* closure = static_cast(base); + + closure->status = canvas_write_to_png_stream( + closure->canvas->surface(), + PngClosure::writeVec, + closure); +} + +#ifdef HAVE_JPEG +void +Canvas::ToJpegBufferAsync(Closure* base) { + JpegClosure* closure = static_cast(base); + write_to_jpeg_buffer(closure->canvas->surface(), closure); +} +#endif + +static void +parsePNGArgs(Napi::Value arg, PngClosure& pngargs) { + if (arg.IsObject()) { + Napi::Object obj = arg.As(); + Napi::Value cLevel; + + if (obj.Get("compressionLevel").UnwrapTo(&cLevel) && cLevel.IsNumber()) { + uint32_t val = cLevel.As().Uint32Value(); + // See quote below from spec section 4.12.5.5. + if (val <= 9) pngargs.compressionLevel = val; + } + + Napi::Value rez; + if (obj.Get("resolution").UnwrapTo(&rez) && rez.IsNumber()) { + uint32_t val = rez.As().Uint32Value(); + if (val > 0) pngargs.resolution = val; + } + + Napi::Value filters; + if (obj.Get("filters").UnwrapTo(&filters) && filters.IsNumber()) { + pngargs.filters = filters.As().Uint32Value(); + } + + Napi::Value palette; + if (obj.Get("palette").UnwrapTo(&palette) && palette.IsTypedArray()) { + Napi::TypedArray palette_ta = palette.As(); + if (palette_ta.TypedArrayType() == napi_uint8_clamped_array) { + pngargs.nPaletteColors = palette_ta.ElementLength(); + if (pngargs.nPaletteColors % 4 != 0) { + throw "Palette length must be a multiple of 4."; + } + pngargs.palette = palette_ta.As().Data(); + pngargs.nPaletteColors /= 4; + // Optional background color index: + Napi::Value backgroundIndexVal; + if (obj.Get("backgroundIndex").UnwrapTo(&backgroundIndexVal) && backgroundIndexVal.IsNumber()) { + pngargs.backgroundIndex = backgroundIndexVal.As().Uint32Value(); + } + } + } + } +} + +#ifdef HAVE_JPEG +static void parseJPEGArgs(Napi::Value arg, JpegClosure& jpegargs) { + // "If Type(quality) is not Number, or if quality is outside that range, the + // user agent must use its default quality value, as if the quality argument + // had not been given." - 4.12.5.5 + if (arg.IsObject()) { + Napi::Object obj = arg.As(); + + Napi::Value qual; + if (obj.Get("quality").UnwrapTo(&qual) && qual.IsNumber()) { + double quality = qual.As().DoubleValue(); + if (quality >= 0.0 && quality <= 1.0) { + jpegargs.quality = static_cast(100.0 * quality); + } + } + + Napi::Value chroma; + if (obj.Get("chromaSubsampling").UnwrapTo(&chroma)) { + if (chroma.IsBoolean()) { + bool subsample = chroma.As().Value(); + jpegargs.chromaSubsampling = subsample ? 
2 : 1; + } else if (chroma.IsNumber()) { + jpegargs.chromaSubsampling = chroma.As().Uint32Value(); + } + } + + Napi::Value progressive; + if (obj.Get("progressive").UnwrapTo(&progressive) && progressive.IsBoolean()) { + jpegargs.progressive = progressive.As().Value(); + } + } +} +#endif + +#if CAIRO_VERSION >= CAIRO_VERSION_ENCODE(1, 16, 0) + +static inline void setPdfMetaStr(cairo_surface_t* surf, Napi::Object opts, + cairo_pdf_metadata_t t, const char* propName) { + Napi::Value propValue; + if (opts.Get(propName).UnwrapTo(&propValue) && propValue.IsString()) { + // (copies char data) + cairo_pdf_surface_set_metadata(surf, t, propValue.As().Utf8Value().c_str()); + } +} + +static inline void setPdfMetaDate(cairo_surface_t* surf, Napi::Object opts, + cairo_pdf_metadata_t t, const char* propName) { + Napi::Value propValue; + if (opts.Get(propName).UnwrapTo(&propValue) && propValue.IsDate()) { + auto date = static_cast(propValue.As().ValueOf() / 1000); // ms -> s + char buf[sizeof "2011-10-08T07:07:09Z"]; + strftime(buf, sizeof buf, "%FT%TZ", gmtime(&date)); + cairo_pdf_surface_set_metadata(surf, t, buf); + } +} + +static void setPdfMetadata(Canvas* canvas, Napi::Object opts) { + cairo_surface_t* surf = canvas->surface(); + + setPdfMetaStr(surf, opts, CAIRO_PDF_METADATA_TITLE, "title"); + setPdfMetaStr(surf, opts, CAIRO_PDF_METADATA_AUTHOR, "author"); + setPdfMetaStr(surf, opts, CAIRO_PDF_METADATA_SUBJECT, "subject"); + setPdfMetaStr(surf, opts, CAIRO_PDF_METADATA_KEYWORDS, "keywords"); + setPdfMetaStr(surf, opts, CAIRO_PDF_METADATA_CREATOR, "creator"); + setPdfMetaDate(surf, opts, CAIRO_PDF_METADATA_CREATE_DATE, "creationDate"); + setPdfMetaDate(surf, opts, CAIRO_PDF_METADATA_MOD_DATE, "modDate"); +} + +#endif // CAIRO 16+ + +/* + * Converts/encodes data to a Buffer. Async when a callback function is passed. 
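+ * Returns undefined when the mime type / argument combination is not recognized.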
+ + * PDF canvases: + (any) => Buffer + ("application/pdf", config) => Buffer + + * SVG canvases: + (any) => Buffer + + * ARGB data: + ("raw") => Buffer + + * PNG-encoded + () => Buffer + (undefined|"image/png", {compressionLevel?: number, filter?: number}) => Buffer + ((err: null|Error, buffer) => any) + ((err: null|Error, buffer) => any, undefined|"image/png", {compressionLevel?: number, filter?: number}) + + * JPEG-encoded + ("image/jpeg") => Buffer + ("image/jpeg", {quality?: number, progressive?: Boolean, chromaSubsampling?: Boolean|number}) => Buffer + ((err: null|Error, buffer) => any, "image/jpeg") + ((err: null|Error, buffer) => any, "image/jpeg", {quality?: number, progressive?: Boolean, chromaSubsampling?: Boolean|number}) + */ + +Napi::Value +Canvas::ToBuffer(const Napi::CallbackInfo& info) { + cairo_status_t status; + + // Vector canvases, sync only + const std::string name = backend()->getName(); + if (name == "pdf" || name == "svg") { + // mime type may be present, but it's not checked + PdfSvgClosure* closure; + if (name == "pdf") { + closure = static_cast(backend())->closure(); +#if CAIRO_VERSION >= CAIRO_VERSION_ENCODE(1, 16, 0) + if (info[1].IsObject()) { // toBuffer("application/pdf", config) + setPdfMetadata(this, info[1].As()); + } +#endif // CAIRO 16+ + } else { + closure = static_cast(backend())->closure(); + } + + cairo_surface_t *surf = surface(); + cairo_surface_finish(surf); + + cairo_status_t status = cairo_surface_status(surf); + if (status != CAIRO_STATUS_SUCCESS) { + Napi::Error::New(env, cairo_status_to_string(status)).ThrowAsJavaScriptException(); + return env.Undefined(); + } + + return Napi::Buffer::Copy(env, &closure->vec[0], closure->vec.size()); + } + + // Raw ARGB data -- just a memcpy() + if (info[0].StrictEquals(Napi::String::New(env, "raw"))) { + cairo_surface_t *surface = this->surface(); + cairo_surface_flush(surface); + if (nBytes() > node::Buffer::kMaxLength) { + Napi::Error::New(env, "Data exceeds maximum buffer length.").ThrowAsJavaScriptException(); + return env.Undefined(); + } + return Napi::Buffer::Copy(env, cairo_image_surface_get_data(surface), nBytes()); + } + + // Sync PNG, default + if (info[0].IsUndefined() || info[0].StrictEquals(Napi::String::New(env, "image/png"))) { + try { + PngClosure closure(this); + parsePNGArgs(info[1], closure); + if (closure.nPaletteColors == 0xFFFFFFFF) { + Napi::Error::New(env, "Palette length must be a multiple of 4.").ThrowAsJavaScriptException(); + return env.Undefined(); + } + + status = canvas_write_to_png_stream(surface(), PngClosure::writeVec, &closure); + + if (!env.IsExceptionPending()) { + if (status) { + throw status; // TODO: throw in js? 
+ } else { + // TODO it's possible to avoid this copy + return Napi::Buffer::Copy(env, &closure.vec[0], closure.vec.size()); + } + } + } catch (cairo_status_t ex) { + CairoError(ex).ThrowAsJavaScriptException(); + } catch (const char* ex) { + Napi::Error::New(env, ex).ThrowAsJavaScriptException(); + } + + return env.Undefined(); + } + + // Async PNG + if (info[0].IsFunction() && + (info[1].IsUndefined() || info[1].StrictEquals(Napi::String::New(env, "image/png")))) { + + PngClosure* closure; + try { + closure = new PngClosure(this); + parsePNGArgs(info[2], *closure); + } catch (cairo_status_t ex) { + CairoError(ex).ThrowAsJavaScriptException(); + return env.Undefined(); + } catch (const char* ex) { + Napi::Error::New(env, ex).ThrowAsJavaScriptException(); + return env.Undefined(); + } + + Ref(); + closure->cb = Napi::Persistent(info[0].As()); + + // Make sure the surface exists since we won't have an isolate context in the async block: + surface(); + EncodingWorker* worker = new EncodingWorker(env); + worker->Init(&ToPngBufferAsync, closure); + worker->Queue(); + + return env.Undefined(); + } + +#ifdef HAVE_JPEG + // Sync JPEG + Napi::Value jpegStr = Napi::String::New(env, "image/jpeg"); + if (info[0].StrictEquals(jpegStr)) { + try { + JpegClosure closure(this); + parseJPEGArgs(info[1], closure); + + write_to_jpeg_buffer(surface(), &closure); + + if (!env.IsExceptionPending()) { + // TODO it's possible to avoid this copy. + return Napi::Buffer::Copy(env, &closure.vec[0], closure.vec.size()); + } + } catch (cairo_status_t ex) { + CairoError(ex).ThrowAsJavaScriptException(); + return env.Undefined(); + } + return env.Undefined(); + } + + // Async JPEG + if (info[0].IsFunction() && info[1].StrictEquals(jpegStr)) { + JpegClosure* closure = new JpegClosure(this); + parseJPEGArgs(info[2], *closure); + + Ref(); + closure->cb = Napi::Persistent(info[0].As()); + + // Make sure the surface exists since we won't have an isolate context in the async block: + surface(); + EncodingWorker* worker = new EncodingWorker(env); + worker->Init(&ToJpegBufferAsync, closure); + worker->Queue(); + return env.Undefined(); + } +#endif + + return env.Undefined(); +} + +/* + * Canvas::StreamPNG callback. + */ + +static cairo_status_t +streamPNG(void *c, const uint8_t *data, unsigned len) { + PngClosure* closure = (PngClosure*) c; + Napi::Env env = closure->canvas->env; + Napi::HandleScope scope(env); + Napi::AsyncContext async(env, "canvas:StreamPNG"); + Napi::Value buf = Napi::Buffer::Copy(env, data, len); + closure->cb.MakeCallback(env.Global(), { env.Null(), buf, Napi::Number::New(env, len) }, async); + return CAIRO_STATUS_SUCCESS; +} + +/* + * Stream PNG data synchronously. 
TODO async + * StreamPngSync(this, options: {palette?: Uint8ClampedArray, backgroundIndex?: uint32, compressionLevel: uint32, filters: uint32}) + */ + +void +Canvas::StreamPNGSync(const Napi::CallbackInfo& info) { + if (!info[0].IsFunction()) { + Napi::TypeError::New(env, "callback function required").ThrowAsJavaScriptException(); + return; + } + + PngClosure closure(this); + parsePNGArgs(info[1], closure); + + closure.cb = Napi::Persistent(info[0].As()); + + cairo_status_t status = canvas_write_to_png_stream(surface(), streamPNG, &closure); + + if (!env.IsExceptionPending()) { + if (status) { + closure.cb.Call(env.Global(), { CairoError(status).Value() }); + } else { + closure.cb.Call(env.Global(), { env.Null(), env.Null(), Napi::Number::New(env, 0) }); + } + } +} + + +struct PdfStreamInfo { + Napi::Function fn; + uint32_t len; + uint8_t* data; +}; + +/* + * Canvas::StreamPDF callback. + */ + +static cairo_status_t +streamPDF(void *c, const uint8_t *data, unsigned len) { + PdfStreamInfo* streaminfo = static_cast(c); + Napi::Env env = streaminfo->fn.Env(); + Napi::HandleScope scope(env); + Napi::AsyncContext async(env, "canvas:StreamPDF"); + // TODO this is technically wrong, we're returning a pointer to the data in a + // vector in a class with automatic storage duration. If the canvas goes out + // of scope while we're in the handler, a use-after-free could happen. + Napi::Value buf = Napi::Buffer::New(env, (uint8_t *)(data), len); + streaminfo->fn.MakeCallback(env.Global(), { env.Null(), buf, Napi::Number::New(env, len) }, async); + return CAIRO_STATUS_SUCCESS; +} + + +cairo_status_t canvas_write_to_pdf_stream(cairo_surface_t *surface, cairo_write_func_t write_func, PdfStreamInfo* streaminfo) { + size_t whole_chunks = streaminfo->len / PAGE_SIZE; + size_t remainder = streaminfo->len - whole_chunks * PAGE_SIZE; + + for (size_t i = 0; i < whole_chunks; ++i) { + write_func(streaminfo, &streaminfo->data[i * PAGE_SIZE], PAGE_SIZE); + } + + if (remainder) { + write_func(streaminfo, &streaminfo->data[whole_chunks * PAGE_SIZE], remainder); + } + + return CAIRO_STATUS_SUCCESS; +} + +/* + * Stream PDF data synchronously. + */ + +void +Canvas::StreamPDFSync(const Napi::CallbackInfo& info) { + if (!info[0].IsFunction()) { + Napi::TypeError::New(env, "callback function required").ThrowAsJavaScriptException(); + return; + } + + if (backend()->getName() != "pdf") { + Napi::TypeError::New(env, "wrong canvas type").ThrowAsJavaScriptException(); + return; + } + +#if CAIRO_VERSION >= CAIRO_VERSION_ENCODE(1, 16, 0) + if (info[1].IsObject()) { + setPdfMetadata(this, info[1].As()); + } +#endif + + cairo_surface_finish(surface()); + + PdfSvgClosure* closure = static_cast(backend())->closure(); + Napi::Function fn = info[0].As(); + PdfStreamInfo streaminfo; + streaminfo.fn = fn; + streaminfo.data = &closure->vec[0]; + streaminfo.len = closure->vec.size(); + + cairo_status_t status = canvas_write_to_pdf_stream(surface(), streamPDF, &streaminfo); + + if (!env.IsExceptionPending()) { + if (status) { + fn.Call(env.Global(), { CairoError(status).Value() }); + } else { + fn.Call(env.Global(), { env.Null(), env.Null(), Napi::Number::New(env, 0) }); + } + } +} + +/* + * Stream JPEG data synchronously. 
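+ * StreamJPEGSync(options, callback): note the argument order is reversed
+ * relative to StreamPNGSync(callback, options).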
+ */ + +#ifdef HAVE_JPEG +static uint32_t getSafeBufSize(Canvas* canvas) { + // Don't allow the buffer size to exceed the size of the canvas (#674) + // TODO not sure if this is really correct, but it fixed #674 + return (std::min)(canvas->getWidth() * canvas->getHeight() * 4, static_cast(PAGE_SIZE)); +} + +void +Canvas::StreamJPEGSync(const Napi::CallbackInfo& info) { + if (!info[1].IsFunction()) { + Napi::TypeError::New(env, "callback function required").ThrowAsJavaScriptException(); + return; + } + + JpegClosure closure(this); + parseJPEGArgs(info[0], closure); + closure.cb = Napi::Persistent(info[1].As()); + + uint32_t bufsize = getSafeBufSize(this); + write_to_jpeg_stream(surface(), bufsize, &closure); +} +#endif + +char * +str_value(Napi::Maybe maybe, const char *fallback, bool can_be_number) { + Napi::Value val; + if (maybe.UnwrapTo(&val)) { + if (val.IsString() || (can_be_number && val.IsNumber())) { + Napi::String strVal; + if (val.ToString().UnwrapTo(&strVal)) return strdup(strVal.Utf8Value().c_str()); + } else if (fallback) { + return strdup(fallback); + } + } + + return NULL; +} + +void +Canvas::RegisterFont(const Napi::CallbackInfo& info) { + Napi::Env env = info.Env(); + if (!info[0].IsString()) { + Napi::Error::New(env, "Wrong argument type").ThrowAsJavaScriptException(); + return; + } else if (!info[1].IsObject()) { + Napi::Error::New(env, GENERIC_FACE_ERROR).ThrowAsJavaScriptException(); + return; + } + + std::string filePath = info[0].As(); + PangoFontDescription *sys_desc = get_pango_font_description((unsigned char *)(filePath.c_str())); + + if (!sys_desc) { + Napi::Error::New(env, "Could not parse font file").ThrowAsJavaScriptException(); + return; + } + + PangoFontDescription *user_desc = pango_font_description_new(); + + // now check the attrs, there are many ways to be wrong + Napi::Object js_user_desc = info[1].As(); + + // TODO: use FontParser on these values just like the FontFace API works + char *family = str_value(js_user_desc.Get("family"), NULL, false); + char *weight = str_value(js_user_desc.Get("weight"), "normal", true); + char *style = str_value(js_user_desc.Get("style"), "normal", false); + + if (family && weight && style) { + pango_font_description_set_weight(user_desc, Canvas::GetWeightFromCSSString(weight)); + pango_font_description_set_style(user_desc, Canvas::GetStyleFromCSSString(style)); + pango_font_description_set_family(user_desc, family); + + auto found = std::find_if(font_face_list.begin(), font_face_list.end(), [&](FontFace& f) { + return pango_font_description_equal(f.sys_desc, sys_desc); + }); + + if (found != font_face_list.end()) { + pango_font_description_free(found->user_desc); + found->user_desc = user_desc; + } else if (register_font((unsigned char *) filePath.c_str())) { + FontFace face; + face.user_desc = user_desc; + face.sys_desc = sys_desc; + strncpy((char *)face.file_path, (char *) filePath.c_str(), 1023); + font_face_list.push_back(face); + } else { + pango_font_description_free(user_desc); + Napi::Error::New(env, "Could not load font to the system's font host").ThrowAsJavaScriptException(); + + } + } else { + pango_font_description_free(user_desc); + if (!env.IsExceptionPending()) { + Napi::Error::New(env, GENERIC_FACE_ERROR).ThrowAsJavaScriptException(); + } + } + + free(family); + free(weight); + free(style); + fontSerial++; +} + +void +Canvas::DeregisterAllFonts(const Napi::CallbackInfo& info) { + Napi::Env env = info.Env(); + // Unload all fonts from pango to free up memory + bool success = true; + + 
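+  // Drain the whole list even if an individual deregistration fails, so every
+  // PangoFontDescription is still freed; a single error is reported afterwards.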
std::for_each(font_face_list.begin(), font_face_list.end(), [&](FontFace& f) { + if (!deregister_font( (unsigned char *)f.file_path )) success = false; + pango_font_description_free(f.user_desc); + pango_font_description_free(f.sys_desc); + }); + + font_face_list.clear(); + fontSerial++; + if (!success) Napi::Error::New(env, "Could not deregister one or more fonts").ThrowAsJavaScriptException(); +} + +/* + * Do not use! This is only exported for testing + */ +Napi::Value +Canvas::ParseFont(const Napi::CallbackInfo& info) { + Napi::Env env = info.Env(); + + if (info.Length() != 1) return env.Undefined(); + + Napi::String str; + if (!info[0].ToString().UnwrapTo(&str)) return env.Undefined(); + + bool ok; + auto props = FontParser::parse(str, &ok); + if (!ok) return env.Undefined(); + + Napi::Object obj = Napi::Object::New(env); + obj.Set("size", Napi::Number::New(env, props.fontSize)); + Napi::Array families = Napi::Array::New(env); + obj.Set("families", families); + + unsigned int index = 0; + + for (auto& family : props.fontFamily) { + families[index++] = Napi::String::New(env, family); + } + + obj.Set("weight", Napi::Number::New(env, props.fontWeight)); + obj.Set("variant", Napi::Number::New(env, static_cast(props.fontVariant))); + obj.Set("style", Napi::Number::New(env, static_cast(props.fontStyle))); + + return obj; +} + +/* + * Get a PangoStyle from a CSS string (like "italic") + */ + +PangoStyle +Canvas::GetStyleFromCSSString(const char *style) { + PangoStyle s = PANGO_STYLE_NORMAL; + + if (strlen(style) > 0) { + if (0 == strcmp("italic", style)) { + s = PANGO_STYLE_ITALIC; + } else if (0 == strcmp("oblique", style)) { + s = PANGO_STYLE_OBLIQUE; + } + } + + return s; +} + +/* + * Get a PangoWeight from a CSS string ("bold", "100", etc) + */ + +PangoWeight +Canvas::GetWeightFromCSSString(const char *weight) { + PangoWeight w = PANGO_WEIGHT_NORMAL; + + if (strlen(weight) > 0) { + if (0 == strcmp("bold", weight)) { + w = PANGO_WEIGHT_BOLD; + } else if (0 == strcmp("100", weight)) { + w = PANGO_WEIGHT_THIN; + } else if (0 == strcmp("200", weight)) { + w = PANGO_WEIGHT_ULTRALIGHT; + } else if (0 == strcmp("300", weight)) { + w = PANGO_WEIGHT_LIGHT; + } else if (0 == strcmp("400", weight)) { + w = PANGO_WEIGHT_NORMAL; + } else if (0 == strcmp("500", weight)) { + w = PANGO_WEIGHT_MEDIUM; + } else if (0 == strcmp("600", weight)) { + w = PANGO_WEIGHT_SEMIBOLD; + } else if (0 == strcmp("700", weight)) { + w = PANGO_WEIGHT_BOLD; + } else if (0 == strcmp("800", weight)) { + w = PANGO_WEIGHT_ULTRABOLD; + } else if (0 == strcmp("900", weight)) { + w = PANGO_WEIGHT_HEAVY; + } + } + + return w; +} + +/* + * Given a user description, return a description that will select the + * font either from the system or @font-face + */ + +PangoFontDescription * +Canvas::ResolveFontDescription(const PangoFontDescription *desc) { + // One of the user-specified families could map to multiple SFNT family names + // if someone registered two different fonts under the same family name. 
+ // https://drafts.csswg.org/css-fonts-3/#font-style-matching + FontFace best; + istringstream families(pango_font_description_get_family(desc)); + unordered_set seen_families; + string resolved_families; + bool first = true; + + for (string family; getline(families, family, ','); ) { + string renamed_families; + for (auto& ff : font_face_list) { + string pangofamily = string(pango_font_description_get_family(ff.user_desc)); + if (streq_casein(family, pangofamily)) { + const char* sys_desc_family_name = pango_font_description_get_family(ff.sys_desc); + bool unseen = seen_families.find(sys_desc_family_name) == seen_families.end(); + bool better = best.user_desc == nullptr || pango_font_description_better_match(desc, best.user_desc, ff.user_desc); + + // Avoid sending duplicate SFNT font names due to a bug in Pango for macOS: + // https://bugzilla.gnome.org/show_bug.cgi?id=762873 + if (unseen) { + seen_families.insert(sys_desc_family_name); + + if (better) { + renamed_families = string(sys_desc_family_name) + (renamed_families.size() ? "," : "") + renamed_families; + } else { + renamed_families = renamed_families + (renamed_families.size() ? "," : "") + sys_desc_family_name; + } + } + + if (first && better) best = ff; + } + } + + if (resolved_families.size()) resolved_families += ','; + resolved_families += renamed_families.size() ? renamed_families : family; + first = false; + } + + PangoFontDescription* ret = pango_font_description_copy(best.sys_desc ? best.sys_desc : desc); + pango_font_description_set_family(ret, resolved_families.c_str()); + + return ret; +} + +/* + * Re-alloc the surface, destroying the previous. + */ + +void +Canvas::resurface(Napi::Object This) { + Napi::HandleScope scope(env); + Napi::Value context; + + if (This.Get("context").UnwrapTo(&context) && context.IsObject()) { + backend()->destroySurface(); + backend()->ensureSurface(); + // Reset context + Context2d *context2d = Context2d::Unwrap(context.As()); + cairo_t *prev = context2d->context(); + context2d->setContext(createCairoContext()); + context2d->resetState(); + cairo_destroy(prev); + } +} + +/** + * Wrapper around cairo_create() + * (do not call cairo_create directly, call this instead) + */ +cairo_t* +Canvas::createCairoContext() { + cairo_t* ret = cairo_create(surface()); + cairo_set_line_width(ret, 1); // Cairo defaults to 2 + return ret; +} + +/* + * Construct an Error from the given cairo status. 
+ */ + +Napi::Error +Canvas::CairoError(cairo_status_t status) { + return Napi::Error::New(env, cairo_status_to_string(status)); +} diff --git a/miniprogram/node_modules/canvas/src/Canvas.h b/miniprogram/node_modules/canvas/src/Canvas.h new file mode 100644 index 00000000..7d0bc9d6 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/Canvas.h @@ -0,0 +1,101 @@ +// Copyright (c) 2010 LearnBoost + +#pragma once + +struct Closure; + +#include "backend/Backend.h" +#include "closure.h" +#include +#include "dll_visibility.h" +#include +#include +#include +#include + +/* + * FontFace describes a font file in terms of one PangoFontDescription that + * will resolve to it and one that the user describes it as (like @font-face) + */ +class FontFace { + public: + PangoFontDescription *sys_desc = nullptr; + PangoFontDescription *user_desc = nullptr; + unsigned char file_path[1024]; +}; + +enum text_baseline_t : uint8_t { + TEXT_BASELINE_ALPHABETIC = 0, + TEXT_BASELINE_TOP = 1, + TEXT_BASELINE_BOTTOM = 2, + TEXT_BASELINE_MIDDLE = 3, + TEXT_BASELINE_IDEOGRAPHIC = 4, + TEXT_BASELINE_HANGING = 5 +}; + +enum text_align_t : int8_t { + TEXT_ALIGNMENT_LEFT = -1, + TEXT_ALIGNMENT_CENTER = 0, + TEXT_ALIGNMENT_RIGHT = 1, + TEXT_ALIGNMENT_START = -2, + TEXT_ALIGNMENT_END = 2 +}; + +enum canvas_draw_mode_t : uint8_t { + TEXT_DRAW_PATHS, + TEXT_DRAW_GLYPHS +}; + +/* + * Canvas. + */ + +class Canvas : public Napi::ObjectWrap { + public: + Canvas(const Napi::CallbackInfo& info); + static void Initialize(Napi::Env& env, Napi::Object& target); + + Napi::Value ToBuffer(const Napi::CallbackInfo& info); + Napi::Value GetType(const Napi::CallbackInfo& info); + Napi::Value GetStride(const Napi::CallbackInfo& info); + Napi::Value GetWidth(const Napi::CallbackInfo& info); + Napi::Value GetHeight(const Napi::CallbackInfo& info); + void SetWidth(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetHeight(const Napi::CallbackInfo& info, const Napi::Value& value); + void StreamPNGSync(const Napi::CallbackInfo& info); + void StreamPDFSync(const Napi::CallbackInfo& info); + void StreamJPEGSync(const Napi::CallbackInfo& info); + static void RegisterFont(const Napi::CallbackInfo& info); + static void DeregisterAllFonts(const Napi::CallbackInfo& info); + static Napi::Value ParseFont(const Napi::CallbackInfo& info); + Napi::Error CairoError(cairo_status_t status); + static void ToPngBufferAsync(Closure* closure); + static void ToJpegBufferAsync(Closure* closure); + static PangoWeight GetWeightFromCSSString(const char *weight); + static PangoStyle GetStyleFromCSSString(const char *style); + static PangoFontDescription *ResolveFontDescription(const PangoFontDescription *desc); + + DLL_PUBLIC inline Backend* backend() { return _backend; } + DLL_PUBLIC inline cairo_surface_t* surface(){ return backend()->ensureSurface(); } + cairo_t* createCairoContext(); + + DLL_PUBLIC inline uint8_t *data(){ return cairo_image_surface_get_data(surface()); } + DLL_PUBLIC inline int stride(){ return cairo_image_surface_get_stride(surface()); } + DLL_PUBLIC inline std::size_t nBytes(){ + return static_cast(backend()->getHeight()) * stride(); + } + + DLL_PUBLIC inline int getWidth() { return backend()->getWidth(); } + DLL_PUBLIC inline int getHeight() { return backend()->getHeight(); } + + void resurface(Napi::Object This); + + Napi::Env env; + static int fontSerial; + + private: + Backend* _backend; + Napi::ObjectReference _jsBackend; + Napi::FunctionReference ctor; + static std::vector font_face_list; +}; diff --git 
a/miniprogram/node_modules/canvas/src/CanvasError.h b/miniprogram/node_modules/canvas/src/CanvasError.h new file mode 100644 index 00000000..535d153f --- /dev/null +++ b/miniprogram/node_modules/canvas/src/CanvasError.h @@ -0,0 +1,37 @@ +#pragma once + +#include +#include + +class CanvasError { + public: + std::string message; + std::string syscall; + std::string path; + int cerrno = 0; + void set(const char* iMessage = NULL, const char* iSyscall = NULL, int iErrno = 0, const char* iPath = NULL) { + if (iMessage) message.assign(iMessage); + if (iSyscall) syscall.assign(iSyscall); + cerrno = iErrno; + if (iPath) path.assign(iPath); + } + void reset() { + message.clear(); + syscall.clear(); + path.clear(); + cerrno = 0; + } + bool empty() { + return cerrno == 0 && message.empty(); + } + Napi::Error toError(Napi::Env env) { + if (cerrno) { + Napi::Error err = Napi::Error::New(env, strerror(cerrno)); + if (!syscall.empty()) err.Value().Set("syscall", syscall); + if (!path.empty()) err.Value().Set("path", path); + return err; + } else { + return Napi::Error::New(env, message); + } + } +}; diff --git a/miniprogram/node_modules/canvas/src/CanvasGradient.cc b/miniprogram/node_modules/canvas/src/CanvasGradient.cc new file mode 100644 index 00000000..ceb0e505 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/CanvasGradient.cc @@ -0,0 +1,113 @@ +// Copyright (c) 2010 LearnBoost + +#include "CanvasGradient.h" +#include "InstanceData.h" + +#include "Canvas.h" +#include "color.h" + +using namespace Napi; + +/* + * Initialize CanvasGradient. + */ + +void +Gradient::Initialize(Napi::Env& env, Napi::Object& exports) { + Napi::HandleScope scope(env); + InstanceData* data = env.GetInstanceData(); + + Napi::Function ctor = DefineClass(env, "CanvasGradient", { + InstanceMethod<&Gradient::AddColorStop>("addColorStop", napi_default_method) + }); + + exports.Set("CanvasGradient", ctor); + data->CanvasGradientCtor = Napi::Persistent(ctor); +} + +/* + * Initialize a new CanvasGradient. + */ + +Gradient::Gradient(const Napi::CallbackInfo& info) : Napi::ObjectWrap(info), env(info.Env()) { + // Linear + if ( + 4 == info.Length() && + info[0].IsNumber() && + info[1].IsNumber() && + info[2].IsNumber() && + info[3].IsNumber() + ) { + double x0 = info[0].As().DoubleValue(); + double y0 = info[1].As().DoubleValue(); + double x1 = info[2].As().DoubleValue(); + double y1 = info[3].As().DoubleValue(); + _pattern = cairo_pattern_create_linear(x0, y0, x1, y1); + return; + } + + // Radial + if ( + 6 == info.Length() && + info[0].IsNumber() && + info[1].IsNumber() && + info[2].IsNumber() && + info[3].IsNumber() && + info[4].IsNumber() && + info[5].IsNumber() + ) { + double x0 = info[0].As().DoubleValue(); + double y0 = info[1].As().DoubleValue(); + double r0 = info[2].As().DoubleValue(); + double x1 = info[3].As().DoubleValue(); + double y1 = info[4].As().DoubleValue(); + double r1 = info[5].As().DoubleValue(); + _pattern = cairo_pattern_create_radial(x0, y0, r0, x1, y1, r1); + return; + } + + Napi::TypeError::New(env, "invalid arguments").ThrowAsJavaScriptException(); +} + +/* + * Add color stop. 
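+ * addColorStop(offset, color): the color string is parsed with
+ * rgba_from_string(); an unparsable color throws a TypeError.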
+ */ + +void +Gradient::AddColorStop(const Napi::CallbackInfo& info) { + if (!info[0].IsNumber()) { + Napi::TypeError::New(env, "offset required").ThrowAsJavaScriptException(); + return; + } + + if (!info[1].IsString()) { + Napi::TypeError::New(env, "color string required").ThrowAsJavaScriptException(); + return; + } + + short ok; + std::string str = info[1].As(); + uint32_t rgba = rgba_from_string(str.c_str(), &ok); + + if (ok) { + rgba_t color = rgba_create(rgba); + cairo_pattern_add_color_stop_rgba( + _pattern + , info[0].As().DoubleValue() + , color.r + , color.g + , color.b + , color.a); + } else { + Napi::TypeError::New(env, "parse color failed").ThrowAsJavaScriptException(); + } +} + + +/* + * Destroy the pattern. + */ + +Gradient::~Gradient() { + if (_pattern) cairo_pattern_destroy(_pattern); +} diff --git a/miniprogram/node_modules/canvas/src/CanvasGradient.h b/miniprogram/node_modules/canvas/src/CanvasGradient.h new file mode 100644 index 00000000..103e8074 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/CanvasGradient.h @@ -0,0 +1,20 @@ +// Copyright (c) 2010 LearnBoost + +#pragma once + +#include +#include + +class Gradient : public Napi::ObjectWrap { + public: + static void Initialize(Napi::Env& env, Napi::Object& target); + Gradient(const Napi::CallbackInfo& info); + void AddColorStop(const Napi::CallbackInfo& info); + inline cairo_pattern_t *pattern(){ return _pattern; } + ~Gradient(); + + Napi::Env env; + + private: + cairo_pattern_t *_pattern; +}; diff --git a/miniprogram/node_modules/canvas/src/CanvasPattern.cc b/miniprogram/node_modules/canvas/src/CanvasPattern.cc new file mode 100644 index 00000000..ec30b6f0 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/CanvasPattern.cc @@ -0,0 +1,129 @@ +// Copyright (c) 2010 LearnBoost + +#include "CanvasPattern.h" + +#include "Canvas.h" +#include "Image.h" +#include "InstanceData.h" + +using namespace Napi; + +const cairo_user_data_key_t *pattern_repeat_key; + +/* + * Initialize CanvasPattern. + */ + +void +Pattern::Initialize(Napi::Env& env, Napi::Object& exports) { + Napi::HandleScope scope(env); + InstanceData* data = env.GetInstanceData(); + + // Constructor + Napi::Function ctor = DefineClass(env, "CanvasPattern", { + InstanceMethod<&Pattern::setTransform>("setTransform", napi_default_method) + }); + + // Prototype + exports.Set("CanvasPattern", ctor); + data->CanvasPatternCtor = Napi::Persistent(ctor); +} + +/* + * Initialize a new CanvasPattern. 
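+ * Accepts an Image or Canvas plus an optional repetition string; anything
+ * other than "no-repeat", "repeat-x" or "repeat-y" keeps the default "repeat".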
+ */ + +Pattern::Pattern(const Napi::CallbackInfo& info) : ObjectWrap(info), env(info.Env()) { + if (!info[0].IsObject()) { + Napi::TypeError::New(env, "Image or Canvas expected").ThrowAsJavaScriptException(); + return; + } + + Napi::Object obj = info[0].As(); + InstanceData* data = env.GetInstanceData(); + cairo_surface_t *surface; + + // Image + if (obj.InstanceOf(data->ImageCtor.Value()).UnwrapOr(false)) { + Image *img = Image::Unwrap(obj); + if (!img->isComplete()) { + Napi::Error::New(env, "Image given has not completed loading").ThrowAsJavaScriptException(); + return; + } + surface = img->surface(); + + // Canvas + } else if (obj.InstanceOf(data->CanvasCtor.Value()).UnwrapOr(false)) { + Canvas *canvas = Canvas::Unwrap(obj); + surface = canvas->surface(); + // Invalid + } else { + if (!env.IsExceptionPending()) { + Napi::TypeError::New(env, "Image or Canvas expected").ThrowAsJavaScriptException(); + } + return; + } + _pattern = cairo_pattern_create_for_surface(surface); + + if (info[1].IsString()) { + if ("no-repeat" == info[1].As().Utf8Value()) { + _repeat = NO_REPEAT; + } else if ("repeat-x" == info[1].As().Utf8Value()) { + _repeat = REPEAT_X; + } else if ("repeat-y" == info[1].As().Utf8Value()) { + _repeat = REPEAT_Y; + } + } + + cairo_pattern_set_user_data(_pattern, pattern_repeat_key, &_repeat, NULL); +} + +/* + * Set the pattern-space to user-space transform. + */ +void +Pattern::setTransform(const Napi::CallbackInfo& info) { + if (!info[0].IsObject()) { + Napi::TypeError::New(env, "Expected DOMMatrix").ThrowAsJavaScriptException(); + return; + } + + Napi::Object mat = info[0].As(); + + InstanceData* data = env.GetInstanceData(); + if (!mat.InstanceOf(data->DOMMatrixCtor.Value()).UnwrapOr(false)) { + if (!env.IsExceptionPending()) { + Napi::TypeError::New(env, "Expected DOMMatrix").ThrowAsJavaScriptException(); + } + return; + } + + Napi::Value one = Napi::Number::New(env, 1); + Napi::Value zero = Napi::Number::New(env, 0); + + cairo_matrix_t matrix; + cairo_matrix_init(&matrix, + mat.Get("a").UnwrapOr(one).As().DoubleValue(), + mat.Get("b").UnwrapOr(zero).As().DoubleValue(), + mat.Get("c").UnwrapOr(zero).As().DoubleValue(), + mat.Get("d").UnwrapOr(one).As().DoubleValue(), + mat.Get("e").UnwrapOr(zero).As().DoubleValue(), + mat.Get("f").UnwrapOr(zero).As().DoubleValue() + ); + + cairo_matrix_invert(&matrix); + cairo_pattern_set_matrix(_pattern, &matrix); +} + +repeat_type_t Pattern::get_repeat_type_for_cairo_pattern(cairo_pattern_t *pattern) { + void *ud = cairo_pattern_get_user_data(pattern, pattern_repeat_key); + return *reinterpret_cast(ud); +} + +/* + * Destroy the pattern. + */ + +Pattern::~Pattern() { + if (_pattern) cairo_pattern_destroy(_pattern); +} diff --git a/miniprogram/node_modules/canvas/src/CanvasPattern.h b/miniprogram/node_modules/canvas/src/CanvasPattern.h new file mode 100644 index 00000000..1f768e03 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/CanvasPattern.h @@ -0,0 +1,33 @@ +// Copyright (c) 2011 LearnBoost + +#pragma once + +#include +#include + +/* + * Canvas types. 
+ */ + +typedef enum { + NO_REPEAT, // match CAIRO_EXTEND_NONE + REPEAT, // match CAIRO_EXTEND_REPEAT + REPEAT_X, // needs custom processing + REPEAT_Y // needs custom processing +} repeat_type_t; + +extern const cairo_user_data_key_t *pattern_repeat_key; + +class Pattern : public Napi::ObjectWrap { + public: + Pattern(const Napi::CallbackInfo& info); + static void Initialize(Napi::Env& env, Napi::Object& target); + void setTransform(const Napi::CallbackInfo& info); + static repeat_type_t get_repeat_type_for_cairo_pattern(cairo_pattern_t *pattern); + inline cairo_pattern_t *pattern(){ return _pattern; } + ~Pattern(); + Napi::Env env; + private: + cairo_pattern_t *_pattern; + repeat_type_t _repeat = REPEAT; +}; diff --git a/miniprogram/node_modules/canvas/src/CanvasRenderingContext2d.cc b/miniprogram/node_modules/canvas/src/CanvasRenderingContext2d.cc new file mode 100644 index 00000000..3f52c1fd --- /dev/null +++ b/miniprogram/node_modules/canvas/src/CanvasRenderingContext2d.cc @@ -0,0 +1,3502 @@ +// Copyright (c) 2010 LearnBoost + +#include "CanvasRenderingContext2d.h" + +#include +#include "backend/ImageBackend.h" +#include +#include "Canvas.h" +#include "CanvasGradient.h" +#include "CanvasPattern.h" +#include "InstanceData.h" +#include "FontParser.h" +#include +#include +#include "Image.h" +#include "ImageData.h" +#include +#include +#include "Point.h" +#include +#include "Util.h" +#include + +/* + * Rectangle arg assertions. + */ + +#define RECT_ARGS \ + double args[4]; \ + if(!checkArgs(info, args, 4)) \ + return; \ + double x = args[0]; \ + double y = args[1]; \ + double width = args[2]; \ + double height = args[3]; + +constexpr double twoPi = M_PI * 2.; + +/* + * Simple helper macro for a rather verbose function call. + */ + +#define PANGO_LAYOUT_GET_METRICS(LAYOUT) pango_context_get_metrics( \ + pango_layout_get_context(LAYOUT), \ + pango_layout_get_font_description(LAYOUT), \ + pango_language_from_string(state->lang.c_str())) + +inline static bool checkArgs(const Napi::CallbackInfo&info, double *args, int argsNum, int offset = 0){ + Napi::Env env = info.Env(); + int argsEnd = std::min(9, offset + argsNum); + bool areArgsValid = true; + + napi_value argv[9]; + size_t argc = 9; + napi_get_cb_info(env, static_cast(info), &argc, argv, nullptr, nullptr); + + for (int i = offset; i < argsEnd; i++) { + napi_valuetype type; + double val = 0; + + napi_typeof(env, argv[i], &type); + if (type == napi_number) { + // fast path + napi_get_value_double(env, argv[i], &val); + } else { + napi_value num; + if (napi_coerce_to_number(env, argv[i], &num) == napi_ok) { + napi_get_value_double(env, num, &val); + } + } + + if (areArgsValid) { + if (!std::isfinite(val)) { + // We should continue the loop instead of returning immediately + // See https://html.spec.whatwg.org/multipage/canvas.html + + areArgsValid = false; + continue; + } + + args[i - offset] = val; + } + } + + return areArgsValid; +} + +/* + * Initialize Context2d. 
+ */ + +void +Context2d::Initialize(Napi::Env& env, Napi::Object& exports) { + Napi::HandleScope scope(env); + InstanceData* data = env.GetInstanceData(); + + Napi::Function ctor = DefineClass(env, "CanvasRenderingContext2D", { + InstanceMethod<&Context2d::DrawImage>("drawImage", napi_default_method), + InstanceMethod<&Context2d::PutImageData>("putImageData", napi_default_method), + InstanceMethod<&Context2d::GetImageData>("getImageData", napi_default_method), + InstanceMethod<&Context2d::CreateImageData>("createImageData", napi_default_method), + InstanceMethod<&Context2d::AddPage>("addPage", napi_default_method), + InstanceMethod<&Context2d::Save>("save", napi_default_method), + InstanceMethod<&Context2d::Restore>("restore", napi_default_method), + InstanceMethod<&Context2d::Rotate>("rotate", napi_default_method), + InstanceMethod<&Context2d::Translate>("translate", napi_default_method), + InstanceMethod<&Context2d::Transform>("transform", napi_default_method), + InstanceMethod<&Context2d::GetTransform>("getTransform", napi_default_method), + InstanceMethod<&Context2d::ResetTransform>("resetTransform", napi_default_method), + InstanceMethod<&Context2d::SetTransform>("setTransform", napi_default_method), + InstanceMethod<&Context2d::IsPointInPath>("isPointInPath", napi_default_method), + InstanceMethod<&Context2d::Scale>("scale", napi_default_method), + InstanceMethod<&Context2d::Clip>("clip", napi_default_method), + InstanceMethod<&Context2d::Fill>("fill", napi_default_method), + InstanceMethod<&Context2d::Stroke>("stroke", napi_default_method), + InstanceMethod<&Context2d::FillText>("fillText", napi_default_method), + InstanceMethod<&Context2d::StrokeText>("strokeText", napi_default_method), + InstanceMethod<&Context2d::FillRect>("fillRect", napi_default_method), + InstanceMethod<&Context2d::StrokeRect>("strokeRect", napi_default_method), + InstanceMethod<&Context2d::ClearRect>("clearRect", napi_default_method), + InstanceMethod<&Context2d::Rect>("rect", napi_default_method), + InstanceMethod<&Context2d::RoundRect>("roundRect", napi_default_method), + InstanceMethod<&Context2d::MeasureText>("measureText", napi_default_method), + InstanceMethod<&Context2d::MoveTo>("moveTo", napi_default_method), + InstanceMethod<&Context2d::LineTo>("lineTo", napi_default_method), + InstanceMethod<&Context2d::BezierCurveTo>("bezierCurveTo", napi_default_method), + InstanceMethod<&Context2d::QuadraticCurveTo>("quadraticCurveTo", napi_default_method), + InstanceMethod<&Context2d::BeginPath>("beginPath", napi_default_method), + InstanceMethod<&Context2d::ClosePath>("closePath", napi_default_method), + InstanceMethod<&Context2d::Arc>("arc", napi_default_method), + InstanceMethod<&Context2d::ArcTo>("arcTo", napi_default_method), + InstanceMethod<&Context2d::Ellipse>("ellipse", napi_default_method), + InstanceMethod<&Context2d::SetLineDash>("setLineDash", napi_default_method), + InstanceMethod<&Context2d::GetLineDash>("getLineDash", napi_default_method), + InstanceMethod<&Context2d::CreatePattern>("createPattern", napi_default_method), + InstanceMethod<&Context2d::CreateLinearGradient>("createLinearGradient", napi_default_method), + InstanceMethod<&Context2d::CreateRadialGradient>("createRadialGradient", napi_default_method), + #if CAIRO_VERSION >= CAIRO_VERSION_ENCODE(1, 16, 0) + InstanceMethod<&Context2d::BeginTag>("beginTag", napi_default_method), + InstanceMethod<&Context2d::EndTag>("endTag", napi_default_method), + #endif + InstanceAccessor<&Context2d::GetFormat>("pixelFormat", napi_default_jsproperty), + 
InstanceAccessor<&Context2d::GetPatternQuality, &Context2d::SetPatternQuality>("patternQuality", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetImageSmoothingEnabled, &Context2d::SetImageSmoothingEnabled>("imageSmoothingEnabled", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetGlobalCompositeOperation, &Context2d::SetGlobalCompositeOperation>("globalCompositeOperation", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetGlobalAlpha, &Context2d::SetGlobalAlpha>("globalAlpha", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetShadowColor, &Context2d::SetShadowColor>("shadowColor", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetMiterLimit, &Context2d::SetMiterLimit>("miterLimit", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetLineWidth, &Context2d::SetLineWidth>("lineWidth", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetLineCap, &Context2d::SetLineCap>("lineCap", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetLineJoin, &Context2d::SetLineJoin>("lineJoin", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetLineDashOffset, &Context2d::SetLineDashOffset>("lineDashOffset", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetShadowOffsetX, &Context2d::SetShadowOffsetX>("shadowOffsetX", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetShadowOffsetY, &Context2d::SetShadowOffsetY>("shadowOffsetY", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetShadowBlur, &Context2d::SetShadowBlur>("shadowBlur", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetAntiAlias, &Context2d::SetAntiAlias>("antialias", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetTextDrawingMode, &Context2d::SetTextDrawingMode>("textDrawingMode", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetQuality, &Context2d::SetQuality>("quality", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetCurrentTransform, &Context2d::SetCurrentTransform>("currentTransform", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetFillStyle, &Context2d::SetFillStyle>("fillStyle", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetStrokeStyle, &Context2d::SetStrokeStyle>("strokeStyle", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetFont, &Context2d::SetFont>("font", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetTextBaseline, &Context2d::SetTextBaseline>("textBaseline", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetTextAlign, &Context2d::SetTextAlign>("textAlign", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetDirection, &Context2d::SetDirection>("direction", napi_default_jsproperty), + InstanceAccessor<&Context2d::GetLanguage, &Context2d::SetLanguage>("lang", napi_default_jsproperty) + }); + + exports.Set("CanvasRenderingContext2d", ctor); + data->Context2dCtor = Napi::Persistent(ctor); +} + +/* + * Create a cairo context. 
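+ * Accepts (canvas[, contextAttributes]); for image backends the attributes
+ * may select a pixelFormat ("RGBA32", "RGB24", "A8", "RGB16_565", "A1",
+ * "RGB30") or pass alpha: false, which forces RGB24.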
+ */ + +Context2d::Context2d(const Napi::CallbackInfo& info) : Napi::ObjectWrap(info), env(info.Env()) { + InstanceData* data = env.GetInstanceData(); + + if (!info[0].IsObject()) { + Napi::TypeError::New(env, "Canvas expected").ThrowAsJavaScriptException(); + return; + } + + Napi::Object obj = info[0].As(); + if (!obj.InstanceOf(data->CanvasCtor.Value()).UnwrapOr(false)) { + if (!env.IsExceptionPending()) { + Napi::TypeError::New(env, "Canvas expected").ThrowAsJavaScriptException(); + } + return; + } + + _canvas = Canvas::Unwrap(obj); + + bool isImageBackend = _canvas->backend()->getName() == "image"; + if (isImageBackend) { + cairo_format_t format = ImageBackend::DEFAULT_FORMAT; + + if (info[1].IsObject()) { + Napi::Object ctxAttributes = info[1].As(); + Napi::Value pixelFormat; + + if (ctxAttributes.Get("pixelFormat").UnwrapTo(&pixelFormat) && pixelFormat.IsString()) { + std::string utf8PixelFormat = pixelFormat.As(); + if (utf8PixelFormat == "RGBA32") format = CAIRO_FORMAT_ARGB32; + else if (utf8PixelFormat == "RGB24") format = CAIRO_FORMAT_RGB24; + else if (utf8PixelFormat == "A8") format = CAIRO_FORMAT_A8; + else if (utf8PixelFormat == "RGB16_565") format = CAIRO_FORMAT_RGB16_565; + else if (utf8PixelFormat == "A1") format = CAIRO_FORMAT_A1; +#ifdef CAIRO_FORMAT_RGB30 + else if (utf8PixelFormat == "RGB30") format = CAIRO_FORMAT_RGB30; +#endif + } + + // alpha: false forces use of RGB24 + Napi::Value alpha; + + if (ctxAttributes.Get("alpha").UnwrapTo(&alpha) && alpha.IsBoolean() && !alpha.As().Value()) { + format = CAIRO_FORMAT_RGB24; + } + } + + static_cast(_canvas->backend())->setFormat(format); + } + + _context = _canvas->createCairoContext(); + _layout = pango_cairo_create_layout(_context); + + // As of January 2023, Pango rounds glyph positions which renders text wider + // or narrower than the browser. See #2184 for more information +#if PANGO_VERSION_CHECK(1, 44, 0) + pango_context_set_round_glyph_positions(pango_layout_get_context(_layout), FALSE); +#endif + + pango_layout_set_auto_dir(_layout, FALSE); + + states.emplace(); + state = &states.top(); + pango_layout_set_font_description(_layout, state->fontDescription); +} + +/* + * Destroy cairo context. + */ + +Context2d::~Context2d() { + if (_layout) g_object_unref(_layout); + if (_context) cairo_destroy(_context); + _resetPersistentHandles(); +} + +/* + * Reset canvas state. + */ + +void Context2d::resetState() { + states.pop(); + states.emplace(); + pango_layout_set_font_description(_layout, state->fontDescription); + _resetPersistentHandles(); +} + +void Context2d::_resetPersistentHandles() { + _fillStyle.Reset(); + _strokeStyle.Reset(); +} + +/* + * Save cairo / canvas state. + */ + +void +Context2d::save() { + cairo_save(_context); + states.emplace(states.top()); + state = &states.top(); +} + +/* + * Restore cairo / canvas state. + */ + +void +Context2d::restore() { + if (states.size() > 1) { + cairo_restore(_context); + states.pop(); + state = &states.top(); + pango_layout_set_font_description(_layout, state->fontDescription); + } +} + +/* + * Save flat path. + */ + +void +Context2d::savePath() { + _path = cairo_copy_path_flat(_context); + cairo_new_path(_context); +} + +/* + * Restore flat path. 
+ */ + +void +Context2d::restorePath() { + cairo_new_path(_context); + cairo_append_path(_context, _path); + cairo_path_destroy(_path); +} + +/* + * Create temporary surface for gradient or pattern transparency + */ +cairo_pattern_t* +create_transparent_gradient(cairo_pattern_t *source, float alpha) { + double x0; + double y0; + double x1; + double y1; + double r0; + double r1; + int count; + int i; + double offset; + double r; + double g; + double b; + double a; + cairo_pattern_t *newGradient; + cairo_pattern_type_t type = cairo_pattern_get_type(source); + cairo_pattern_get_color_stop_count(source, &count); + if (type == CAIRO_PATTERN_TYPE_LINEAR) { + cairo_pattern_get_linear_points (source, &x0, &y0, &x1, &y1); + newGradient = cairo_pattern_create_linear(x0, y0, x1, y1); + } else if (type == CAIRO_PATTERN_TYPE_RADIAL) { + cairo_pattern_get_radial_circles(source, &x0, &y0, &r0, &x1, &y1, &r1); + newGradient = cairo_pattern_create_radial(x0, y0, r0, x1, y1, r1); + } else { + return NULL; + } + for ( i = 0; i < count; i++ ) { + cairo_pattern_get_color_stop_rgba(source, i, &offset, &r, &g, &b, &a); + cairo_pattern_add_color_stop_rgba(newGradient, offset, r, g, b, a * alpha); + } + return newGradient; +} + +cairo_pattern_t* +create_transparent_pattern(cairo_pattern_t *source, float alpha) { + cairo_surface_t *surface; + cairo_pattern_get_surface(source, &surface); + int width = cairo_image_surface_get_width(surface); + int height = cairo_image_surface_get_height(surface); + cairo_surface_t *mask_surface = cairo_image_surface_create( + CAIRO_FORMAT_ARGB32, + width, + height); + cairo_t *mask_context = cairo_create(mask_surface); + if (cairo_status(mask_context) != CAIRO_STATUS_SUCCESS) { + return NULL; + } + cairo_set_source(mask_context, source); + cairo_paint_with_alpha(mask_context, alpha); + cairo_destroy(mask_context); + cairo_pattern_t* newPattern = cairo_pattern_create_for_surface(mask_surface); + cairo_surface_destroy(mask_surface); + return newPattern; +} + +/* + * Fill and apply shadow. 
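+ * When globalAlpha < 1 the source is first copied into a temporary pattern or
+ * gradient with scaled alpha (see the helpers above); repeat-x / repeat-y
+ * patterns are additionally clipped to a rectangle sized from the path extents
+ * and the pattern surface.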
+ */ + +void +Context2d::setFillRule(Napi::Value value) { + cairo_fill_rule_t rule = CAIRO_FILL_RULE_WINDING; + if (value.IsString()) { + std::string str = value.As().Utf8Value(); + if (str == "evenodd") { + rule = CAIRO_FILL_RULE_EVEN_ODD; + } + } + cairo_set_fill_rule(_context, rule); +} + +void +Context2d::fill(bool preserve) { + cairo_pattern_t *new_pattern; + bool needsRestore = false; + if (state->fillPattern) { + if (state->globalAlpha < 1) { + new_pattern = create_transparent_pattern(state->fillPattern, state->globalAlpha); + if (new_pattern == NULL) { + Napi::Error::New(env, "Failed to initialize context").ThrowAsJavaScriptException(); + // failed to allocate + return; + } + cairo_set_source(_context, new_pattern); + cairo_pattern_destroy(new_pattern); + } else { + cairo_pattern_set_filter(state->fillPattern, state->patternQuality); + cairo_set_source(_context, state->fillPattern); + } + repeat_type_t repeat = Pattern::get_repeat_type_for_cairo_pattern(state->fillPattern); + if (repeat == NO_REPEAT) { + cairo_pattern_set_extend(cairo_get_source(_context), CAIRO_EXTEND_NONE); + } else if (repeat == REPEAT) { + cairo_pattern_set_extend(cairo_get_source(_context), CAIRO_EXTEND_REPEAT); + } else { + cairo_save(_context); + cairo_path_t *savedPath = cairo_copy_path(_context); + cairo_surface_t *patternSurface = nullptr; + cairo_pattern_get_surface(cairo_get_source(_context), &patternSurface); + + double width, height; + if (repeat == REPEAT_X) { + double x1, x2; + cairo_path_extents(_context, &x1, nullptr, &x2, nullptr); + width = x2 - x1; + height = cairo_image_surface_get_height(patternSurface); + } else { + double y1, y2; + cairo_path_extents(_context, nullptr, &y1, nullptr, &y2); + width = cairo_image_surface_get_width(patternSurface); + height = y2 - y1; + } + + cairo_new_path(_context); + cairo_rectangle(_context, 0, 0, width, height); + cairo_clip(_context); + cairo_append_path(_context, savedPath); + cairo_path_destroy(savedPath); + cairo_pattern_set_extend(cairo_get_source(_context), CAIRO_EXTEND_REPEAT); + needsRestore = true; + } + } else if (state->fillGradient) { + if (state->globalAlpha < 1) { + new_pattern = create_transparent_gradient(state->fillGradient, state->globalAlpha); + if (new_pattern == NULL) { + Napi::Error::New(env, "Unexpected gradient type").ThrowAsJavaScriptException(); + // failed to recognize gradient + return; + } + cairo_pattern_set_filter(new_pattern, state->patternQuality); + cairo_set_source(_context, new_pattern); + cairo_pattern_destroy(new_pattern); + } else { + cairo_pattern_set_filter(state->fillGradient, state->patternQuality); + cairo_set_source(_context, state->fillGradient); + } + } else { + setSourceRGBA(state->fill); + } + if (preserve) { + hasShadow() + ? shadow(cairo_fill_preserve) + : cairo_fill_preserve(_context); + } else { + hasShadow() + ? shadow(cairo_fill) + : cairo_fill(_context); + } + if (needsRestore) { + cairo_restore(_context); + } +} + +/* + * Stroke and apply shadow. 
+ */ + +void +Context2d::stroke(bool preserve) { + cairo_pattern_t *new_pattern; + if (state->strokePattern) { + if (state->globalAlpha < 1) { + new_pattern = create_transparent_pattern(state->strokePattern, state->globalAlpha); + if (new_pattern == NULL) { + Napi::Error::New(env, "Failed to initialize context").ThrowAsJavaScriptException(); + // failed to allocate + return; + } + cairo_set_source(_context, new_pattern); + cairo_pattern_destroy(new_pattern); + } else { + cairo_pattern_set_filter(state->strokePattern, state->patternQuality); + cairo_set_source(_context, state->strokePattern); + } + repeat_type_t repeat = Pattern::get_repeat_type_for_cairo_pattern(state->strokePattern); + if (NO_REPEAT == repeat) { + cairo_pattern_set_extend(cairo_get_source(_context), CAIRO_EXTEND_NONE); + } else { + cairo_pattern_set_extend(cairo_get_source(_context), CAIRO_EXTEND_REPEAT); + } + } else if (state->strokeGradient) { + if (state->globalAlpha < 1) { + new_pattern = create_transparent_gradient(state->strokeGradient, state->globalAlpha); + if (new_pattern == NULL) { + Napi::Error::New(env, "Unexpected gradient type").ThrowAsJavaScriptException(); + // failed to recognize gradient + return; + } + cairo_pattern_set_filter(new_pattern, state->patternQuality); + cairo_set_source(_context, new_pattern); + cairo_pattern_destroy(new_pattern); + } else { + cairo_pattern_set_filter(state->strokeGradient, state->patternQuality); + cairo_set_source(_context, state->strokeGradient); + } + } else { + setSourceRGBA(state->stroke); + } + + if (preserve) { + hasShadow() + ? shadow(cairo_stroke_preserve) + : cairo_stroke_preserve(_context); + } else { + hasShadow() + ? shadow(cairo_stroke) + : cairo_stroke(_context); + } +} + +/* + * Apply shadow with the given draw fn. + */ + +void +Context2d::shadow(void (fn)(cairo_t *cr)) { + cairo_path_t *path = cairo_copy_path_flat(_context); + cairo_save(_context); + + // shadowOffset is unaffected by current transform + cairo_matrix_t path_matrix; + cairo_get_matrix(_context, &path_matrix); + cairo_identity_matrix(_context); + + // Apply shadow + cairo_push_group(_context); + + // No need to invoke blur if shadowBlur is 0 + if (state->shadowBlur) { + // find out extent of path + double x1, y1, x2, y2; + if (fn == cairo_fill || fn == cairo_fill_preserve) { + cairo_fill_extents(_context, &x1, &y1, &x2, &y2); + } else { + cairo_stroke_extents(_context, &x1, &y1, &x2, &y2); + } + + // create new image surface that size + padding for blurring + double dx = x2-x1, dy = y2-y1; + cairo_user_to_device_distance(_context, &dx, &dy); + int pad = state->shadowBlur * 2; + cairo_surface_t *shadow_surface = cairo_image_surface_create( + CAIRO_FORMAT_ARGB32, + dx + 2 * pad, + dy + 2 * pad); + cairo_t *shadow_context = cairo_create(shadow_surface); + + // transform path to the right place + cairo_translate(shadow_context, pad-x1, pad-y1); + cairo_transform(shadow_context, &path_matrix); + + // set lineCap lineJoin lineDash + cairo_set_line_cap(shadow_context, cairo_get_line_cap(_context)); + cairo_set_line_join(shadow_context, cairo_get_line_join(_context)); + + double offset; + int dashes = cairo_get_dash_count(_context); + std::vector a(dashes); + cairo_get_dash(_context, a.data(), &offset); + cairo_set_dash(shadow_context, a.data(), dashes, offset); + + // draw the path and blur + cairo_set_line_width(shadow_context, cairo_get_line_width(_context)); + cairo_new_path(shadow_context); + cairo_append_path(shadow_context, path); + setSourceRGBA(shadow_context, state->shadow); + 
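+    // Rasterize the path in the shadow color onto the padded scratch surface,
+    // blur it, then composite the result back at the shadow offset.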
fn(shadow_context);
+    blur(shadow_surface, state->shadowBlur);
+
+    // paint to original context
+    cairo_set_source_surface(_context, shadow_surface,
+      x1 - pad + state->shadowOffsetX + 1,
+      y1 - pad + state->shadowOffsetY + 1);
+    cairo_paint(_context);
+    cairo_destroy(shadow_context);
+    cairo_surface_destroy(shadow_surface);
+  } else {
+    // Offset first, then apply path's transform
+    cairo_translate(
+        _context
+      , state->shadowOffsetX
+      , state->shadowOffsetY);
+    cairo_transform(_context, &path_matrix);
+
+    // Apply shadow
+    cairo_new_path(_context);
+    cairo_append_path(_context, path);
+    setSourceRGBA(state->shadow);
+
+    fn(_context);
+  }
+
+  // Paint the shadow
+  cairo_pop_group_to_source(_context);
+  cairo_paint(_context);
+
+  // Restore state
+  cairo_restore(_context);
+  cairo_new_path(_context);
+  cairo_append_path(_context, path);
+  fn(_context);
+
+  cairo_path_destroy(path);
+}
+
+/*
+ * Set source RGBA for the current context
+ */
+
+void
+Context2d::setSourceRGBA(rgba_t color) {
+  setSourceRGBA(_context, color);
+}
+
+/*
+ * Set source RGBA
+ */
+
+void
+Context2d::setSourceRGBA(cairo_t *ctx, rgba_t color) {
+  cairo_set_source_rgba(
+      ctx
+    , color.r
+    , color.g
+    , color.b
+    , color.a * state->globalAlpha);
+}
+
+/*
+ * Check if the context has a drawable shadow.
+ */
+
+bool
+Context2d::hasShadow() {
+  return state->shadow.a
+    && (state->shadowBlur || state->shadowOffsetX || state->shadowOffsetY);
+}
+
+/*
+ * Blur the given surface with the given radius.
+ */
+
+void
+Context2d::blur(cairo_surface_t *surface, int radius) {
+  // Steve Hanov, 2009
+  // Released into the public domain.
+  radius = radius * 0.57735f + 0.5f;
+  // get width, height
+  int width = cairo_image_surface_get_width( surface );
+  int height = cairo_image_surface_get_height( surface );
+  const unsigned int size = width * height * sizeof(unsigned);
+  unsigned* precalc = (unsigned*)malloc(size);
+  cairo_surface_flush( surface );
+  unsigned char* src = cairo_image_surface_get_data( surface );
+  double mul=1.f/((radius*2)*(radius*2));
+  int channel;
+
+  // The number of times to perform the averaging. According to wikipedia,
+  // three iterations is good enough to pass for a gaussian.
+  const int MAX_ITERATIONS = 3;
+  int iteration;
+
+  for ( iteration = 0; iteration < MAX_ITERATIONS; iteration++ ) {
+    for( channel = 0; channel < 4; channel++ ) {
+      int x,y;
+
+      // precomputation step.
+      unsigned char* pix = src;
+      unsigned* pre = precalc;
+
+      bool modified = false;
+
+      pix += channel;
+      for (y=0;y<height;y++) {
+        for (x=0;x<width;x++) {
+          int tot=pix[0];
+          if (x>0) tot+=pre[-1];
+          if (y>0) tot+=pre[-width];
+          if (x>0 && y>0) tot-=pre[-width-1];
+          *pre++=tot;
+          if (!modified) modified = true;
+          pix += 4;
+        }
+      }
+
+      if (!modified) {
+        memset(precalc, 0, size);
+      }
+
+      // blur step.
+      pix = src + (int)radius * width * 4 + (int)radius * 4 + channel;
+      for (y=radius;y<height-radius;y++) {
+        for (x=radius;x<width-radius;x++) {
+          int l = x < radius ? 0 : x - radius;
+          int t = y < radius ? 0 : y - radius;
+          int r = x + radius >= width ? width - 1 : x + radius;
+          int b = y + radius >= height ? height - 1 : y + radius;
+          int tot = precalc[r+b*width] + precalc[l+t*width] -
+            precalc[l+b*width] - precalc[r+t*width];
+          *pix=(unsigned char)(tot*mul);
+          pix += 4;
+        }
+        pix += (int)radius * 2 * 4;
+      }
+    }
+  }
+
+  cairo_surface_mark_dirty(surface);
+  free(precalc);
+}
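
blur() above averages each channel with a summed-area table ("precalc"): every entry holds the sum of all pixels above and to the left, so the sum over any axis-aligned box falls out of four lookups. A minimal standalone sketch of that identity (independent of the cairo surface handling; the toy image and box bounds are made up):

#include <cstdio>
#include <vector>

int main() {
  const int w = 6, h = 4;
  // toy single-channel image
  std::vector<unsigned> img(w * h);
  for (int i = 0; i < w * h; i++) img[i] = i % 7;

  // summed-area table: pre[y*w+x] = sum of img over rows 0..y and columns 0..x
  std::vector<unsigned> pre(w * h);
  for (int y = 0; y < h; y++)
    for (int x = 0; x < w; x++) {
      unsigned tot = img[y * w + x];
      if (x > 0) tot += pre[y * w + x - 1];
      if (y > 0) tot += pre[(y - 1) * w + x];
      if (x > 0 && y > 0) tot -= pre[(y - 1) * w + x - 1];
      pre[y * w + x] = tot;
    }

  // box sum from four corner lookups, the same expression blur() uses:
  // pre[r,b] + pre[l,t] - pre[l,b] - pre[r,t]
  int l = 1, t = 1, r = 4, b = 3;
  unsigned box = pre[b * w + r] + pre[t * w + l] - pre[b * w + l] - pre[t * w + r];

  // brute-force check of the same box (rows t+1..b, columns l+1..r,
  // which is exactly what the corner lookups leave after cancellation)
  unsigned check = 0;
  for (int y = t + 1; y <= b; y++)
    for (int x = l + 1; x <= r; x++) check += img[y * w + x];

  printf("table: %u  brute force: %u\n", box, check);
  return 0;
}

+
+/*
+* Get format (string).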
+*/ + +Napi::Value +Context2d::GetFormat(const Napi::CallbackInfo& info) { + std::string pixelFormatString; + switch (canvas()->backend()->getFormat()) { + case CAIRO_FORMAT_ARGB32: pixelFormatString = "RGBA32"; break; + case CAIRO_FORMAT_RGB24: pixelFormatString = "RGB24"; break; + case CAIRO_FORMAT_A8: pixelFormatString = "A8"; break; + case CAIRO_FORMAT_A1: pixelFormatString = "A1"; break; + case CAIRO_FORMAT_RGB16_565: pixelFormatString = "RGB16_565"; break; +#ifdef CAIRO_FORMAT_RGB30 + case CAIRO_FORMAT_RGB30: pixelFormatString = "RGB30"; break; +#endif + default: return env.Null(); + } + return Napi::String::New(env, pixelFormatString); +} + +/* + * Create a new page. + */ + +void +Context2d::AddPage(const Napi::CallbackInfo& info) { + if (canvas()->backend()->getName() != "pdf") { + Napi::Error::New(env, "only PDF canvases support .addPage()").ThrowAsJavaScriptException(); + return; + } + cairo_show_page(context()); + Napi::Number zero = Napi::Number::New(env, 0); + int width = info[0].ToNumber().UnwrapOr(zero).Int32Value(); + int height = info[1].ToNumber().UnwrapOr(zero).Int32Value(); + if (width < 1) width = canvas()->getWidth(); + if (height < 1) height = canvas()->getHeight(); + cairo_pdf_surface_set_size(canvas()->surface(), width, height); +} + +/* + * Get text direction. + */ +Napi::Value +Context2d::GetDirection(const Napi::CallbackInfo& info) { + return Napi::String::New(env, state->direction); +} + +/* + * Set text direction. + */ +void +Context2d::SetDirection(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (!value.IsString()) return; + + std::string dir = value.As(); + if (dir != "ltr" && dir != "rtl") return; + + state->direction = dir; +} + +/* + * Get language. + */ +Napi::Value +Context2d::GetLanguage(const Napi::CallbackInfo& info) { + return Napi::String::New(env, state->lang); +} + +/* + * Set language. + */ +void +Context2d::SetLanguage(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (!value.IsString()) return; + + std::string lang = value.As(); + state->lang = lang; +} + +/* + * Put image data. 
+ * + * - imageData, dx, dy + * - imageData, dx, dy, sx, sy, sw, sh + * + */ + +void +Context2d::PutImageData(const Napi::CallbackInfo& info) { + if (!info[0].IsObject()) { + Napi::TypeError::New(env, "ImageData expected").ThrowAsJavaScriptException(); + return; + } + Napi::Object obj = info[0].As(); + InstanceData* data = env.GetInstanceData(); + if (!obj.InstanceOf(data->ImageDataCtor.Value()).UnwrapOr(false)) { + if (!env.IsExceptionPending()) { + Napi::TypeError::New(env, "ImageData expected").ThrowAsJavaScriptException(); + } + return; + } + + ImageData *imageData = ImageData::Unwrap(obj); + Napi::Number zero = Napi::Number::New(env, 0); + + uint8_t *src = imageData->data(); + uint8_t *dst = canvas()->data(); + + int dstStride = canvas()->stride(); + int Bpp = dstStride / canvas()->getWidth(); + int srcStride = Bpp * imageData->width(); + + int sx = 0 + , sy = 0 + , sw = 0 + , sh = 0 + , dx = info[1].ToNumber().UnwrapOr(zero).Int32Value() + , dy = info[2].ToNumber().UnwrapOr(zero).Int32Value() + , rows + , cols; + + switch (info.Length()) { + // imageData, dx, dy + case 3: + sw = imageData->width(); + sh = imageData->height(); + break; + // imageData, dx, dy, sx, sy, sw, sh + case 7: + sx = info[3].ToNumber().UnwrapOr(zero).Int32Value(); + sy = info[4].ToNumber().UnwrapOr(zero).Int32Value(); + sw = info[5].ToNumber().UnwrapOr(zero).Int32Value(); + sh = info[6].ToNumber().UnwrapOr(zero).Int32Value(); + // fix up negative height, width + if (sw < 0) sx += sw, sw = -sw; + if (sh < 0) sy += sh, sh = -sh; + // clamp the left edge + if (sx < 0) sw += sx, sx = 0; + if (sy < 0) sh += sy, sy = 0; + // clamp the right edge + if (sx + sw > imageData->width()) sw = imageData->width() - sx; + if (sy + sh > imageData->height()) sh = imageData->height() - sy; + // start destination at source offset + dx += sx; + dy += sy; + break; + default: + Napi::Error::New(env, "invalid arguments").ThrowAsJavaScriptException(); + return; + } + + // chop off outlying source data + if (dx < 0) sw += dx, sx -= dx, dx = 0; + if (dy < 0) sh += dy, sy -= dy, dy = 0; + // clamp width at canvas size + // Need to wrap std::min calls using parens to prevent macro expansion on + // windows. See http://stackoverflow.com/questions/5004858/stdmin-gives-error + cols = (std::min)(sw, canvas()->getWidth() - dx); + rows = (std::min)(sh, canvas()->getHeight() - dy); + + if (cols <= 0 || rows <= 0) return; + + switch (canvas()->backend()->getFormat()) { + case CAIRO_FORMAT_ARGB32: { + src += sy * srcStride + sx * 4; + dst += dstStride * dy + 4 * dx; + for (int y = 0; y < rows; ++y) { + uint8_t *dstRow = dst; + uint8_t *srcRow = src; + for (int x = 0; x < cols; ++x) { + // rgba + uint8_t r = *srcRow++; + uint8_t g = *srcRow++; + uint8_t b = *srcRow++; + uint8_t a = *srcRow++; + + // argb + // performance optimization: fully transparent/opaque pixels can be + // processed more efficiently. 
+ if (a == 0) { + *dstRow++ = 0; + *dstRow++ = 0; + *dstRow++ = 0; + *dstRow++ = 0; + } else if (a == 255) { + *dstRow++ = b; + *dstRow++ = g; + *dstRow++ = r; + *dstRow++ = a; + } else { + float alpha = (float)a / 255; + *dstRow++ = b * alpha; + *dstRow++ = g * alpha; + *dstRow++ = r * alpha; + *dstRow++ = a; + } + } + dst += dstStride; + src += srcStride; + } + break; + } + case CAIRO_FORMAT_RGB24: { + src += sy * srcStride + sx * 4; + dst += dstStride * dy + 4 * dx; + for (int y = 0; y < rows; ++y) { + uint8_t *dstRow = dst; + uint8_t *srcRow = src; + for (int x = 0; x < cols; ++x) { + // rgba + uint8_t r = *srcRow++; + uint8_t g = *srcRow++; + uint8_t b = *srcRow++; + srcRow++; + + // argb + *dstRow++ = b; + *dstRow++ = g; + *dstRow++ = r; + *dstRow++ = 255; + } + dst += dstStride; + src += srcStride; + } + break; + } + case CAIRO_FORMAT_A8: { + src += sy * srcStride + sx; + dst += dstStride * dy + dx; + if (srcStride == dstStride && cols == dstStride) { + // fast path: strides are the same and doing a full-width put + memcpy(dst, src, cols * rows); + } else { + for (int y = 0; y < rows; ++y) { + memcpy(dst, src, cols); + dst += dstStride; + src += srcStride; + } + } + break; + } + case CAIRO_FORMAT_A1: { + // TODO Should this be totally packed, or maintain a stride divisible by 4? + Napi::Error::New(env, "putImageData for CANVAS_FORMAT_A1 is not yet implemented").ThrowAsJavaScriptException(); + + break; + } + case CAIRO_FORMAT_RGB16_565: { + src += sy * srcStride + sx * 2; + dst += dstStride * dy + 2 * dx; + for (int y = 0; y < rows; ++y) { + memcpy(dst, src, cols * 2); + dst += dstStride; + src += srcStride; + } + break; + } +#ifdef CAIRO_FORMAT_RGB30 + case CAIRO_FORMAT_RGB30: { + // TODO + Napi::Error::New(env, "putImageData for CANVAS_FORMAT_RGB30 is not yet implemented").ThrowAsJavaScriptException(); + + break; + } +#endif + default: { + Napi::Error::New(env, "Invalid pixel format or not an image canvas").ThrowAsJavaScriptException(); + return; + } + } + + cairo_surface_mark_dirty_rectangle( + canvas()->surface() + , dx + , dy + , cols + , rows); +} + +/* + * Get image data. 
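
PutImageData above converts the canvas-style straight (non-premultiplied) RGBA bytes into cairo's premultiplied ARGB32 layout, and GetImageData later reverses it. A standalone sketch of that round trip for one pixel (not tied to the surface code; the sample values are arbitrary) shows why small alpha values lose precision:

#include <cstdint>
#include <cstdio>

int main() {
  // one canvas pixel: straight (non-premultiplied) RGBA
  uint8_t r = 200, g = 100, b = 50, a = 128;

  // premultiply, as the ARGB32 branch of PutImageData does
  float alpha = (float)a / 255;
  uint8_t pr = r * alpha, pg = g * alpha, pb = b * alpha;

  // un-premultiply, as the ARGB32 branch of GetImageData does
  float alphaR = (float)255 / a;
  uint8_t ur = (int)((float)pr * alphaR);
  uint8_t ug = (int)((float)pg * alphaR);
  uint8_t ub = (int)((float)pb * alphaR);

  // the round trip is lossy: 8-bit truncation happens in both directions
  printf("in  r=%d g=%d b=%d a=%d\n", r, g, b, a);
  printf("pre r=%d g=%d b=%d\n", pr, pg, pb);
  printf("out r=%d g=%d b=%d\n", ur, ug, ub);
  return 0;
}
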
+ * + * - sx, sy, sw, sh + * + */ + +Napi::Value +Context2d::GetImageData(const Napi::CallbackInfo& info) { + Napi::Number zero = Napi::Number::New(env, 0); + Canvas *canvas = this->canvas(); + + int sx = info[0].ToNumber().UnwrapOr(zero).Int32Value(); + int sy = info[1].ToNumber().UnwrapOr(zero).Int32Value(); + int sw = info[2].ToNumber().UnwrapOr(zero).Int32Value(); + int sh = info[3].ToNumber().UnwrapOr(zero).Int32Value(); + + if (!sw) { + Napi::Error::New(env, "IndexSizeError: The source width is 0.").ThrowAsJavaScriptException(); + return env.Undefined(); + } + if (!sh) { + Napi::Error::New(env, "IndexSizeError: The source height is 0.").ThrowAsJavaScriptException(); + return env.Undefined(); + } + + int width = canvas->getWidth(); + int height = canvas->getHeight(); + + if (!width) { + Napi::TypeError::New(env, "Canvas width is 0").ThrowAsJavaScriptException(); + return env.Undefined(); + } + if (!height) { + Napi::TypeError::New(env, "Canvas height is 0").ThrowAsJavaScriptException(); + return env.Undefined(); + } + + // WebKit and Firefox have this behavior: + // Flip the coordinates so the origin is top/left-most: + if (sw < 0) { + sx += sw; + sw = -sw; + } + if (sh < 0) { + sy += sh; + sh = -sh; + } + + // Width and height to actually copy + int cw = sw; + int ch = sh; + // Offsets in the destination image + int ox = 0; + int oy = 0; + + // Clamp the copy width and height if the copy would go outside the image + if (sx + sw > width) cw = width - sx; + if (sy + sh > height) ch = height - sy; + + // Clamp the copy origin if the copy would go outside the image + if (sx < 0) { + ox = -sx; + cw += sx; + sx = 0; + } + if (sy < 0) { + oy = -sy; + ch += sy; + sy = 0; + } + + int srcStride = canvas->stride(); + int bpp = srcStride / width; + int size = sw * sh * bpp; + int dstStride = sw * bpp; + + uint8_t *src = canvas->data(); + + Napi::ArrayBuffer buffer = Napi::ArrayBuffer::New(env, size); + Napi::TypedArray dataArray; + + if (canvas->backend()->getFormat() == CAIRO_FORMAT_RGB16_565) { + dataArray = Napi::Uint16Array::New(env, size >> 1, buffer, 0); + } else { + dataArray = Napi::Uint8Array::New(env, size, buffer, 0, napi_uint8_clamped_array); + } + + uint8_t *dst = (uint8_t *)buffer.Data(); + + if (!(cw > 0 && ch > 0)) goto return_empty; + + switch (canvas->backend()->getFormat()) { + case CAIRO_FORMAT_ARGB32: { + dst += oy * dstStride + ox * 4; + // Rearrange alpha (argb -> rgba), undo alpha pre-multiplication, + // and store in big-endian format + for (int y = 0; y < ch; ++y) { + uint32_t *row = (uint32_t *)(src + srcStride * (y + sy)); + for (int x = 0; x < cw; ++x) { + int bx = x * 4; + uint32_t *pixel = row + x + sx; + uint8_t a = *pixel >> 24; + uint8_t r = *pixel >> 16; + uint8_t g = *pixel >> 8; + uint8_t b = *pixel; + dst[bx + 3] = a; + + // Performance optimization: fully transparent/opaque pixels can be + // processed more efficiently. 
+ if (a == 0 || a == 255) { + dst[bx + 0] = r; + dst[bx + 1] = g; + dst[bx + 2] = b; + } else { + // Undo alpha pre-multiplication + float alphaR = (float)255 / a; + dst[bx + 0] = (int)((float)r * alphaR); + dst[bx + 1] = (int)((float)g * alphaR); + dst[bx + 2] = (int)((float)b * alphaR); + } + + } + dst += dstStride; + } + break; + } + case CAIRO_FORMAT_RGB24: { + dst += oy * dstStride + ox * 4; + // Rearrange alpha (argb -> rgba) and store in big-endian format + for (int y = 0; y < ch; ++y) { + uint32_t *row = (uint32_t *)(src + srcStride * (y + sy)); + for (int x = 0; x < cw; ++x) { + int bx = x * 4; + uint32_t *pixel = row + x + sx; + uint8_t r = *pixel >> 16; + uint8_t g = *pixel >> 8; + uint8_t b = *pixel; + + dst[bx + 0] = r; + dst[bx + 1] = g; + dst[bx + 2] = b; + dst[bx + 3] = 255; + } + dst += dstStride; + } + break; + } + case CAIRO_FORMAT_A8: { + dst += oy * dstStride + ox; + for (int y = 0; y < ch; ++y) { + uint8_t *row = (uint8_t *)(src + srcStride * (y + sy)); + memcpy(dst, row + sx, cw); + dst += dstStride; + } + break; + } + case CAIRO_FORMAT_A1: { + // TODO Should this be totally packed, or maintain a stride divisible by 4? + Napi::Error::New(env, "getImageData for CANVAS_FORMAT_A1 is not yet implemented").ThrowAsJavaScriptException(); + + break; + } + case CAIRO_FORMAT_RGB16_565: { + dst += oy * dstStride + ox * 2; + for (int y = 0; y < ch; ++y) { + uint16_t *row = (uint16_t *)(src + srcStride * (y + sy)); + memcpy(dst, row + sx, cw * 2); + dst += dstStride; + } + break; + } +#ifdef CAIRO_FORMAT_RGB30 + case CAIRO_FORMAT_RGB30: { + // TODO + Napi::Error::New(env, "getImageData for CANVAS_FORMAT_RGB30 is not yet implemented").ThrowAsJavaScriptException(); + + break; + } +#endif + default: { + // Unlikely + Napi::Error::New(env, "Invalid pixel format or not an image canvas").ThrowAsJavaScriptException(); + return env.Null(); + } + } + +return_empty: + Napi::Number swHandle = Napi::Number::New(env, sw); + Napi::Number shHandle = Napi::Number::New(env, sh); + Napi::Function ctor = env.GetInstanceData()->ImageDataCtor.Value(); + Napi::Maybe ret = ctor.New({ dataArray, swHandle, shHandle }); + + return ret.IsJust() ? ret.Unwrap() : env.Undefined(); +} + +/** + * Create `ImageData` with the given dimensions or + * `ImageData` instance for dimensions. + */ + +Napi::Value +Context2d::CreateImageData(const Napi::CallbackInfo& info){ + Canvas *canvas = this->canvas(); + Napi::Number zero = Napi::Number::New(env, 0); + int32_t width, height; + + if (info[0].IsObject()) { + Napi::Object obj = info[0].As(); + width = obj.Get("width").UnwrapOr(zero).ToNumber().UnwrapOr(zero).Int32Value(); + height = obj.Get("height").UnwrapOr(zero).ToNumber().UnwrapOr(zero).Int32Value(); + } else { + width = info[0].ToNumber().UnwrapOr(zero).Int32Value(); + height = info[1].ToNumber().UnwrapOr(zero).Int32Value(); + } + + int stride = canvas->stride(); + double Bpp = static_cast(stride) / canvas->getWidth(); + int nBytes = static_cast(Bpp * width * height + .5); + + Napi::ArrayBuffer ab = Napi::ArrayBuffer::New(env, nBytes); + Napi::Value arr; + + if (canvas->backend()->getFormat() == CAIRO_FORMAT_RGB16_565) + arr = Napi::Uint16Array::New(env, nBytes / 2, ab, 0); + else + arr = Napi::Uint8Array::New(env, nBytes, ab, 0, napi_uint8_clamped_array); + + Napi::Function ctor = env.GetInstanceData()->ImageDataCtor.Value(); + Napi::Maybe ret = ctor.New({ arr, Napi::Number::New(env, width), Napi::Number::New(env, height) }); + + return ret.IsJust() ? 
ret.Unwrap() : env.Undefined(); +} + +/* + * Take a transform matrix and return its components + * 0: angle, 1: scaleX, 2: scaleY, 3: skewX, 4: translateX, 5: translateY + */ +void decompose_matrix(cairo_matrix_t matrix, double *destination) { + double denom = pow(matrix.xx, 2) + pow(matrix.yx, 2); + destination[0] = atan2(matrix.yx, matrix.xx); + destination[1] = sqrt(denom); + destination[2] = (matrix.xx * matrix.yy - matrix.xy * matrix.yx) / destination[1]; + destination[3] = atan2(matrix.xx * matrix.xy + matrix.yx * matrix.yy, denom); + destination[4] = matrix.x0; + destination[5] = matrix.y0; +} + +/* + * Draw image src image to the destination (context). + * + * - dx, dy + * - dx, dy, dw, dh + * - sx, sy, sw, sh, dx, dy, dw, dh + * + */ + +void +Context2d::DrawImage(const Napi::CallbackInfo& info) { + int infoLen = info.Length(); + + if (infoLen != 3 && infoLen != 5 && infoLen != 9) { + Napi::TypeError::New(env, "Invalid arguments").ThrowAsJavaScriptException(); + return; + } + + if (!info[0].IsObject()) { + Napi::TypeError::New(env, "The first argument must be an object").ThrowAsJavaScriptException(); + return; + } + + double args[8]; + if(!checkArgs(info, args, infoLen - 1, 1)) + return; + + double sx = 0 + , sy = 0 + , sw = 0 + , sh = 0 + , dx = 0 + , dy = 0 + , dw = 0 + , dh = 0 + , source_w = 0 + , source_h = 0; + + cairo_surface_t *surface; + + Napi::Object obj = info[0].As(); + + // Image + if (obj.InstanceOf(env.GetInstanceData()->ImageCtor.Value()).UnwrapOr(false)) { + Image *img = Image::Unwrap(obj); + if (!img->isComplete()) { + Napi::Error::New(env, "Image given has not completed loading").ThrowAsJavaScriptException(); + return; + } + source_w = sw = img->width; + source_h = sh = img->height; + surface = img->surface(); + + // Canvas + } else if (obj.InstanceOf(env.GetInstanceData()->CanvasCtor.Value()).UnwrapOr(false)) { + Canvas *canvas = Canvas::Unwrap(obj); + source_w = sw = canvas->getWidth(); + source_h = sh = canvas->getHeight(); + surface = canvas->surface(); + + // Invalid + } else { + if (!env.IsExceptionPending()) { + Napi::TypeError::New(env, "Image or Canvas expected").ThrowAsJavaScriptException(); + } + return; + } + + cairo_t *ctx = context(); + + // Arguments + switch (infoLen) { + // img, sx, sy, sw, sh, dx, dy, dw, dh + case 9: + sx = args[0]; + sy = args[1]; + sw = args[2]; + sh = args[3]; + dx = args[4]; + dy = args[5]; + dw = args[6]; + dh = args[7]; + break; + // img, dx, dy, dw, dh + case 5: + dx = args[0]; + dy = args[1]; + dw = args[2]; + dh = args[3]; + break; + // img, dx, dy + case 3: + dx = args[0]; + dy = args[1]; + dw = sw; + dh = sh; + break; + } + + if (!(sw && sh && dw && dh)) + return; + + // Start draw + cairo_save(ctx); + + cairo_matrix_t matrix; + double transforms[6]; + cairo_get_matrix(ctx, &matrix); + decompose_matrix(matrix, transforms); + // extract the scale value from the current transform so that we know how many pixels we + // need for our extra canvas in the drawImage operation. 
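+  // For intuition (worked example, not from the original sources): decompose_matrix()
+  // returns {angle, scaleX, scaleY, skewX, translateX, translateY}. For a CTM built by
+  // cairo_rotate(cr, M_PI/2) followed by cairo_scale(cr, 2, 3), cairo stores
+  // xx = 0, yx = 2, xy = -3, yy = 0, so
+  //   angle  = atan2(yx, xx)            = atan2(2, 0)       = pi/2
+  //   scaleX = sqrt(xx^2 + yx^2)        = 2
+  //   scaleY = (xx*yy - xy*yx) / scaleX = (0 - (-3)*2) / 2  = 3
+  // transforms[1] and transforms[2] used below are exactly these two scale factors.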
+  double current_scale_x = std::abs(transforms[1]);
+  double current_scale_y = std::abs(transforms[2]);
+  double extra_dx = 0;
+  double extra_dy = 0;
+  double fx = dw / sw * current_scale_x; // transforms[1] is scale on X
+  double fy = dh / sh * current_scale_y; // transforms[2] is scale on Y
+  bool needScale = dw != sw || dh != sh;
+  bool needCut = sw != source_w || sh != source_h || sx < 0 || sy < 0;
+  bool sameCanvas = surface == canvas()->surface();
+  bool needsExtraSurface = sameCanvas || needCut || needScale;
+  cairo_surface_t *surfTemp = NULL;
+  cairo_t *ctxTemp = NULL;
+
+  if (needsExtraSurface) {
+    // we want to create the extra surface as small as possible.
+    // fx and fy are the total scaling we need to apply to sw, sh.
+    // from sw and sh we want to remove the part that is outside source_w and source_h
+    double real_w = sw;
+    double real_h = sh;
+    double translate_x = 0;
+    double translate_y = 0;
+    // if sx or sy are negative, a part of the area represented by sw and sh is empty
+    // because there are empty pixels, so we cut it out.
+    // On the other hand if sx or sy are positive, but sw and sh extend outside the real
+    // source pixels, we cut the area in that case too.
+    if (sx < 0) {
+      extra_dx = -sx * fx;
+      real_w = sw + sx;
+    } else if (sx + sw > source_w) {
+      real_w = sw - (sx + sw - source_w);
+    }
+    if (sy < 0) {
+      extra_dy = -sy * fy;
+      real_h = sh + sy;
+    } else if (sy + sh > source_h) {
+      real_h = sh - (sy + sh - source_h);
+    }
+    // if after cutting we are still bigger than source pixels, we restrict again
+    if (real_w > source_w) {
+      real_w = source_w;
+    }
+    if (real_h > source_h) {
+      real_h = source_h;
+    }
+    // TODO: find a way to limit the surfTemp to real_w and real_h if fx and fy are bigger than 1.
+    // there are no more pixels than the ones available in the source, no need to create a bigger surface.
+    surfTemp = cairo_image_surface_create(CAIRO_FORMAT_ARGB32, round(real_w * fx), round(real_h * fy));
+    ctxTemp = cairo_create(surfTemp);
+    cairo_scale(ctxTemp, fx, fy);
+    if (sx > 0) {
+      translate_x = sx;
+    }
+    if (sy > 0) {
+      translate_y = sy;
+    }
+    cairo_set_source_surface(ctxTemp, surface, -translate_x, -translate_y);
+    cairo_pattern_set_filter(cairo_get_source(ctxTemp), state->imageSmoothingEnabled ? state->patternQuality : CAIRO_FILTER_NEAREST);
+    cairo_pattern_set_extend(cairo_get_source(ctxTemp), CAIRO_EXTEND_REFLECT);
+    cairo_paint_with_alpha(ctxTemp, 1);
+    surface = surfTemp;
+  }
+  // apply shadow if there is one
+  if (hasShadow()) {
+    if(state->shadowBlur) {
+      // we need to create a new surface in order to blur
+      int pad = state->shadowBlur * 2;
+      cairo_surface_t *shadow_surface = cairo_image_surface_create(CAIRO_FORMAT_ARGB32, dw + 2 * pad, dh + 2 * pad);
+      cairo_t *shadow_context = cairo_create(shadow_surface);
+
+      // mask and blur
+      setSourceRGBA(shadow_context, state->shadow);
+      cairo_mask_surface(shadow_context, surface, pad, pad);
+      blur(shadow_surface, state->shadowBlur);
+
+      // paint
+      // @note: ShadowBlur looks different in each browser. This implementation matches chrome as closely as possible.
+      // The 1.4 offset comes from visual tests with Chrome. I have read the spec and part of the shadowBlur
+      // implementation, and it's not immediately clear why an offset is necessary, but without it, the result
+      // in chrome is different.
+ cairo_set_source_surface(ctx, shadow_surface, + dx + state->shadowOffsetX - pad + 1.4, + dy + state->shadowOffsetY - pad + 1.4); + cairo_paint(ctx); + // cleanup + cairo_destroy(shadow_context); + cairo_surface_destroy(shadow_surface); + } else { + setSourceRGBA(state->shadow); + cairo_mask_surface(ctx, surface, + dx + (state->shadowOffsetX), + dy + (state->shadowOffsetY)); + } + } + + double scaled_dx = dx; + double scaled_dy = dy; + + if (needsExtraSurface && (current_scale_x != 1 || current_scale_y != 1)) { + // in this case our surface contains already current_scale_x, we need to scale back + cairo_scale(ctx, 1 / current_scale_x, 1 / current_scale_y); + scaled_dx *= current_scale_x; + scaled_dy *= current_scale_y; + } + // Paint + cairo_set_source_surface(ctx, surface, scaled_dx + extra_dx, scaled_dy + extra_dy); + cairo_pattern_set_filter(cairo_get_source(ctx), state->imageSmoothingEnabled ? state->patternQuality : CAIRO_FILTER_NEAREST); + cairo_pattern_set_extend(cairo_get_source(ctx), CAIRO_EXTEND_NONE); + cairo_paint_with_alpha(ctx, state->globalAlpha); + + cairo_restore(ctx); + + if (needsExtraSurface) { + cairo_destroy(ctxTemp); + cairo_surface_destroy(surfTemp); + } +} + +/* + * Get global alpha. + */ + +Napi::Value +Context2d::GetGlobalAlpha(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, state->globalAlpha); +} + +/* + * Set global alpha. + */ + +void +Context2d::SetGlobalAlpha(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::Maybe numberValue = value.ToNumber(); + if (numberValue.IsJust()) { + double n = numberValue.Unwrap().DoubleValue(); + if (n >= 0 && n <= 1) state->globalAlpha = n; + } +} + +/* + * Get global composite operation. + */ + +Napi::Value +Context2d::GetGlobalCompositeOperation(const Napi::CallbackInfo& info) { + cairo_t *ctx = context(); + + const char *op{}; + switch (cairo_get_operator(ctx)) { + // composite modes: + case CAIRO_OPERATOR_CLEAR: op = "clear"; break; + case CAIRO_OPERATOR_SOURCE: op = "copy"; break; + case CAIRO_OPERATOR_DEST: op = "destination"; break; + case CAIRO_OPERATOR_OVER: op = "source-over"; break; + case CAIRO_OPERATOR_DEST_OVER: op = "destination-over"; break; + case CAIRO_OPERATOR_IN: op = "source-in"; break; + case CAIRO_OPERATOR_DEST_IN: op = "destination-in"; break; + case CAIRO_OPERATOR_OUT: op = "source-out"; break; + case CAIRO_OPERATOR_DEST_OUT: op = "destination-out"; break; + case CAIRO_OPERATOR_ATOP: op = "source-atop"; break; + case CAIRO_OPERATOR_DEST_ATOP: op = "destination-atop"; break; + case CAIRO_OPERATOR_XOR: op = "xor"; break; + case CAIRO_OPERATOR_ADD: op = "lighter"; break; + // blend modes: + // Note: "source-over" and "normal" are synonyms. Chrome and FF both report + // "source-over" after setting gCO to "normal". 
+ // case CAIRO_OPERATOR_OVER: op = "normal"; + case CAIRO_OPERATOR_MULTIPLY: op = "multiply"; break; + case CAIRO_OPERATOR_SCREEN: op = "screen"; break; + case CAIRO_OPERATOR_OVERLAY: op = "overlay"; break; + case CAIRO_OPERATOR_DARKEN: op = "darken"; break; + case CAIRO_OPERATOR_LIGHTEN: op = "lighten"; break; + case CAIRO_OPERATOR_COLOR_DODGE: op = "color-dodge"; break; + case CAIRO_OPERATOR_COLOR_BURN: op = "color-burn"; break; + case CAIRO_OPERATOR_HARD_LIGHT: op = "hard-light"; break; + case CAIRO_OPERATOR_SOFT_LIGHT: op = "soft-light"; break; + case CAIRO_OPERATOR_DIFFERENCE: op = "difference"; break; + case CAIRO_OPERATOR_EXCLUSION: op = "exclusion"; break; + case CAIRO_OPERATOR_HSL_HUE: op = "hue"; break; + case CAIRO_OPERATOR_HSL_SATURATION: op = "saturation"; break; + case CAIRO_OPERATOR_HSL_COLOR: op = "color"; break; + case CAIRO_OPERATOR_HSL_LUMINOSITY: op = "luminosity"; break; + // non-standard: + case CAIRO_OPERATOR_SATURATE: op = "saturate"; break; + default: op = "source-over"; + } + + return Napi::String::New(env, op); +} + +/* + * Set pattern quality. + */ + +void +Context2d::SetPatternQuality(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (value.IsString()) { + std::string quality = value.As().Utf8Value(); + if (quality == "fast") { + state->patternQuality = CAIRO_FILTER_FAST; + } else if (quality == "good") { + state->patternQuality = CAIRO_FILTER_GOOD; + } else if (quality == "best") { + state->patternQuality = CAIRO_FILTER_BEST; + } else if (quality == "nearest") { + state->patternQuality = CAIRO_FILTER_NEAREST; + } else if (quality == "bilinear") { + state->patternQuality = CAIRO_FILTER_BILINEAR; + } + } +} + +/* + * Get pattern quality. + */ + +Napi::Value +Context2d::GetPatternQuality(const Napi::CallbackInfo& info) { + const char *quality; + switch (state->patternQuality) { + case CAIRO_FILTER_FAST: quality = "fast"; break; + case CAIRO_FILTER_BEST: quality = "best"; break; + case CAIRO_FILTER_NEAREST: quality = "nearest"; break; + case CAIRO_FILTER_BILINEAR: quality = "bilinear"; break; + default: quality = "good"; + } + return Napi::String::New(env, quality); +} + +/* + * Set ImageSmoothingEnabled value. + */ + +void +Context2d::SetImageSmoothingEnabled(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::Boolean boolValue; + if (value.ToBoolean().UnwrapTo(&boolValue)) state->imageSmoothingEnabled = boolValue.Value(); +} + +/* + * Get pattern quality. + */ + +Napi::Value +Context2d::GetImageSmoothingEnabled(const Napi::CallbackInfo& info) { + return Napi::Boolean::New(env, state->imageSmoothingEnabled); +} + +/* + * Set global composite operation. 
+ */
+
+void
+Context2d::SetGlobalCompositeOperation(const Napi::CallbackInfo& info, const Napi::Value& value) {
+  cairo_t *ctx = this->context();
+  Napi::String opStr;
+  if (value.ToString().UnwrapTo(&opStr)) { // Unlike CSS colors, this *is* case-sensitive
+    const std::map<std::string, cairo_operator_t> blendmodes = {
+      // composite modes:
+      {"clear", CAIRO_OPERATOR_CLEAR},
+      {"copy", CAIRO_OPERATOR_SOURCE},
+      {"destination", CAIRO_OPERATOR_DEST}, // this seems to have been omitted from the spec
+      {"source-over", CAIRO_OPERATOR_OVER},
+      {"destination-over", CAIRO_OPERATOR_DEST_OVER},
+      {"source-in", CAIRO_OPERATOR_IN},
+      {"destination-in", CAIRO_OPERATOR_DEST_IN},
+      {"source-out", CAIRO_OPERATOR_OUT},
+      {"destination-out", CAIRO_OPERATOR_DEST_OUT},
+      {"source-atop", CAIRO_OPERATOR_ATOP},
+      {"destination-atop", CAIRO_OPERATOR_DEST_ATOP},
+      {"xor", CAIRO_OPERATOR_XOR},
+      {"lighter", CAIRO_OPERATOR_ADD},
+      // blend modes:
+      {"normal", CAIRO_OPERATOR_OVER},
+      {"multiply", CAIRO_OPERATOR_MULTIPLY},
+      {"screen", CAIRO_OPERATOR_SCREEN},
+      {"overlay", CAIRO_OPERATOR_OVERLAY},
+      {"darken", CAIRO_OPERATOR_DARKEN},
+      {"lighten", CAIRO_OPERATOR_LIGHTEN},
+      {"color-dodge", CAIRO_OPERATOR_COLOR_DODGE},
+      {"color-burn", CAIRO_OPERATOR_COLOR_BURN},
+      {"hard-light", CAIRO_OPERATOR_HARD_LIGHT},
+      {"soft-light", CAIRO_OPERATOR_SOFT_LIGHT},
+      {"difference", CAIRO_OPERATOR_DIFFERENCE},
+      {"exclusion", CAIRO_OPERATOR_EXCLUSION},
+      {"hue", CAIRO_OPERATOR_HSL_HUE},
+      {"saturation", CAIRO_OPERATOR_HSL_SATURATION},
+      {"color", CAIRO_OPERATOR_HSL_COLOR},
+      {"luminosity", CAIRO_OPERATOR_HSL_LUMINOSITY},
+      // non-standard:
+      {"saturate", CAIRO_OPERATOR_SATURATE}
+    };
+    auto op = blendmodes.find(opStr.Utf8Value());
+    if (op != blendmodes.end()) cairo_set_operator(ctx, op->second);
+  }
+}
+
+/*
+ * Get shadow offset x.
+ */
+
+Napi::Value
+Context2d::GetShadowOffsetX(const Napi::CallbackInfo& info) {
+  return Napi::Number::New(env, state->shadowOffsetX);
+}
+
+/*
+ * Set shadow offset x.
+ */
+
+void
+Context2d::SetShadowOffsetX(const Napi::CallbackInfo& info, const Napi::Value& value) {
+  Napi::Number numberValue;
+  if (value.ToNumber().UnwrapTo(&numberValue)) state->shadowOffsetX = numberValue.DoubleValue();
+}
+
+/*
+ * Get shadow offset y.
+ */
+
+Napi::Value
+Context2d::GetShadowOffsetY(const Napi::CallbackInfo& info) {
+  return Napi::Number::New(env, state->shadowOffsetY);
+}
+
+/*
+ * Set shadow offset y.
+ */
+
+void
+Context2d::SetShadowOffsetY(const Napi::CallbackInfo& info, const Napi::Value& value) {
+  Napi::Number numberValue;
+  if (value.ToNumber().UnwrapTo(&numberValue)) state->shadowOffsetY = numberValue.DoubleValue();
+}
+
+/*
+ * Get shadow blur.
+ */
+
+Napi::Value
+Context2d::GetShadowBlur(const Napi::CallbackInfo& info) {
+  return Napi::Number::New(env, state->shadowBlur);
+}
+
+/*
+ * Set shadow blur.
+ */
+
+void
+Context2d::SetShadowBlur(const Napi::CallbackInfo& info, const Napi::Value& value) {
+  Napi::Number n;
+  if (value.ToNumber().UnwrapTo(&n)) {
+    double v = n.DoubleValue();
+    if (v >= 0 && v <= std::numeric_limits<decltype(state->shadowBlur)>::max()) {
+      state->shadowBlur = v;
+    }
+  }
+}
+
+/*
+ * Get current antialiasing setting.
+ */
+
+Napi::Value
+Context2d::GetAntiAlias(const Napi::CallbackInfo& info) {
+  const char *aa;
+  switch (cairo_get_antialias(context())) {
+    case CAIRO_ANTIALIAS_NONE: aa = "none"; break;
+    case CAIRO_ANTIALIAS_GRAY: aa = "gray"; break;
+    case CAIRO_ANTIALIAS_SUBPIXEL: aa = "subpixel"; break;
+    default: aa = "default";
+  }
+  return Napi::String::New(env, aa);
+}
+
+/*
+ * Set antialiasing.
+ */ + +void +Context2d::SetAntiAlias(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::String stringValue; + + if (value.ToString().UnwrapTo(&stringValue)) { + std::string str = stringValue.Utf8Value(); + cairo_t *ctx = context(); + cairo_antialias_t a; + if (str == "none") { + a = CAIRO_ANTIALIAS_NONE; + } else if (str == "default") { + a = CAIRO_ANTIALIAS_DEFAULT; + } else if (str == "gray") { + a = CAIRO_ANTIALIAS_GRAY; + } else if (str == "subpixel") { + a = CAIRO_ANTIALIAS_SUBPIXEL; + } else { + a = cairo_get_antialias(ctx); + } + cairo_set_antialias(ctx, a); + } +} + +/* + * Get text drawing mode. + */ + +Napi::Value +Context2d::GetTextDrawingMode(const Napi::CallbackInfo& info) { + const char *mode; + if (state->textDrawingMode == TEXT_DRAW_PATHS) { + mode = "path"; + } else if (state->textDrawingMode == TEXT_DRAW_GLYPHS) { + mode = "glyph"; + } else { + mode = "unknown"; + } + return Napi::String::New(env, mode); +} + +/* + * Set text drawing mode. + */ + +void +Context2d::SetTextDrawingMode(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::String stringValue; + if (value.ToString().UnwrapTo(&stringValue)) { + std::string str = stringValue.Utf8Value(); + if (str == "path") { + state->textDrawingMode = TEXT_DRAW_PATHS; + } else if (str == "glyph") { + state->textDrawingMode = TEXT_DRAW_GLYPHS; + } + } +} + +/* + * Get filter. + */ + +Napi::Value +Context2d::GetQuality(const Napi::CallbackInfo& info) { + const char *filter; + switch (cairo_pattern_get_filter(cairo_get_source(context()))) { + case CAIRO_FILTER_FAST: filter = "fast"; break; + case CAIRO_FILTER_BEST: filter = "best"; break; + case CAIRO_FILTER_NEAREST: filter = "nearest"; break; + case CAIRO_FILTER_BILINEAR: filter = "bilinear"; break; + default: filter = "good"; + } + return Napi::String::New(env, filter); +} + +/* + * Set filter. + */ + +void +Context2d::SetQuality(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::String stringValue; + if (value.ToString().UnwrapTo(&stringValue)) { + std::string str = stringValue.Utf8Value(); + cairo_filter_t filter; + if (str == "fast") { + filter = CAIRO_FILTER_FAST; + } else if (str == "best") { + filter = CAIRO_FILTER_BEST; + } else if (str == "nearest") { + filter = CAIRO_FILTER_NEAREST; + } else if (str == "bilinear") { + filter = CAIRO_FILTER_BILINEAR; + } else { + filter = CAIRO_FILTER_GOOD; + } + cairo_pattern_set_filter(cairo_get_source(context()), filter); + } +} + +/* + * Helper for get current transform matrix + */ + +Napi::Value +Context2d::get_current_transform() { + Napi::Float64Array arr = Napi::Float64Array::New(env, 6); + double *dest = arr.Data(); + cairo_matrix_t matrix; + cairo_get_matrix(context(), &matrix); + dest[0] = matrix.xx; + dest[1] = matrix.yx; + dest[2] = matrix.xy; + dest[3] = matrix.yy; + dest[4] = matrix.x0; + dest[5] = matrix.y0; + Napi::Maybe ret = env.GetInstanceData()->DOMMatrixCtor.Value().New({ arr }); + return ret.IsJust() ? ret.Unwrap() : env.Undefined(); +} + +/* + * Helper for get/set transform. 
+ */ + +void parse_matrix_from_object(cairo_matrix_t &matrix, Napi::Object mat) { + Napi::Value zero = Napi::Number::New(mat.Env(), 0); + cairo_matrix_init(&matrix, + mat.Get("a").UnwrapOr(zero).As().DoubleValue(), + mat.Get("b").UnwrapOr(zero).As().DoubleValue(), + mat.Get("c").UnwrapOr(zero).As().DoubleValue(), + mat.Get("d").UnwrapOr(zero).As().DoubleValue(), + mat.Get("e").UnwrapOr(zero).As().DoubleValue(), + mat.Get("f").UnwrapOr(zero).As().DoubleValue() + ); +} + + +/* + * Get current transform. + */ + +Napi::Value +Context2d::GetCurrentTransform(const Napi::CallbackInfo& info) { + return get_current_transform(); +} + +/* + * Set current transform. + */ + +void +Context2d::SetCurrentTransform(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::Object mat; + + if (value.ToObject().UnwrapTo(&mat)) { + if (!mat.InstanceOf(env.GetInstanceData()->DOMMatrixCtor.Value()).UnwrapOr(false)) { + if (!env.IsExceptionPending()) { + Napi::TypeError::New(env, "Expected DOMMatrix").ThrowAsJavaScriptException(); + } + return; + } + + cairo_matrix_t matrix; + parse_matrix_from_object(matrix, mat); + + cairo_transform(context(), &matrix); + } +} + +/* + * Get current fill style. + */ + +Napi::Value +Context2d::GetFillStyle(const Napi::CallbackInfo& info) { + Napi::Value style; + + if (_fillStyle.IsEmpty()) + style = _getFillColor(); + else + style = _fillStyle.Value(); + + return style; +} + +/* + * Set current fill style. + */ + +void +Context2d::SetFillStyle(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (value.IsString()) { + _fillStyle.Reset(); + _setFillColor(value.As()); + } else if (value.IsObject()) { + InstanceData *data = env.GetInstanceData(); + Napi::Object obj = value.As(); + if (obj.InstanceOf(data->CanvasGradientCtor.Value()).UnwrapOr(false)) { + _fillStyle.Reset(obj); + Gradient *grad = Gradient::Unwrap(obj); + state->fillGradient = grad->pattern(); + } else if (obj.InstanceOf(data->CanvasPatternCtor.Value()).UnwrapOr(false)) { + _fillStyle.Reset(obj); + Pattern *pattern = Pattern::Unwrap(obj); + state->fillPattern = pattern->pattern(); + } + } +} + +/* + * Get current stroke style. + */ + +Napi::Value +Context2d::GetStrokeStyle(const Napi::CallbackInfo& info) { + Napi::Value style; + + if (_strokeStyle.IsEmpty()) + style = _getStrokeColor(); + else + style = _strokeStyle.Value(); + + return style; +} + +/* + * Set current stroke style. + */ + +void +Context2d::SetStrokeStyle(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (value.IsString()) { + _strokeStyle.Reset(); + _setStrokeColor(value.As()); + } else if (value.IsObject()) { + InstanceData *data = env.GetInstanceData(); + Napi::Object obj = value.As(); + if (obj.InstanceOf(data->CanvasGradientCtor.Value()).UnwrapOr(false)) { + _strokeStyle.Reset(obj); + Gradient *grad = Gradient::Unwrap(obj); + state->strokeGradient = grad->pattern(); + } else if (obj.InstanceOf(data->CanvasPatternCtor.Value()).UnwrapOr(false)) { + _strokeStyle.Reset(value); + Pattern *pattern = Pattern::Unwrap(obj); + state->strokePattern = pattern->pattern(); + } + } +} + +/* + * Get miter limit. + */ + +Napi::Value +Context2d::GetMiterLimit(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, cairo_get_miter_limit(context())); +} + +/* + * Set miter limit. 
+ */ + +void +Context2d::SetMiterLimit(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::Maybe numberValue = value.ToNumber(); + if (numberValue.IsJust()) { + double n = numberValue.Unwrap().DoubleValue(); + if (n > 0) cairo_set_miter_limit(context(), n); + } +} + +/* + * Get line width. + */ + +Napi::Value +Context2d::GetLineWidth(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, cairo_get_line_width(context())); +} + +/* + * Set line width. + */ + +void +Context2d::SetLineWidth(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::Maybe numberValue = value.ToNumber(); + if (numberValue.IsJust()) { + double n = numberValue.Unwrap().DoubleValue(); + if (n > 0 && n != std::numeric_limits::infinity()) { + cairo_set_line_width(context(), n); + } + } +} + +/* + * Get line join. + */ + +Napi::Value +Context2d::GetLineJoin(const Napi::CallbackInfo& info) { + const char *join; + switch (cairo_get_line_join(context())) { + case CAIRO_LINE_JOIN_BEVEL: join = "bevel"; break; + case CAIRO_LINE_JOIN_ROUND: join = "round"; break; + default: join = "miter"; + } + return Napi::String::New(env, join); +} + +/* + * Set line join. + */ + +void +Context2d::SetLineJoin(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::Maybe stringValue = value.ToString(); + cairo_t *ctx = context(); + + if (stringValue.IsJust()) { + std::string type = stringValue.Unwrap().Utf8Value(); + if (type == "round") { + cairo_set_line_join(ctx, CAIRO_LINE_JOIN_ROUND); + } else if (type == "bevel") { + cairo_set_line_join(ctx, CAIRO_LINE_JOIN_BEVEL); + } else { + cairo_set_line_join(ctx, CAIRO_LINE_JOIN_MITER); + } + } +} + +/* + * Get line cap. + */ + +Napi::Value +Context2d::GetLineCap(const Napi::CallbackInfo& info) { + const char *cap; + switch (cairo_get_line_cap(context())) { + case CAIRO_LINE_CAP_ROUND: cap = "round"; break; + case CAIRO_LINE_CAP_SQUARE: cap = "square"; break; + default: cap = "butt"; + } + return Napi::String::New(env, cap); +} + +/* + * Set line cap. + */ + +void +Context2d::SetLineCap(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::Maybe stringValue = value.ToString(); + cairo_t *ctx = context(); + + if (stringValue.IsJust()) { + std::string type = stringValue.Unwrap().Utf8Value(); + if (type == "round") { + cairo_set_line_cap(ctx, CAIRO_LINE_CAP_ROUND); + } else if (type == "square") { + cairo_set_line_cap(ctx, CAIRO_LINE_CAP_SQUARE); + } else { + cairo_set_line_cap(ctx, CAIRO_LINE_CAP_BUTT); + } + } +} + +/* + * Check if the given point is within the current path. + */ + +Napi::Value +Context2d::IsPointInPath(const Napi::CallbackInfo& info) { + if (info[0].IsNumber() && info[1].IsNumber()) { + cairo_t *ctx = context(); + double x = info[0].As(), y = info[1].As(); + setFillRule(info[2]); + return Napi::Boolean::New(env, cairo_in_fill(ctx, x, y) || cairo_in_stroke(ctx, x, y)); + } + return Napi::Boolean::New(env, false); +} + +/* + * Set shadow color. + */ + +void +Context2d::SetShadowColor(const Napi::CallbackInfo& info, const Napi::Value& value) { + Napi::Maybe stringValue = value.ToString(); + short ok; + + if (stringValue.IsJust()) { + std::string str = stringValue.Unwrap().Utf8Value(); + uint32_t rgba = rgba_from_string(str.c_str(), &ok); + if (ok) state->shadow = rgba_create(rgba); + } +} + +/* + * Get shadow color. 
+ */ + +Napi::Value +Context2d::GetShadowColor(const Napi::CallbackInfo& info) { + char buf[64]; + rgba_to_string(state->shadow, buf, sizeof(buf)); + return Napi::String::New(env, buf); +} + +/* + * Set fill color, used internally for fillStyle= + */ + +void +Context2d::_setFillColor(Napi::Value arg) { + Napi::Maybe stringValue = arg.ToString(); + short ok; + + if (stringValue.IsJust()) { + Napi::String str = stringValue.Unwrap(); + char buf[128] = {0}; + napi_status status = napi_get_value_string_utf8(env, str, buf, sizeof(buf) - 1, nullptr); + if (status != napi_ok) return; + uint32_t rgba = rgba_from_string(buf, &ok); + if (!ok) return; + state->fillPattern = state->fillGradient = NULL; + state->fill = rgba_create(rgba); + } +} + +/* + * Get fill color. + */ + +Napi::Value +Context2d::_getFillColor() { + char buf[64]; + rgba_to_string(state->fill, buf, sizeof(buf)); + return Napi::String::New(env, buf); +} + +/* + * Set stroke color, used internally for strokeStyle= + */ + +void +Context2d::_setStrokeColor(Napi::Value arg) { + short ok; + std::string str = arg.As(); + uint32_t rgba = rgba_from_string(str.c_str(), &ok); + if (!ok) return; + state->strokePattern = state->strokeGradient = NULL; + state->stroke = rgba_create(rgba); +} + +/* + * Get stroke color. + */ + +Napi::Value +Context2d::_getStrokeColor() { + char buf[64]; + rgba_to_string(state->stroke, buf, sizeof(buf)); + return Napi::String::New(env, buf); +} + +Napi::Value +Context2d::CreatePattern(const Napi::CallbackInfo& info) { + Napi::Function ctor = env.GetInstanceData()->CanvasPatternCtor.Value(); + Napi::Maybe ret = ctor.New({ info[0], info[1] }); + return ret.IsJust() ? ret.Unwrap() : env.Undefined(); +} + +Napi::Value +Context2d::CreateLinearGradient(const Napi::CallbackInfo& info) { + Napi::Function ctor = env.GetInstanceData()->CanvasGradientCtor.Value(); + Napi::Maybe ret = ctor.New({ info[0], info[1], info[2], info[3] }); + return ret.IsJust() ? ret.Unwrap() : env.Undefined(); + +} + +Napi::Value +Context2d::CreateRadialGradient(const Napi::CallbackInfo& info) { + Napi::Function ctor = env.GetInstanceData()->CanvasGradientCtor.Value(); + Napi::Maybe ret = ctor.New({ info[0], info[1], info[2], info[3], info[4], info[5] }); + return ret.IsJust() ? ret.Unwrap() : env.Undefined(); +} + +/* + * Bezier curve. + */ + +void +Context2d::BezierCurveTo(const Napi::CallbackInfo& info) { + double args[6]; + if(!checkArgs(info, args, 6)) + return; + + cairo_curve_to(context() + , args[0] + , args[1] + , args[2] + , args[3] + , args[4] + , args[5]); +} + +/* + * Quadratic curve approximation from libsvg-cairo. + */ + +void +Context2d::QuadraticCurveTo(const Napi::CallbackInfo& info) { + double args[4]; + if(!checkArgs(info, args, 4)) + return; + + cairo_t *ctx = context(); + + double x, y + , x1 = args[0] + , y1 = args[1] + , x2 = args[2] + , y2 = args[3]; + + cairo_get_current_point(ctx, &x, &y); + + if (0 == x && 0 == y) { + x = x1; + y = y1; + } + + cairo_curve_to(ctx + , x + 2.0 / 3.0 * (x1 - x), y + 2.0 / 3.0 * (y1 - y) + , x2 + 2.0 / 3.0 * (x1 - x2), y2 + 2.0 / 3.0 * (y1 - y2) + , x2 + , y2); +} + +/* + * Save state. + */ + +void +Context2d::Save(const Napi::CallbackInfo& info) { + save(); +} + +/* + * Restore state. + */ + +void +Context2d::Restore(const Napi::CallbackInfo& info) { + restore(); +} + +/* + * Creates a new subpath. + */ + +void +Context2d::BeginPath(const Napi::CallbackInfo& info) { + cairo_new_path(context()); +} + +/* + * Marks the subpath as closed. 
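
QuadraticCurveTo above uses the standard degree-elevation identity: a quadratic Bezier with control point Q equals the cubic whose control points are P0 + 2/3(Q - P0) and P2 + 2/3(Q - P2). A standalone numeric check of that identity (illustrative only; one dimension is enough, x and y behave identically):

#include <cstdio>

// Evaluate the quadratic and the elevated cubic at the same parameter
// and confirm they agree.
static double quad(double p0, double q, double p2, double t) {
  double u = 1 - t;
  return u * u * p0 + 2 * u * t * q + t * t * p2;
}

static double cubic(double p0, double c1, double c2, double p3, double t) {
  double u = 1 - t;
  return u * u * u * p0 + 3 * u * u * t * c1 + 3 * u * t * t * c2 + t * t * t * p3;
}

int main() {
  double p0 = 1.0, q = 4.0, p2 = 2.0;          // arbitrary sample points
  double c1 = p0 + 2.0 / 3.0 * (q - p0);       // same formula as QuadraticCurveTo
  double c2 = p2 + 2.0 / 3.0 * (q - p2);
  for (double t = 0; t <= 1.0; t += 0.25)
    printf("t=%.2f quad=%f cubic=%f\n", t, quad(p0, q, p2, t), cubic(p0, c1, c2, p2, t));
  return 0;
}
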
+ */ + +void +Context2d::ClosePath(const Napi::CallbackInfo& info) { + cairo_close_path(context()); +} + +/* + * Rotate transformation. + */ + +void +Context2d::Rotate(const Napi::CallbackInfo& info) { + double args[1]; + if(!checkArgs(info, args, 1)) + return; + + cairo_rotate(context(), args[0]); +} + +/* + * Modify the CTM. + */ + +void +Context2d::Transform(const Napi::CallbackInfo& info) { + double args[6]; + if(!checkArgs(info, args, 6)) + return; + + cairo_matrix_t matrix; + cairo_matrix_init(&matrix + , args[0] + , args[1] + , args[2] + , args[3] + , args[4] + , args[5]); + + cairo_transform(context(), &matrix); +} + +/* + * Get the CTM + */ + +Napi::Value +Context2d::GetTransform(const Napi::CallbackInfo& info) { + return get_current_transform(); +} + +/* + * Reset the CTM, used internally by setTransform(). + */ + +void +Context2d::ResetTransform(const Napi::CallbackInfo& info) { + cairo_identity_matrix(context()); +} + +/* + * Reset transform matrix to identity, then apply the given args. + */ + +void +Context2d::SetTransform(const Napi::CallbackInfo& info) { + Napi::Object mat; + + if (info.Length() == 1 && info[0].ToObject().UnwrapTo(&mat)) { + if (!mat.InstanceOf(env.GetInstanceData()->DOMMatrixCtor.Value()).UnwrapOr(false)) { + if (!env.IsExceptionPending()) { + Napi::TypeError::New(env, "Expected DOMMatrix").ThrowAsJavaScriptException(); + } + return; + } + + cairo_matrix_t matrix; + parse_matrix_from_object(matrix, mat); + + cairo_set_matrix(context(), &matrix); + } else { + cairo_identity_matrix(context()); + Context2d::Transform(info); + } +} + +/* + * Translate transformation. + */ + +void +Context2d::Translate(const Napi::CallbackInfo& info) { + double args[2]; + if(!checkArgs(info, args, 2)) + return; + + cairo_translate(context(), args[0], args[1]); +} + +/* + * Scale transformation. + */ + +void +Context2d::Scale(const Napi::CallbackInfo& info) { + double args[2]; + if(!checkArgs(info, args, 2)) + return; + + cairo_scale(context(), args[0], args[1]); +} + +/* + * Use path as clipping region. + */ + +void +Context2d::Clip(const Napi::CallbackInfo& info) { + setFillRule(info[0]); + cairo_t *ctx = context(); + cairo_clip_preserve(ctx); +} + +/* + * Fill the path. + */ + +void +Context2d::Fill(const Napi::CallbackInfo& info) { + setFillRule(info[0]); + fill(true); +} + +/* + * Stroke the path. + */ + +void +Context2d::Stroke(const Napi::CallbackInfo& info) { + stroke(true); +} + +/* + * Helper for fillText/strokeText + */ + +double +get_text_scale(PangoLayout *layout, double maxWidth) { + + PangoRectangle logical_rect; + pango_layout_get_pixel_extents(layout, NULL, &logical_rect); + + if (logical_rect.width > maxWidth) { + return maxWidth / logical_rect.width; + } else { + return 1.0; + } +} + +/* + * Make sure the layout's font list is up-to-date + */ +void +Context2d::checkFonts() { + // If fonts have been registered, the PangoContext is using an outdated FontMap + if (canvas()->fontSerial != fontSerial) { + pango_context_set_font_map( + pango_layout_get_context(_layout), + pango_cairo_font_map_get_default() + ); + + fontSerial = canvas()->fontSerial; + } +} + +void +Context2d::paintText(const Napi::CallbackInfo& info, bool stroke) { + int argsNum = info.Length() >= 4 ? 
3 : 2; + + if (argsNum == 3 && info[3].IsUndefined()) + argsNum = 2; + + double args[3]; + if(!checkArgs(info, args, argsNum, 1)) + return; + + Napi::String strValue; + + if (!info[0].ToString().UnwrapTo(&strValue)) return; + + std::string str = strValue.Utf8Value(); + double x = args[0]; + double y = args[1]; + double scaled_by = 1; + + PangoLayout *layout = this->layout(); + + checkFonts(); + pango_layout_set_text(layout, str.c_str(), -1); + if (state->lang != "") { + pango_context_set_language(pango_layout_get_context(_layout), pango_language_from_string(state->lang.c_str())); + } + pango_cairo_update_layout(context(), layout); + + PangoDirection pango_dir = state->direction == "ltr" ? PANGO_DIRECTION_LTR : PANGO_DIRECTION_RTL; + pango_context_set_base_dir(pango_layout_get_context(_layout), pango_dir); + + if (argsNum == 3) { + if (args[2] <= 0) return; + scaled_by = get_text_scale(layout, args[2]); + cairo_save(context()); + cairo_scale(context(), scaled_by, 1); + } + + savePath(); + if (state->textDrawingMode == TEXT_DRAW_GLYPHS) { + if (stroke == true) { this->stroke(); } else { this->fill(); } + setTextPath(x / scaled_by, y); + } else if (state->textDrawingMode == TEXT_DRAW_PATHS) { + setTextPath(x / scaled_by, y); + if (stroke == true) { this->stroke(); } else { this->fill(); } + } + restorePath(); + if (argsNum == 3) { + cairo_restore(context()); + } +} + +/* + * Fill text at (x, y). + */ + +void +Context2d::FillText(const Napi::CallbackInfo& info) { + paintText(info, false); +} + +/* + * Stroke text at (x ,y). + */ + +void +Context2d::StrokeText(const Napi::CallbackInfo& info) { + paintText(info, true); +} + +/* + * Gets the baseline adjustment in device pixels + */ +inline double getBaselineAdjustment(PangoLayout* layout, short baseline) { + PangoRectangle logical_rect; + pango_layout_line_get_extents(pango_layout_get_line(layout, 0), NULL, &logical_rect); + + double scale = 1.0 / PANGO_SCALE; + double ascent = scale * pango_layout_get_baseline(layout); + double descent = scale * logical_rect.height - ascent; + + switch (baseline) { + case TEXT_BASELINE_ALPHABETIC: + return ascent; + case TEXT_BASELINE_MIDDLE: + return (ascent + descent) / 2.0; + case TEXT_BASELINE_BOTTOM: + return ascent + descent; + default: + return 0; + } +} + +text_align_t +Context2d::resolveTextAlignment() { + text_align_t alignment = state->textAlignment; + + // Convert start/end to left/right based on direction + if (alignment == TEXT_ALIGNMENT_START) { + return (state->direction == "rtl") ? TEXT_ALIGNMENT_RIGHT : TEXT_ALIGNMENT_LEFT; + } else if (alignment == TEXT_ALIGNMENT_END) { + return (state->direction == "rtl") ? TEXT_ALIGNMENT_LEFT : TEXT_ALIGNMENT_RIGHT; + } + + return alignment; +} + +/* + * Set text path for the string in the layout at (x, y). + * This function is called by paintText and won't behave correctly + * if is not called from there. 
+ * it needs pango_layout_set_text and pango_cairo_update_layout to be called before + */ + +void +Context2d::setTextPath(double x, double y) { + PangoRectangle logical_rect; + text_align_t alignment = resolveTextAlignment(); + + switch (alignment) { + case TEXT_ALIGNMENT_CENTER: + pango_layout_get_pixel_extents(_layout, NULL, &logical_rect); + x -= logical_rect.width / 2; + break; + case TEXT_ALIGNMENT_RIGHT: + pango_layout_get_pixel_extents(_layout, NULL, &logical_rect); + x -= logical_rect.width; + break; + default: // TEXT_ALIGNMENT_LEFT + break; + } + + y -= getBaselineAdjustment(_layout, state->textBaseline); + + cairo_move_to(_context, x, y); + if (state->textDrawingMode == TEXT_DRAW_PATHS) { + pango_cairo_layout_path(_context, _layout); + } else if (state->textDrawingMode == TEXT_DRAW_GLYPHS) { + pango_cairo_show_layout(_context, _layout); + } +} + +/* + * Adds a point to the current subpath. + */ + +void +Context2d::LineTo(const Napi::CallbackInfo& info) { + double args[2]; + if(!checkArgs(info, args, 2)) + return; + + cairo_line_to(context(), args[0], args[1]); +} + +/* + * Creates a new subpath at the given point. + */ + +void +Context2d::MoveTo(const Napi::CallbackInfo& info) { + double args[2]; + if(!checkArgs(info, args, 2)) + return; + + cairo_move_to(context(), args[0], args[1]); +} + +/* + * Get font. + */ + +Napi::Value +Context2d::GetFont(const Napi::CallbackInfo& info) { + return Napi::String::New(env, state->font); +} + +/* + * Set font: + * - weight + * - style + * - size + * - unit + * - family + */ + +void +Context2d::SetFont(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (!value.IsString()) return; + + std::string str = value.As().Utf8Value(); + if (!str.length()) return; + + bool success; + auto props = FontParser::parse(str, &success); + if (!success) return; + + PangoFontDescription *desc = pango_font_description_copy(state->fontDescription); + pango_font_description_free(state->fontDescription); + + PangoStyle style = props.fontStyle == FontStyle::Italic ? PANGO_STYLE_ITALIC + : props.fontStyle == FontStyle::Oblique ? PANGO_STYLE_OBLIQUE + : PANGO_STYLE_NORMAL; + pango_font_description_set_style(desc, style); + + pango_font_description_set_weight(desc, static_cast(props.fontWeight)); + + std::string family = props.fontFamily.empty() ? "" : props.fontFamily[0]; + for (size_t i = 1; i < props.fontFamily.size(); i++) { + family += "," + props.fontFamily[i]; + } + if (family.length() > 0) { + // See #1643 - Pango understands "sans" whereas CSS uses "sans-serif" + std::string s1(family); + std::string s2("sans-serif"); + if (streq_casein(s1, s2)) { + pango_font_description_set_family(desc, "sans"); + } else { + pango_font_description_set_family(desc, family.c_str()); + } + } + + PangoFontDescription *sys_desc = Canvas::ResolveFontDescription(desc); + pango_font_description_free(desc); + + if (props.fontSize > 0) pango_font_description_set_absolute_size(sys_desc, props.fontSize * PANGO_SCALE); + + state->fontDescription = sys_desc; + pango_layout_set_font_description(_layout, sys_desc); + + state->font = str; +} + +/* + * Get text baseline. 
+ */
+
+Napi::Value
+Context2d::GetTextBaseline(const Napi::CallbackInfo& info) {
+  const char* baseline;
+  switch (state->textBaseline) {
+    default:
+    case TEXT_BASELINE_ALPHABETIC: baseline = "alphabetic"; break;
+    case TEXT_BASELINE_TOP: baseline = "top"; break;
+    case TEXT_BASELINE_BOTTOM: baseline = "bottom"; break;
+    case TEXT_BASELINE_MIDDLE: baseline = "middle"; break;
+    case TEXT_BASELINE_IDEOGRAPHIC: baseline = "ideographic"; break;
+    case TEXT_BASELINE_HANGING: baseline = "hanging"; break;
+  }
+  return Napi::String::New(env, baseline);
+}
+
+/*
+ * Set text baseline.
+ */
+
+void
+Context2d::SetTextBaseline(const Napi::CallbackInfo& info, const Napi::Value& value) {
+  if (!value.IsString()) return;
+
+  std::string opStr = value.As<Napi::String>();
+  const std::map<std::string, text_baseline_t> modes = {
+    {"alphabetic", TEXT_BASELINE_ALPHABETIC},
+    {"top", TEXT_BASELINE_TOP},
+    {"bottom", TEXT_BASELINE_BOTTOM},
+    {"middle", TEXT_BASELINE_MIDDLE},
+    {"ideographic", TEXT_BASELINE_IDEOGRAPHIC},
+    {"hanging", TEXT_BASELINE_HANGING}
+  };
+  auto op = modes.find(opStr);
+  if (op == modes.end()) return;
+
+  state->textBaseline = op->second;
+}
+
+/*
+ * Get text align.
+ */
+
+Napi::Value
+Context2d::GetTextAlign(const Napi::CallbackInfo& info) {
+  const char* align;
+  switch (state->textAlignment) {
+    case TEXT_ALIGNMENT_LEFT: align = "left"; break;
+    case TEXT_ALIGNMENT_START: align = "start"; break;
+    case TEXT_ALIGNMENT_CENTER: align = "center"; break;
+    case TEXT_ALIGNMENT_RIGHT: align = "right"; break;
+    case TEXT_ALIGNMENT_END: align = "end"; break;
+    default: align = "start";
+  }
+  return Napi::String::New(env, align);
+}
+
+/*
+ * Set text align.
+ */
+
+void
+Context2d::SetTextAlign(const Napi::CallbackInfo& info, const Napi::Value& value) {
+  if (!value.IsString()) return;
+
+  std::string opStr = value.As<Napi::String>();
+  const std::map<std::string, text_align_t> modes = {
+    {"center", TEXT_ALIGNMENT_CENTER},
+    {"left", TEXT_ALIGNMENT_LEFT},
+    {"start", TEXT_ALIGNMENT_START},
+    {"right", TEXT_ALIGNMENT_RIGHT},
+    {"end", TEXT_ALIGNMENT_END}
+  };
+  auto op = modes.find(opStr);
+  if (op == modes.end()) return;
+
+  state->textAlignment = op->second;
+}
+
+/*
+ * Return the given text extents.
+ * TODO: Support for:
+ *   hangingBaseline, ideographicBaseline,
+ *   fontBoundingBoxAscent, fontBoundingBoxDescent
+ */
+
+Napi::Value
+Context2d::MeasureText(const Napi::CallbackInfo& info) {
+  cairo_t *ctx = this->context();
+
+  Napi::String str;
+  if (!info[0].ToString().UnwrapTo(&str)) return env.Undefined();
+
+  Napi::Object obj = Napi::Object::New(env);
+
+  PangoRectangle _ink_rect, _logical_rect;
+  float_rectangle ink_rect, logical_rect;
+  PangoFontMetrics *metrics;
+  PangoLayout *layout = this->layout();
+
+  checkFonts();
+  pango_layout_set_text(layout, str.Utf8Value().c_str(), -1);
+  if (state->lang != "") {
+    pango_context_set_language(pango_layout_get_context(_layout), pango_language_from_string(state->lang.c_str()));
+  }
+  pango_cairo_update_layout(ctx, layout);
+
+  // Normally you could use pango_layout_get_pixel_extents and be done, or use
+  // pango_extents_to_pixels, but both of those round the pixels, so we have to
+  // divide by PANGO_SCALE manually
+  pango_layout_get_extents(layout, &_ink_rect, &_logical_rect);
+
+  float inverse_pango_scale = 1. / PANGO_SCALE;
+
+  logical_rect.x = _logical_rect.x * inverse_pango_scale;
+  logical_rect.y = _logical_rect.y * inverse_pango_scale;
+  logical_rect.width = _logical_rect.width * inverse_pango_scale;
+  logical_rect.height = _logical_rect.height * inverse_pango_scale;
+
+  ink_rect.x = _ink_rect.x * inverse_pango_scale;
+  ink_rect.y = _ink_rect.y * inverse_pango_scale;
+  ink_rect.width = _ink_rect.width * inverse_pango_scale;
+  ink_rect.height = _ink_rect.height * inverse_pango_scale;
+
+  metrics = PANGO_LAYOUT_GET_METRICS(layout);
+
+  text_align_t alignment = resolveTextAlignment();
+
+  double x_offset;
+  switch (alignment) {
+    case TEXT_ALIGNMENT_CENTER:
+      x_offset = logical_rect.width / 2.;
+      break;
+    case TEXT_ALIGNMENT_RIGHT:
+      x_offset = logical_rect.width;
+      break;
+    case TEXT_ALIGNMENT_LEFT:
+    default:
+      x_offset = 0.0;
+  }
+
+  double y_offset = getBaselineAdjustment(layout, state->textBaseline);
+
+  obj.Set("width", Napi::Number::New(env, logical_rect.width));
+  obj.Set("actualBoundingBoxLeft", Napi::Number::New(env, PANGO_LBEARING(ink_rect) + x_offset));
+  obj.Set("actualBoundingBoxRight", Napi::Number::New(env, PANGO_RBEARING(ink_rect) - x_offset));
+  obj.Set("actualBoundingBoxAscent", Napi::Number::New(env, y_offset + PANGO_ASCENT(ink_rect)));
+  obj.Set("actualBoundingBoxDescent", Napi::Number::New(env, PANGO_DESCENT(ink_rect) - y_offset));
+  obj.Set("emHeightAscent", Napi::Number::New(env, -(PANGO_ASCENT(logical_rect) - y_offset)));
+  obj.Set("emHeightDescent", Napi::Number::New(env, PANGO_DESCENT(logical_rect) - y_offset));
+  obj.Set("alphabeticBaseline", Napi::Number::New(env, -(pango_font_metrics_get_ascent(metrics) * inverse_pango_scale - y_offset)));
+
+  pango_font_metrics_unref(metrics);
+
+  return obj;
+}
+
+/*
+ * Set line dash
+ * ref: http://www.w3.org/TR/2dcontext/#dom-context-2d-setlinedash
+ */
+
+void
+Context2d::SetLineDash(const Napi::CallbackInfo& info) {
+  if (!info[0].IsArray()) return;
+  Napi::Array dash = info[0].As<Napi::Array>();
+  uint32_t dashes = dash.Length() & 1 ? dash.Length() * 2 : dash.Length();
+  uint32_t zero_dashes = 0;
+  std::vector<double> a(dashes);
+  for (uint32_t i=0; i<dashes; i++) {
+    Napi::Value d;
+    if (!dash.Get(i % dash.Length()).UnwrapTo(&d) || !d.IsNumber()) return;
+    a[i] = d.As<Napi::Number>().DoubleValue();
+    if (a[i] == 0) zero_dashes++;
+    if (a[i] < 0 || !std::isfinite(a[i])) return;
+  }
+
+  cairo_t *ctx = this->context();
+  double offset;
+  cairo_get_dash(ctx, NULL, &offset);
+  if (zero_dashes == dashes) {
+    std::vector<double> b(0);
+    cairo_set_dash(ctx, b.data(), 0, offset);
+  } else {
+    cairo_set_dash(ctx, a.data(), dashes, offset);
+  }
+}
+
+/*
+ * Get line dash
+ * ref: http://www.w3.org/TR/2dcontext/#dom-context-2d-setlinedash
+ */
+Napi::Value
+Context2d::GetLineDash(const Napi::CallbackInfo& info) {
+  cairo_t *ctx = this->context();
+  int dashes = cairo_get_dash_count(ctx);
+  std::vector<double> a(dashes);
+  cairo_get_dash(ctx, a.data(), NULL);
+
+  Napi::Array dash = Napi::Array::New(env, dashes);
+  for (int i=0; i<dashes; i++)
+    dash.Set(i, Napi::Number::New(env, a[i]));
+  return dash;
+}
+
+/*
+ * Set line dash offset
+ * ref: http://www.w3.org/TR/2dcontext/#dom-context-2d-setlinedash
+ */
+void
+Context2d::SetLineDashOffset(const Napi::CallbackInfo& info, const Napi::Value& value) {
+  Napi::Number numberValue;
+  if (!value.ToNumber().UnwrapTo(&numberValue)) return;
+  double offset = numberValue.DoubleValue();
+  cairo_t *ctx = this->context();
+
+  int dashes = cairo_get_dash_count(ctx);
+  std::vector<double> a(dashes);
+  cairo_get_dash(ctx, a.data(), NULL);
+  cairo_set_dash(ctx, a.data(), dashes, offset);
+}
+
+/*
+ * Get line dash offset
+ * ref: http://www.w3.org/TR/2dcontext/#dom-context-2d-setlinedash
+ */
+Napi::Value
+Context2d::GetLineDashOffset(const Napi::CallbackInfo& info) {
+  cairo_t *ctx = this->context();
+  double offset;
+  cairo_get_dash(ctx, NULL, &offset);
+
+  return Napi::Number::New(env, offset);
+}
+
+/*
+ * Fill the rectangle defined by x, y, width and height.
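+ *
+ * Editorial sketch of the JS-level call that ends up here (hedged: the exact
+ * wrapper lives in the package's JS layer, not in this file):
+ *
+ *   const { createCanvas } = require('canvas');
+ *   const ctx = createCanvas(200, 100).getContext('2d');
+ *   ctx.fillStyle = '#09c';
+ *   ctx.fillRect(10, 10, 100, 50); // dispatches to Context2d::FillRect
+ *
+ * Note the early return below: a zero width or height fills nothing.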
+ */ + +void +Context2d::FillRect(const Napi::CallbackInfo& info) { + RECT_ARGS; + if (0 == width || 0 == height) return; + cairo_t *ctx = context(); + savePath(); + cairo_rectangle(ctx, x, y, width, height); + fill(); + restorePath(); +} + +/* + * Stroke the rectangle defined by x, y, width and height. + */ + +void +Context2d::StrokeRect(const Napi::CallbackInfo& info) { + RECT_ARGS; + if (0 == width && 0 == height) return; + cairo_t *ctx = context(); + savePath(); + cairo_rectangle(ctx, x, y, width, height); + stroke(); + restorePath(); +} + +/* + * Clears all pixels defined by x, y, width and height. + */ + +void +Context2d::ClearRect(const Napi::CallbackInfo& info) { + RECT_ARGS; + if (0 == width || 0 == height) return; + cairo_t *ctx = context(); + cairo_save(ctx); + savePath(); + cairo_rectangle(ctx, x, y, width, height); + cairo_set_operator(ctx, CAIRO_OPERATOR_CLEAR); + cairo_fill(ctx); + restorePath(); + cairo_restore(ctx); +} + +/* + * Adds a rectangle subpath. + */ + +void +Context2d::Rect(const Napi::CallbackInfo& info) { + RECT_ARGS; + cairo_t *ctx = context(); + if (width == 0) { + cairo_move_to(ctx, x, y); + cairo_line_to(ctx, x, y + height); + } else if (height == 0) { + cairo_move_to(ctx, x, y); + cairo_line_to(ctx, x + width, y); + } else { + cairo_rectangle(ctx, x, y, width, height); + } +} + +// Draws an arc with two potentially different radii. +inline static +void elli_arc(cairo_t* ctx, double xc, double yc, double rx, double ry, double a1, double a2, bool clockwise=true) { + if (rx == 0. || ry == 0.) { + cairo_line_to(ctx, xc + rx, yc + ry); + } else { + cairo_save(ctx); + cairo_translate(ctx, xc, yc); + cairo_scale(ctx, rx, ry); + if (clockwise) + cairo_arc(ctx, 0., 0., 1., a1, a2); + else + cairo_arc_negative(ctx, 0., 0., 1., a2, a1); + cairo_restore(ctx); + } +} + +inline static +bool getRadius(Point& p, const Napi::Value& v) { + Napi::Env env = v.Env(); + if (v.IsObject()) { // 5.1 DOMPointInit + Napi::Value rx; + Napi::Value ry; + auto rxMaybe = v.As().Get("x"); + auto ryMaybe = v.As().Get("y"); + if (rxMaybe.UnwrapTo(&rx) && rx.IsNumber() && ryMaybe.UnwrapTo(&ry) && ry.IsNumber()) { + auto rxv = rx.As().DoubleValue(); + auto ryv = ry.As().DoubleValue(); + if (!std::isfinite(rxv) || !std::isfinite(ryv)) + return true; + if (rxv < 0 || ryv < 0) { + Napi::RangeError::New(env, "radii must be positive.").ThrowAsJavaScriptException(); + + return true; + } + p.x = rxv; + p.y = ryv; + return false; + } + } else if (v.IsNumber()) { // 5.2 unrestricted double + auto rv = v.As().DoubleValue(); + if (!std::isfinite(rv)) + return true; + if (rv < 0) { + Napi::RangeError::New(env, "radii must be positive.").ThrowAsJavaScriptException(); + + return true; + } + p.x = p.y = rv; + return false; + } + return true; +} + +/** + * https://html.spec.whatwg.org/multipage/canvas.html#dom-context-2d-roundrect + * x, y, w, h, [radius|[radii]] + */ +void +Context2d::RoundRect(const Napi::CallbackInfo& info) { + RECT_ARGS; + cairo_t *ctx = this->context(); + + // 4. Let normalizedRadii be an empty list + Point normalizedRadii[4]; + size_t nRadii = 4; + + if (info[4].IsUndefined()) { + for (size_t i = 0; i < 4; i++) + normalizedRadii[i].x = normalizedRadii[i].y = 0.; + + } else if (info[4].IsArray()) { + auto radiiList = info[4].As(); + nRadii = radiiList.Length(); + if (!(nRadii >= 1 && nRadii <= 4)) { + Napi::RangeError::New(env, "radii must be a list of one, two, three or four radii.").ThrowAsJavaScriptException(); + return; + } + // 5. 
For each radius of radii + for (size_t i = 0; i < nRadii; i++) { + Napi::Value r; + if (!radiiList.Get(i).UnwrapTo(&r) || getRadius(normalizedRadii[i], r)) + return; + } + + } else { + // 2. If radii is a double, then set radii to <> + if (getRadius(normalizedRadii[0], info[4])) + return; + for (size_t i = 1; i < 4; i++) { + normalizedRadii[i].x = normalizedRadii[0].x; + normalizedRadii[i].y = normalizedRadii[0].y; + } + } + + Point upperLeft, upperRight, lowerRight, lowerLeft; + if (nRadii == 4) { + upperLeft = normalizedRadii[0]; + upperRight = normalizedRadii[1]; + lowerRight = normalizedRadii[2]; + lowerLeft = normalizedRadii[3]; + } else if (nRadii == 3) { + upperLeft = normalizedRadii[0]; + upperRight = normalizedRadii[1]; + lowerLeft = normalizedRadii[1]; + lowerRight = normalizedRadii[2]; + } else if (nRadii == 2) { + upperLeft = normalizedRadii[0]; + lowerRight = normalizedRadii[0]; + upperRight = normalizedRadii[1]; + lowerLeft = normalizedRadii[1]; + } else { + upperLeft = normalizedRadii[0]; + upperRight = normalizedRadii[0]; + lowerRight = normalizedRadii[0]; + lowerLeft = normalizedRadii[0]; + } + + bool clockwise = true; + if (width < 0) { + clockwise = false; + x += width; + width = -width; + std::swap(upperLeft, upperRight); + std::swap(lowerLeft, lowerRight); + } + + if (height < 0) { + clockwise = !clockwise; + y += height; + height = -height; + std::swap(upperLeft, lowerLeft); + std::swap(upperRight, lowerRight); + } + + // 11. Corner curves must not overlap. Scale radii to prevent this. + { + auto top = upperLeft.x + upperRight.x; + auto right = upperRight.y + lowerRight.y; + auto bottom = lowerRight.x + lowerLeft.x; + auto left = upperLeft.y + lowerLeft.y; + auto scale = std::min({ width / top, height / right, width / bottom, height / left }); + if (scale < 1.) { + upperLeft.x *= scale; + upperLeft.y *= scale; + upperRight.x *= scale; + upperRight.y *= scale; + lowerLeft.x *= scale; + lowerLeft.y *= scale; + lowerRight.x *= scale; + lowerRight.y *= scale; + } + } + + // 12. Draw + cairo_move_to(ctx, x + upperLeft.x, y); + if (clockwise) { + cairo_line_to(ctx, x + width - upperRight.x, y); + elli_arc(ctx, x + width - upperRight.x, y + upperRight.y, upperRight.x, upperRight.y, 3. * M_PI / 2., 0.); + cairo_line_to(ctx, x + width, y + height - lowerRight.y); + elli_arc(ctx, x + width - lowerRight.x, y + height - lowerRight.y, lowerRight.x, lowerRight.y, 0, M_PI / 2.); + cairo_line_to(ctx, x + lowerLeft.x, y + height); + elli_arc(ctx, x + lowerLeft.x, y + height - lowerLeft.y, lowerLeft.x, lowerLeft.y, M_PI / 2., M_PI); + cairo_line_to(ctx, x, y + upperLeft.y); + elli_arc(ctx, x + upperLeft.x, y + upperLeft.y, upperLeft.x, upperLeft.y, M_PI, 3. * M_PI / 2.); + } else { + elli_arc(ctx, x + upperLeft.x, y + upperLeft.y, upperLeft.x, upperLeft.y, M_PI, 3. * M_PI / 2., false); + cairo_line_to(ctx, x, y + upperLeft.y); + elli_arc(ctx, x + lowerLeft.x, y + height - lowerLeft.y, lowerLeft.x, lowerLeft.y, M_PI / 2., M_PI, false); + cairo_line_to(ctx, x + lowerLeft.x, y + height); + elli_arc(ctx, x + width - lowerRight.x, y + height - lowerRight.y, lowerRight.x, lowerRight.y, 0, M_PI / 2., false); + cairo_line_to(ctx, x + width, y + height - lowerRight.y); + elli_arc(ctx, x + width - upperRight.x, y + upperRight.y, upperRight.x, upperRight.y, 3. 
* M_PI / 2., 0., false); + cairo_line_to(ctx, x + width - upperRight.x, y); + } + cairo_close_path(ctx); +} + +// Adapted from https://chromium.googlesource.com/chromium/blink/+/refs/heads/main/Source/modules/canvas2d/CanvasPathMethods.cpp +static void canonicalizeAngle(double& startAngle, double& endAngle) { + // Make 0 <= startAngle < 2*PI + double newStartAngle = std::fmod(startAngle, twoPi); + if (newStartAngle < 0) { + newStartAngle += twoPi; + // Check for possible catastrophic cancellation in cases where + // newStartAngle was a tiny negative number (c.f. crbug.com/503422) + if (newStartAngle >= twoPi) + newStartAngle -= twoPi; + } + double delta = newStartAngle - startAngle; + startAngle = newStartAngle; + endAngle = endAngle + delta; +} + +// Adapted from https://chromium.googlesource.com/chromium/blink/+/refs/heads/main/Source/modules/canvas2d/CanvasPathMethods.cpp +static double adjustEndAngle(double startAngle, double endAngle, bool counterclockwise) { + double newEndAngle = endAngle; + /* http://www.whatwg.org/specs/web-apps/current-work/multipage/the-canvas-element.html#dom-context-2d-arc + * If the counterclockwise argument is false and endAngle-startAngle is equal to or greater than 2pi, or, + * if the counterclockwise argument is true and startAngle-endAngle is equal to or greater than 2pi, + * then the arc is the whole circumference of this ellipse, and the point at startAngle along this circle's circumference, + * measured in radians clockwise from the ellipse's semi-major axis, acts as both the start point and the end point. + */ + if (!counterclockwise && endAngle - startAngle >= twoPi) + newEndAngle = startAngle + twoPi; + else if (counterclockwise && startAngle - endAngle >= twoPi) + newEndAngle = startAngle - twoPi; + /* + * Otherwise, the arc is the path along the circumference of this ellipse from the start point to the end point, + * going anti-clockwise if the counterclockwise argument is true, and clockwise otherwise. + * Since the points are on the ellipse, as opposed to being simply angles from zero, + * the arc can never cover an angle greater than 2pi radians. + */ + /* NOTE: When startAngle = 0, endAngle = 2Pi and counterclockwise = true, the spec does not indicate clearly. + * We draw the entire circle, because some web sites use arc(x, y, r, 0, 2*Math.PI, true) to draw circle. + * We preserve backward-compatibility. + */ + else if (!counterclockwise && startAngle > endAngle) + newEndAngle = startAngle + (twoPi - std::fmod(startAngle - endAngle, twoPi)); + else if (counterclockwise && startAngle < endAngle) + newEndAngle = startAngle - (twoPi - std::fmod(endAngle - startAngle, twoPi)); + return newEndAngle; +} + +/* + * Adds an arc at x, y with the given radii and start/end angles. 
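+ *
+ * Editorial worked example of the angle handling above (not upstream text):
+ * for arc(x, y, r, -Math.PI / 2, 4 * Math.PI, false), canonicalizeAngle maps
+ * the start angle to 3*PI/2 and shifts the end angle by the same delta, to
+ * 6*PI; adjustEndAngle then clamps the sweep to startAngle + 2*PI, because a
+ * clockwise sweep of 2*PI or more is drawn as the full circle.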
+ */ + +void +Context2d::Arc(const Napi::CallbackInfo& info) { + double args[5]; + if(!checkArgs(info, args, 5)) + return; + + auto x = args[0]; + auto y = args[1]; + auto radius = args[2]; + auto startAngle = args[3]; + auto endAngle = args[4]; + + if (radius < 0) { + Napi::RangeError::New(env, "The radius provided is negative.").ThrowAsJavaScriptException(); + return; + } + + Napi::Boolean counterclockwiseValue; + if (!info[5].ToBoolean().UnwrapTo(&counterclockwiseValue)) return; + bool counterclockwise = counterclockwiseValue.Value(); + + cairo_t *ctx = context(); + + canonicalizeAngle(startAngle, endAngle); + endAngle = adjustEndAngle(startAngle, endAngle, counterclockwise); + + if (counterclockwise) { + cairo_arc_negative(ctx, x, y, radius, startAngle, endAngle); + } else { + cairo_arc(ctx, x, y, radius, startAngle, endAngle); + } +} + +/* + * Adds an arcTo point (x0,y0) to (x1,y1) with the given radius. + * + * Implementation influenced by WebKit. + */ + +void +Context2d::ArcTo(const Napi::CallbackInfo& info) { + double args[5]; + if(!checkArgs(info, args, 5)) + return; + + cairo_t *ctx = context(); + + // Current path point + double x, y; + cairo_get_current_point(ctx, &x, &y); + Point p0(x, y); + + // Point (x0,y0) + Point p1(args[0], args[1]); + + // Point (x1,y1) + Point p2(args[2], args[3]); + + float radius = args[4]; + + if ((p1.x == p0.x && p1.y == p0.y) + || (p1.x == p2.x && p1.y == p2.y) + || radius == 0.f) { + cairo_line_to(ctx, p1.x, p1.y); + return; + } + + Point p1p0((p0.x - p1.x),(p0.y - p1.y)); + Point p1p2((p2.x - p1.x),(p2.y - p1.y)); + float p1p0_length = sqrtf(p1p0.x * p1p0.x + p1p0.y * p1p0.y); + float p1p2_length = sqrtf(p1p2.x * p1p2.x + p1p2.y * p1p2.y); + + double cos_phi = (p1p0.x * p1p2.x + p1p0.y * p1p2.y) / (p1p0_length * p1p2_length); + // all points on a line logic + if (-1 == cos_phi) { + cairo_line_to(ctx, p1.x, p1.y); + return; + } + + if (1 == cos_phi) { + // add infinite far away point + unsigned int max_length = 65535; + double factor_max = max_length / p1p0_length; + Point ep((p0.x + factor_max * p1p0.x), (p0.y + factor_max * p1p0.y)); + cairo_line_to(ctx, ep.x, ep.y); + return; + } + + float tangent = radius / tan(acos(cos_phi) / 2); + float factor_p1p0 = tangent / p1p0_length; + Point t_p1p0((p1.x + factor_p1p0 * p1p0.x), (p1.y + factor_p1p0 * p1p0.y)); + + Point orth_p1p0(p1p0.y, -p1p0.x); + float orth_p1p0_length = sqrt(orth_p1p0.x * orth_p1p0.x + orth_p1p0.y * orth_p1p0.y); + float factor_ra = radius / orth_p1p0_length; + + double cos_alpha = (orth_p1p0.x * p1p2.x + orth_p1p0.y * p1p2.y) / (orth_p1p0_length * p1p2_length); + if (cos_alpha < 0.f) + orth_p1p0 = Point(-orth_p1p0.x, -orth_p1p0.y); + + Point p((t_p1p0.x + factor_ra * orth_p1p0.x), (t_p1p0.y + factor_ra * orth_p1p0.y)); + + orth_p1p0 = Point(-orth_p1p0.x, -orth_p1p0.y); + float sa = acos(orth_p1p0.x / orth_p1p0_length); + if (orth_p1p0.y < 0.f) + sa = 2 * M_PI - sa; + + bool anticlockwise = false; + + float factor_p1p2 = tangent / p1p2_length; + Point t_p1p2((p1.x + factor_p1p2 * p1p2.x), (p1.y + factor_p1p2 * p1p2.y)); + Point orth_p1p2((t_p1p2.x - p.x),(t_p1p2.y - p.y)); + float orth_p1p2_length = sqrtf(orth_p1p2.x * orth_p1p2.x + orth_p1p2.y * orth_p1p2.y); + float ea = acos(orth_p1p2.x / orth_p1p2_length); + + if (orth_p1p2.y < 0) ea = 2 * M_PI - ea; + if ((sa > ea) && ((sa - ea) < M_PI)) anticlockwise = true; + if ((sa < ea) && ((ea - sa) > M_PI)) anticlockwise = true; + + cairo_line_to(ctx, t_p1p0.x, t_p1p0.y); + + if (anticlockwise && M_PI * 2 != radius) { + 
cairo_arc_negative(ctx + , p.x + , p.y + , radius + , sa + , ea); + } else { + cairo_arc(ctx + , p.x + , p.y + , radius + , sa + , ea); + } +} + +/* + * Adds an ellipse to the path which is centered at (x, y) position with the + * radii radiusX and radiusY starting at startAngle and ending at endAngle + * going in the given direction by anticlockwise (defaulting to clockwise). + */ + +void +Context2d::Ellipse(const Napi::CallbackInfo& info) { + double args[7]; + if(!checkArgs(info, args, 7)) + return; + + double radiusX = args[2]; + double radiusY = args[3]; + + if (radiusX == 0 || radiusY == 0) return; + + double x = args[0]; + double y = args[1]; + double rotation = args[4]; + double startAngle = args[5]; + double endAngle = args[6]; + Napi::Boolean anticlockwiseValue; + + if (!info[7].ToBoolean().UnwrapTo(&anticlockwiseValue)) return; + bool anticlockwise = anticlockwiseValue.Value(); + + cairo_t *ctx = context(); + + // See https://www.cairographics.org/cookbook/ellipses/ + double xRatio = radiusX / radiusY; + + cairo_matrix_t save_matrix; + cairo_get_matrix(ctx, &save_matrix); + cairo_translate(ctx, x, y); + cairo_rotate(ctx, rotation); + cairo_scale(ctx, xRatio, 1.0); + cairo_translate(ctx, -x, -y); + if (anticlockwise && M_PI * 2 != args[4]) { + cairo_arc_negative(ctx, + x, + y, + radiusY, + startAngle, + endAngle); + } else { + cairo_arc(ctx, + x, + y, + radiusY, + startAngle, + endAngle); + } + cairo_set_matrix(ctx, &save_matrix); +} + +#if CAIRO_VERSION >= CAIRO_VERSION_ENCODE(1, 16, 0) + +void +Context2d::BeginTag(const Napi::CallbackInfo& info) { + std::string tagName = ""; + std::string attributes = ""; + + if (info.Length() == 0) { + Napi::TypeError::New(env, "Tag name is required").ThrowAsJavaScriptException(); + return; + } else { + if (!info[0].IsString()) { + Napi::TypeError::New(env, "Tag name must be a string.").ThrowAsJavaScriptException(); + return; + } else { + tagName = info[0].As().Utf8Value(); + } + + if (info.Length() > 1) { + if (!info[1].IsString()) { + Napi::TypeError::New(env, "Attributes must be a string matching Cairo's attribute format").ThrowAsJavaScriptException(); + return; + } else { + attributes = info[1].As().Utf8Value(); + } + } + } + + cairo_tag_begin(_context, tagName.c_str(), attributes.c_str()); +} + +void +Context2d::EndTag(const Napi::CallbackInfo& info) { + if (info.Length() == 0) { + Napi::TypeError::New(env, "Tag name is required").ThrowAsJavaScriptException(); + return; + } + + if (!info[0].IsString()) { + Napi::TypeError::New(env, "Tag name must be a string.").ThrowAsJavaScriptException(); + return; + } + + std::string tagName = info[0].As().Utf8Value(); + + cairo_tag_end(_context, tagName.c_str()); +} + +#endif diff --git a/miniprogram/node_modules/canvas/src/CanvasRenderingContext2d.h b/miniprogram/node_modules/canvas/src/CanvasRenderingContext2d.h new file mode 100644 index 00000000..1d954889 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/CanvasRenderingContext2d.h @@ -0,0 +1,237 @@ +// Copyright (c) 2010 LearnBoost + +#pragma once + +#include "cairo.h" +#include "Canvas.h" +#include "color.h" +#include "napi.h" +#include +#include + +/* + * State struct. + * + * Used in conjunction with Save() / Restore() since + * cairo's gstate maintains only a single source pattern at a time. 
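+ *
+ * Editorial sketch of the behaviour this enables (assumes the standard JS
+ * canvas API bound to this class; not part of the upstream comment):
+ *
+ *   ctx.fillStyle = 'red';
+ *   ctx.save();      // pushes a copy of canvas_state_t
+ *   ctx.fillStyle = 'blue';
+ *   ctx.restore();   // pops it; fillStyle is 'red' again
+ *
+ * cairo_save()/cairo_restore() alone cannot round-trip this, hence the
+ * explicit state stack.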
+ */ + +struct canvas_state_t { + rgba_t fill = { 0, 0, 0, 1 }; + rgba_t stroke = { 0, 0, 0, 1 }; + rgba_t shadow = { 0, 0, 0, 0 }; + double shadowOffsetX = 0.; + double shadowOffsetY = 0.; + cairo_pattern_t* fillPattern = nullptr; + cairo_pattern_t* strokePattern = nullptr; + cairo_pattern_t* fillGradient = nullptr; + cairo_pattern_t* strokeGradient = nullptr; + PangoFontDescription* fontDescription = nullptr; + std::string font = "10px sans-serif"; + cairo_filter_t patternQuality = CAIRO_FILTER_GOOD; + float globalAlpha = 1.f; + int shadowBlur = 0; + text_align_t textAlignment = TEXT_ALIGNMENT_START; + text_baseline_t textBaseline = TEXT_BASELINE_ALPHABETIC; + canvas_draw_mode_t textDrawingMode = TEXT_DRAW_PATHS; + bool imageSmoothingEnabled = true; + std::string direction = "ltr"; + std::string lang = ""; + + canvas_state_t() { + fontDescription = pango_font_description_from_string("sans"); + pango_font_description_set_absolute_size(fontDescription, 10 * PANGO_SCALE); + } + + canvas_state_t(const canvas_state_t& other) { + fill = other.fill; + stroke = other.stroke; + patternQuality = other.patternQuality; + fillPattern = other.fillPattern; + strokePattern = other.strokePattern; + fillGradient = other.fillGradient; + strokeGradient = other.strokeGradient; + globalAlpha = other.globalAlpha; + textAlignment = other.textAlignment; + textBaseline = other.textBaseline; + shadow = other.shadow; + shadowBlur = other.shadowBlur; + shadowOffsetX = other.shadowOffsetX; + shadowOffsetY = other.shadowOffsetY; + textDrawingMode = other.textDrawingMode; + fontDescription = pango_font_description_copy(other.fontDescription); + font = other.font; + imageSmoothingEnabled = other.imageSmoothingEnabled; + lang = other.lang; + } + + ~canvas_state_t() { + pango_font_description_free(fontDescription); + } +}; + +/* + * Equivalent to a PangoRectangle but holds floats instead of ints + * (software pixels are stored here instead of pango units) + * + * Should be compatible with PANGO_ASCENT, PANGO_LBEARING, etc. 
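+ *
+ * Editorial note: those Pango macros only read the x/y/width/height fields of
+ * the rectangle they are handed -- e.g. PANGO_ASCENT(r) is -(r).y and
+ * PANGO_RBEARING(r) is (r).x + (r).width -- so a struct with float fields of
+ * the same names slots in unchanged.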
+ */ + +typedef struct { + float x; + float y; + float width; + float height; +} float_rectangle; + +class Context2d : public Napi::ObjectWrap { + public: + std::stack states; + canvas_state_t *state; + Context2d(const Napi::CallbackInfo& info); + static void Initialize(Napi::Env& env, Napi::Object& target); + void DrawImage(const Napi::CallbackInfo& info); + void PutImageData(const Napi::CallbackInfo& info); + void Save(const Napi::CallbackInfo& info); + void Restore(const Napi::CallbackInfo& info); + void Rotate(const Napi::CallbackInfo& info); + void Translate(const Napi::CallbackInfo& info); + void Scale(const Napi::CallbackInfo& info); + void Transform(const Napi::CallbackInfo& info); + Napi::Value GetTransform(const Napi::CallbackInfo& info); + void ResetTransform(const Napi::CallbackInfo& info); + void SetTransform(const Napi::CallbackInfo& info); + Napi::Value IsPointInPath(const Napi::CallbackInfo& info); + void BeginPath(const Napi::CallbackInfo& info); + void ClosePath(const Napi::CallbackInfo& info); + void AddPage(const Napi::CallbackInfo& info); + void Clip(const Napi::CallbackInfo& info); + void Fill(const Napi::CallbackInfo& info); + void Stroke(const Napi::CallbackInfo& info); + void FillText(const Napi::CallbackInfo& info); + void StrokeText(const Napi::CallbackInfo& info); + static Napi::Value SetFont(const Napi::CallbackInfo& info); + static Napi::Value SetFillColor(const Napi::CallbackInfo& info); + static Napi::Value SetStrokeColor(const Napi::CallbackInfo& info); + static Napi::Value SetStrokePattern(const Napi::CallbackInfo& info); + static Napi::Value SetTextAlignment(const Napi::CallbackInfo& info); + void SetLineDash(const Napi::CallbackInfo& info); + Napi::Value GetLineDash(const Napi::CallbackInfo& info); + Napi::Value MeasureText(const Napi::CallbackInfo& info); + void BezierCurveTo(const Napi::CallbackInfo& info); + void QuadraticCurveTo(const Napi::CallbackInfo& info); + void LineTo(const Napi::CallbackInfo& info); + void MoveTo(const Napi::CallbackInfo& info); + void FillRect(const Napi::CallbackInfo& info); + void StrokeRect(const Napi::CallbackInfo& info); + void ClearRect(const Napi::CallbackInfo& info); + void Rect(const Napi::CallbackInfo& info); + void RoundRect(const Napi::CallbackInfo& info); + void Arc(const Napi::CallbackInfo& info); + void ArcTo(const Napi::CallbackInfo& info); + void Ellipse(const Napi::CallbackInfo& info); + Napi::Value GetImageData(const Napi::CallbackInfo& info); + Napi::Value CreateImageData(const Napi::CallbackInfo& info); + static Napi::Value GetStrokeColor(const Napi::CallbackInfo& info); + Napi::Value CreatePattern(const Napi::CallbackInfo& info); + Napi::Value CreateLinearGradient(const Napi::CallbackInfo& info); + Napi::Value CreateRadialGradient(const Napi::CallbackInfo& info); + Napi::Value GetFormat(const Napi::CallbackInfo& info); + Napi::Value GetPatternQuality(const Napi::CallbackInfo& info); + Napi::Value GetImageSmoothingEnabled(const Napi::CallbackInfo& info); + Napi::Value GetGlobalCompositeOperation(const Napi::CallbackInfo& info); + Napi::Value GetGlobalAlpha(const Napi::CallbackInfo& info); + Napi::Value GetShadowColor(const Napi::CallbackInfo& info); + Napi::Value GetMiterLimit(const Napi::CallbackInfo& info); + Napi::Value GetLineCap(const Napi::CallbackInfo& info); + Napi::Value GetLineJoin(const Napi::CallbackInfo& info); + Napi::Value GetLineWidth(const Napi::CallbackInfo& info); + Napi::Value GetLineDashOffset(const Napi::CallbackInfo& info); + Napi::Value GetShadowOffsetX(const Napi::CallbackInfo& 
info); + Napi::Value GetShadowOffsetY(const Napi::CallbackInfo& info); + Napi::Value GetShadowBlur(const Napi::CallbackInfo& info); + Napi::Value GetAntiAlias(const Napi::CallbackInfo& info); + Napi::Value GetTextDrawingMode(const Napi::CallbackInfo& info); + Napi::Value GetQuality(const Napi::CallbackInfo& info); + Napi::Value GetCurrentTransform(const Napi::CallbackInfo& info); + Napi::Value GetFillStyle(const Napi::CallbackInfo& info); + Napi::Value GetStrokeStyle(const Napi::CallbackInfo& info); + Napi::Value GetFont(const Napi::CallbackInfo& info); + Napi::Value GetTextBaseline(const Napi::CallbackInfo& info); + Napi::Value GetTextAlign(const Napi::CallbackInfo& info); + Napi::Value GetLanguage(const Napi::CallbackInfo& info); + void SetPatternQuality(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetImageSmoothingEnabled(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetGlobalCompositeOperation(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetGlobalAlpha(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetShadowColor(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetMiterLimit(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetLineCap(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetLineJoin(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetLineWidth(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetLineDashOffset(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetShadowOffsetX(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetShadowOffsetY(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetShadowBlur(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetAntiAlias(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetTextDrawingMode(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetQuality(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetCurrentTransform(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetFillStyle(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetStrokeStyle(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetFont(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetTextBaseline(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetTextAlign(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetLanguage(const Napi::CallbackInfo& info, const Napi::Value& value); + #if CAIRO_VERSION >= CAIRO_VERSION_ENCODE(1, 16, 0) + void BeginTag(const Napi::CallbackInfo& info); + void EndTag(const Napi::CallbackInfo& info); + #endif + Napi::Value GetDirection(const Napi::CallbackInfo& info); + void SetDirection(const Napi::CallbackInfo& info, const Napi::Value& value); + inline void setContext(cairo_t *ctx) { _context = ctx; } + inline cairo_t *context(){ return _context; } + inline Canvas *canvas(){ return _canvas; } + inline bool hasShadow(); + void inline setSourceRGBA(rgba_t color); + void inline setSourceRGBA(cairo_t *ctx, rgba_t color); + void setTextPath(double x, double y); + void blur(cairo_surface_t *surface, int radius); + void shadow(void (fn)(cairo_t *cr)); + void shadowStart(); + void shadowApply(); + void savePath(); + void restorePath(); + void saveState(); + void restoreState(); + void inline setFillRule(Napi::Value value); + void fill(bool preserve = false); + void stroke(bool 
preserve = false); + void save(); + void restore(); + void setFontFromState(); + void resetState(); + inline PangoLayout *layout(){ return _layout; } + ~Context2d(); + Napi::Env env; + + private: + void _resetPersistentHandles(); + Napi::Value _getFillColor(); + Napi::Value _getStrokeColor(); + Napi::Value get_current_transform(); + void _setFillColor(Napi::Value arg); + void _setFillPattern(Napi::Value arg); + void _setStrokeColor(Napi::Value arg); + void _setStrokePattern(Napi::Value arg); + void checkFonts(); + void paintText(const Napi::CallbackInfo&, bool); + text_align_t resolveTextAlignment(); + Napi::Reference _fillStyle; + Napi::Reference _strokeStyle; + Canvas *_canvas; + cairo_t *_context = nullptr; + cairo_path_t *_path; + PangoLayout *_layout = nullptr; + int fontSerial = 1; +}; diff --git a/miniprogram/node_modules/canvas/src/CharData.h b/miniprogram/node_modules/canvas/src/CharData.h new file mode 100644 index 00000000..00fc4eff --- /dev/null +++ b/miniprogram/node_modules/canvas/src/CharData.h @@ -0,0 +1,233 @@ +// This is used for classifying characters according to the definition of tokens +// in the CSS standards, but could be extended for any other future uses + +#pragma once + +#include + +namespace CharData { + static constexpr uint8_t Whitespace = 0x1; + static constexpr uint8_t Newline = 0x2; + static constexpr uint8_t Hex = 0x4; + static constexpr uint8_t Nmstart = 0x8; + static constexpr uint8_t Nmchar = 0x10; + static constexpr uint8_t Sign = 0x20; + static constexpr uint8_t Digit = 0x40; + static constexpr uint8_t NumStart = 0x80; +}; + +using namespace CharData; + +constexpr const uint8_t charData[256] = { + 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0-8 + Whitespace, // 9 (HT) + Whitespace | Newline, // 10 (LF) + 0, // 11 (VT) + Whitespace | Newline, // 12 (FF) + Whitespace | Newline, // 13 (CR) + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 14-31 + Whitespace, // 32 (Space) + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 33-42 + Sign | NumStart, // 43 (+) + 0, // 44 + Nmchar | Sign | NumStart, // 45 (-) + 0, 0, // 46-47 + Nmchar | Digit | NumStart | Hex, // 48 (0) + Nmchar | Digit | NumStart | Hex, // 49 (1) + Nmchar | Digit | NumStart | Hex, // 50 (2) + Nmchar | Digit | NumStart | Hex, // 51 (3) + Nmchar | Digit | NumStart | Hex, // 52 (4) + Nmchar | Digit | NumStart | Hex, // 53 (5) + Nmchar | Digit | NumStart | Hex, // 54 (6) + Nmchar | Digit | NumStart | Hex, // 55 (7) + Nmchar | Digit | NumStart | Hex, // 56 (8) + Nmchar | Digit | NumStart | Hex, // 57 (9) + 0, 0, 0, 0, 0, 0, 0, // 58-64 + Nmstart | Nmchar | Hex, // 65 (A) + Nmstart | Nmchar | Hex, // 66 (B) + Nmstart | Nmchar | Hex, // 67 (C) + Nmstart | Nmchar | Hex, // 68 (D) + Nmstart | Nmchar | Hex, // 69 (E) + Nmstart | Nmchar | Hex, // 70 (F) + Nmstart | Nmchar, // 71 (G) + Nmstart | Nmchar, // 72 (H) + Nmstart | Nmchar, // 73 (I) + Nmstart | Nmchar, // 74 (J) + Nmstart | Nmchar, // 75 (K) + Nmstart | Nmchar, // 76 (L) + Nmstart | Nmchar, // 77 (M) + Nmstart | Nmchar, // 78 (N) + Nmstart | Nmchar, // 79 (O) + Nmstart | Nmchar, // 80 (P) + Nmstart | Nmchar, // 81 (Q) + Nmstart | Nmchar, // 82 (R) + Nmstart | Nmchar, // 83 (S) + Nmstart | Nmchar, // 84 (T) + Nmstart | Nmchar, // 85 (U) + Nmstart | Nmchar, // 86 (V) + Nmstart | Nmchar, // 87 (W) + Nmstart | Nmchar, // 88 (X) + Nmstart | Nmchar, // 89 (Y) + Nmstart | Nmchar, // 90 (Z) + 0, // 91 + Nmstart, // 92 (\) + 0, 0, // 93-94 + Nmstart | Nmchar, // 95 (_) + 0, // 96 + Nmstart | Nmchar | Hex, // 97 (a) + Nmstart | Nmchar | Hex, // 98 (b) + Nmstart | Nmchar | 
Hex, // 99 (c) + Nmstart | Nmchar | Hex, // 100 (d) + Nmstart | Nmchar | Hex, // 101 (e) + Nmstart | Nmchar | Hex, // 102 (f) + Nmstart | Nmchar, // 103 (g) + Nmstart | Nmchar, // 104 (h) + Nmstart | Nmchar, // 105 (i) + Nmstart | Nmchar, // 106 (j) + Nmstart | Nmchar, // 107 (k) + Nmstart | Nmchar, // 108 (l) + Nmstart | Nmchar, // 109 (m) + Nmstart | Nmchar, // 110 (n) + Nmstart | Nmchar, // 111 (o) + Nmstart | Nmchar, // 112 (p) + Nmstart | Nmchar, // 113 (q) + Nmstart | Nmchar, // 114 (r) + Nmstart | Nmchar, // 115 (s) + Nmstart | Nmchar, // 116 (t) + Nmstart | Nmchar, // 117 (u) + Nmstart | Nmchar, // 118 (v) + Nmstart | Nmchar, // 119 (w) + Nmstart | Nmchar, // 120 (x) + Nmstart | Nmchar, // 121 (y) + Nmstart | Nmchar, // 122 (z) + 0, 0, 0, 0, 0, // 123-127 + // Non-ASCII + Nmstart | Nmchar, // 128 + Nmstart | Nmchar, // 129 + Nmstart | Nmchar, // 130 + Nmstart | Nmchar, // 131 + Nmstart | Nmchar, // 132 + Nmstart | Nmchar, // 133 + Nmstart | Nmchar, // 134 + Nmstart | Nmchar, // 135 + Nmstart | Nmchar, // 136 + Nmstart | Nmchar, // 137 + Nmstart | Nmchar, // 138 + Nmstart | Nmchar, // 139 + Nmstart | Nmchar, // 140 + Nmstart | Nmchar, // 141 + Nmstart | Nmchar, // 142 + Nmstart | Nmchar, // 143 + Nmstart | Nmchar, // 144 + Nmstart | Nmchar, // 145 + Nmstart | Nmchar, // 146 + Nmstart | Nmchar, // 147 + Nmstart | Nmchar, // 148 + Nmstart | Nmchar, // 149 + Nmstart | Nmchar, // 150 + Nmstart | Nmchar, // 151 + Nmstart | Nmchar, // 152 + Nmstart | Nmchar, // 153 + Nmstart | Nmchar, // 154 + Nmstart | Nmchar, // 155 + Nmstart | Nmchar, // 156 + Nmstart | Nmchar, // 157 + Nmstart | Nmchar, // 158 + Nmstart | Nmchar, // 159 + Nmstart | Nmchar, // 160 + Nmstart | Nmchar, // 161 + Nmstart | Nmchar, // 162 + Nmstart | Nmchar, // 163 + Nmstart | Nmchar, // 164 + Nmstart | Nmchar, // 165 + Nmstart | Nmchar, // 166 + Nmstart | Nmchar, // 167 + Nmstart | Nmchar, // 168 + Nmstart | Nmchar, // 169 + Nmstart | Nmchar, // 170 + Nmstart | Nmchar, // 171 + Nmstart | Nmchar, // 172 + Nmstart | Nmchar, // 173 + Nmstart | Nmchar, // 174 + Nmstart | Nmchar, // 175 + Nmstart | Nmchar, // 176 + Nmstart | Nmchar, // 177 + Nmstart | Nmchar, // 178 + Nmstart | Nmchar, // 179 + Nmstart | Nmchar, // 180 + Nmstart | Nmchar, // 181 + Nmstart | Nmchar, // 182 + Nmstart | Nmchar, // 183 + Nmstart | Nmchar, // 184 + Nmstart | Nmchar, // 185 + Nmstart | Nmchar, // 186 + Nmstart | Nmchar, // 187 + Nmstart | Nmchar, // 188 + Nmstart | Nmchar, // 189 + Nmstart | Nmchar, // 190 + Nmstart | Nmchar, // 191 + Nmstart | Nmchar, // 192 + Nmstart | Nmchar, // 193 + Nmstart | Nmchar, // 194 + Nmstart | Nmchar, // 195 + Nmstart | Nmchar, // 196 + Nmstart | Nmchar, // 197 + Nmstart | Nmchar, // 198 + Nmstart | Nmchar, // 199 + Nmstart | Nmchar, // 200 + Nmstart | Nmchar, // 201 + Nmstart | Nmchar, // 202 + Nmstart | Nmchar, // 203 + Nmstart | Nmchar, // 204 + Nmstart | Nmchar, // 205 + Nmstart | Nmchar, // 206 + Nmstart | Nmchar, // 207 + Nmstart | Nmchar, // 208 + Nmstart | Nmchar, // 209 + Nmstart | Nmchar, // 210 + Nmstart | Nmchar, // 211 + Nmstart | Nmchar, // 212 + Nmstart | Nmchar, // 213 + Nmstart | Nmchar, // 214 + Nmstart | Nmchar, // 215 + Nmstart | Nmchar, // 216 + Nmstart | Nmchar, // 217 + Nmstart | Nmchar, // 218 + Nmstart | Nmchar, // 219 + Nmstart | Nmchar, // 220 + Nmstart | Nmchar, // 221 + Nmstart | Nmchar, // 222 + Nmstart | Nmchar, // 223 + Nmstart | Nmchar, // 224 + Nmstart | Nmchar, // 225 + Nmstart | Nmchar, // 226 + Nmstart | Nmchar, // 227 + Nmstart | Nmchar, // 228 + Nmstart | Nmchar, // 229 + Nmstart | 
Nmchar, // 230 + Nmstart | Nmchar, // 231 + Nmstart | Nmchar, // 232 + Nmstart | Nmchar, // 233 + Nmstart | Nmchar, // 234 + Nmstart | Nmchar, // 235 + Nmstart | Nmchar, // 236 + Nmstart | Nmchar, // 237 + Nmstart | Nmchar, // 238 + Nmstart | Nmchar, // 239 + Nmstart | Nmchar, // 240 + Nmstart | Nmchar, // 241 + Nmstart | Nmchar, // 242 + Nmstart | Nmchar, // 243 + Nmstart | Nmchar, // 244 + Nmstart | Nmchar, // 245 + Nmstart | Nmchar, // 246 + Nmstart | Nmchar, // 247 + Nmstart | Nmchar, // 248 + Nmstart | Nmchar, // 249 + Nmstart | Nmchar, // 250 + Nmstart | Nmchar, // 251 + Nmstart | Nmchar, // 252 + Nmstart | Nmchar, // 253 + Nmstart | Nmchar, // 254 + Nmstart | Nmchar // 255 +}; diff --git a/miniprogram/node_modules/canvas/src/Font.h b/miniprogram/node_modules/canvas/src/Font.h new file mode 100644 index 00000000..09b73afc --- /dev/null +++ b/miniprogram/node_modules/canvas/src/Font.h @@ -0,0 +1,38 @@ +// Copyright (c) 2024 Caleb Hearon +// Stuff common to all perspectives on fonts: CSS, OS fonts, querying, etc. +#pragma once + +#include +#include + +enum class FontStyle { + Normal, + Italic, + Oblique +}; + +enum class FontVariant { + Normal, + SmallCaps +}; + +// Descriptors and properties (see next comments) +struct FontBase { + uint16_t weight{400}; + FontVariant variant{FontVariant::Normal}; + FontStyle style{FontStyle::Normal}; +}; + +// Descriptors describe real fonts on the OS +struct FontDescriptor : FontBase { + std::unique_ptr family; + std::unique_ptr url = nullptr; + std::unique_ptr data = nullptr; + size_t data_len = 0; +}; + +// Properties describe desired fonts from CSS/ctx.font +struct FontProperties : FontBase { + std::vector families; + double size{16.0f}; +}; diff --git a/miniprogram/node_modules/canvas/src/FontFace.cc b/miniprogram/node_modules/canvas/src/FontFace.cc new file mode 100644 index 00000000..cfcb8646 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontFace.cc @@ -0,0 +1,161 @@ +// Copyright (c) 2024 Caleb Hearon + +#include "FontFace.h" +#include "FontParser.h" +#include "InstanceData.h" + +#include +#include + +static size_t lastId = 0; + +FontFace::FontFace(const Napi::CallbackInfo& info) : + Napi::ObjectWrap(info), + id(++lastId), + env(info.Env()) +{ + if (info.Length() < 2) { + Napi::TypeError::New(env, "Family and source arguments are required").ThrowAsJavaScriptException(); + return; + } + + if (!setFamilyInternal(info[0])) return; + + if (info[1].IsString()) { + Napi::String url = info[1].As(); + size_t len; + napi_get_value_string_utf8(env, url, nullptr, 0, &len); + len++; + descriptor.url = std::make_unique(len); + napi_get_value_string_utf8(env, url, descriptor.url.get(), len, nullptr); + } else if (info[1].IsTypedArray()) { + Napi::TypedArray ta = info[0].As(); + Napi::ArrayBuffer buf = ta.ArrayBuffer(); + descriptor.data_len = ta.ByteLength(); + descriptor.data = std::make_unique(descriptor.data_len); + std::memcpy(descriptor.data.get(), static_cast(buf.Data()) + ta.ByteOffset(), descriptor.data_len); + } else if (info[1].IsArrayBuffer()) { + Napi::ArrayBuffer buf = info[0].As(); + descriptor.data_len = buf.ByteLength(); + descriptor.data = std::make_unique(descriptor.data_len); + std::memcpy(descriptor.data.get(), buf.Data(), descriptor.data_len); + } else { + Napi::TypeError::New(env, "Source must be a string or buffer").ThrowAsJavaScriptException(); + return; + } + + if (info.Length() >= 3) { + if (!info[2].IsObject()) { + Napi::TypeError::New(env, "Descriptors must be an object").ThrowAsJavaScriptException(); + return; + } + 
Napi::Object descriptors = info[2].As(); + Napi::Value value; + if (descriptors.Has("weight").UnwrapOr(false) && descriptors.Get("weight").UnwrapTo(&value)) { + if (!setWeightInternal(value)) return; + } + if (descriptors.Has("style").UnwrapOr(false) && descriptors.Get("style").UnwrapTo(&value)) { + if (!setStyleInternal(value)) return; + } + } +} + +void +FontFace::Initialize(Napi::Env& env, Napi::Object& exports) { + InstanceData *data = env.GetInstanceData(); + Napi::Function ctor = DefineClass(env, "FontFace", { + InstanceAccessor<&FontFace::GetFamily, &FontFace::SetFamily>("family", napi_default_jsproperty), + InstanceAccessor<&FontFace::GetStyle, &FontFace::SetStyle>("style", napi_default_jsproperty), + InstanceAccessor<&FontFace::GetWeight, &FontFace::SetWeight>("weight", napi_default_jsproperty), + InstanceAccessor<&FontFace::GetStatus>("status", napi_default_jsproperty) + }); + + data->FontFaceCtor = Napi::Persistent(ctor); + exports.Set("FontFace", ctor); +} + +Napi::Value +FontFace::GetFamily(const Napi::CallbackInfo& info) { + return Napi::String::New(env, descriptor.family.get()); +} + +bool +FontFace::setFamilyInternal(const Napi::Value& value) { + // According to the specs, this is supposed to go through the CSS parser, so + // fonts starting with numbers or having special characters should actually + // throw an error. However, Safari accepts anything, and Firefox puts quotes + // around the family: https://bugzilla.mozilla.org/show_bug.cgi?id=1986533 + if (Napi::String family; value.ToString().UnwrapTo(&family)) { + size_t len; + napi_get_value_string_utf8(env, family, nullptr, 0, &len); + len++; + descriptor.family = std::make_unique(len); + napi_get_value_string_utf8(env, family, descriptor.family.get(), len, nullptr); + return true; + } + return false; +} + +void +FontFace::SetFamily(const Napi::CallbackInfo& info, const Napi::Value& value) { + setFamilyInternal(value); +} + +Napi::Value +FontFace::GetStyle(const Napi::CallbackInfo& info) { + switch (descriptor.style) { + case FontStyle::Normal: return Napi::String::New(env, "normal"); + case FontStyle::Italic: return Napi::String::New(env, "italic"); + case FontStyle::Oblique: return Napi::String::New(env, "oblique"); + } +} + +bool +FontFace::setStyleInternal(const Napi::Value& value) { + if (Napi::String style; value.ToString().UnwrapTo(&style)) { + if (auto value = FontParser::parseStyle(style.Utf8Value()); value) { + descriptor.style = *value; + return true; + } else { + Napi::TypeError::New(env, "Could not parse style").ThrowAsJavaScriptException(); + } + } + return false; +} + +void +FontFace::SetStyle(const Napi::CallbackInfo& info, const Napi::Value& value) { + setStyleInternal(value); +} + +Napi::Value +FontFace::GetWeight(const Napi::CallbackInfo& info) { + return Napi::String::New(env, std::to_string(descriptor.weight)); +} + +bool +FontFace::setWeightInternal(const Napi::Value& value) { + if (Napi::String weight; value.ToString().UnwrapTo(&weight)) { + if (auto value = FontParser::parseWeight(weight.Utf8Value()); value) { + descriptor.weight = *value; + return true; + } else { + Napi::TypeError::New(env, "Could not parse weight").ThrowAsJavaScriptException(); + } + } + return false; +} + +void +FontFace::SetWeight(const Napi::CallbackInfo& info, const Napi::Value& value) { + setWeightInternal(value); +} + +Napi::Value +FontFace::GetStatus(const Napi::CallbackInfo& info) { + switch (status) { + case Status::Unloaded: return Napi::String::New(env, "unloaded"); + case Status::Loaded: return 
Napi::String::New(env, "loaded"); + case Status::Error: return Napi::String::New(env, "error"); + } +} diff --git a/miniprogram/node_modules/canvas/src/FontFace.h b/miniprogram/node_modules/canvas/src/FontFace.h new file mode 100644 index 00000000..dedb205b --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontFace.h @@ -0,0 +1,34 @@ +// Copyright (c) 2024 Caleb Hearon +// +// TODO: ttc/otc with fragment identifier + +#pragma once + +#include +#include "Font.h" + +class FontFace : public Napi::ObjectWrap { + public: + FontFace(const Napi::CallbackInfo& info); + static void Initialize(Napi::Env& env, Napi::Object& target); + + Napi::Value GetFamily(const Napi::CallbackInfo& info); + Napi::Value GetStyle(const Napi::CallbackInfo& info); + Napi::Value GetWeight(const Napi::CallbackInfo& info); + Napi::Value GetStatus(const Napi::CallbackInfo& info); + bool setFamilyInternal(const Napi::Value& value); + void SetFamily(const Napi::CallbackInfo& info, const Napi::Value& value); + bool setStyleInternal(const Napi::Value& value); + void SetStyle(const Napi::CallbackInfo& info, const Napi::Value& value); + bool setWeightInternal(const Napi::Value& value); + void SetWeight(const Napi::CallbackInfo& info, const Napi::Value& value); + + size_t id; + + //TODO private + FontDescriptor descriptor; + private: + enum class Status { Unloaded, Loaded, Error }; + Status status = Status::Unloaded; + Napi::Env env; +}; diff --git a/miniprogram/node_modules/canvas/src/FontFaceSet.cc b/miniprogram/node_modules/canvas/src/FontFaceSet.cc new file mode 100644 index 00000000..aaa9927e --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontFaceSet.cc @@ -0,0 +1,191 @@ +// Copyright (c) 2024 Caleb Hearon + +#include + +#include "FontFaceSet.h" +#include "InstanceData.h" + +FontFaceSet::FontFaceSet(Napi::CallbackInfo& info) : + env(info.Env()), + ready(Napi::Promise::Deferred::New(info.Env())), + Napi::ObjectWrap(info) { +} + +void +FontFaceSet::Initialize(Napi::Env& env, Napi::Object& exports) { + InstanceData *data = env.GetInstanceData(); + + Napi::Symbol iteratorSymbol = Napi::Symbol::WellKnown(env, "iterator").Unwrap(); + + Napi::Function ctor = DefineClass(env, "FontFaceSet", { + InstanceMethod<&FontFaceSet::Add>("add", napi_default_method), + InstanceMethod<&FontFaceSet::Has>("has", napi_default_method), + InstanceMethod<&FontFaceSet::Clear>("clear", napi_default_method), + InstanceMethod<&FontFaceSet::Delete>("delete", napi_default_method), + InstanceMethod<&FontFaceSet::Iterator>(iteratorSymbol, napi_default_jsproperty), + InstanceAccessor<&FontFaceSet::Size>("size", napi_default_jsproperty) + }); + + Napi::Object jsFonts = ctor.New({}).Unwrap(); + FontFaceSet* cppFonts = FontFaceSet::Unwrap(jsFonts); + + // FontFaceSet is a singleton. 2/3 browsers do not allow you to construct + // FontFaceSet, against specs, which allow you to use them to load groups. 
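+  //
+  // Editorial sketch of the intended JS surface (hedged: whether the package's
+  // index.js re-exports these native bindings is outside this file):
+  //
+  //   const { FontFace, fonts } = require('canvas');
+  //   const face = new FontFace('MyFont', '/path/to/MyFont.ttf'); // hypothetical font path
+  //   fonts.add(face);   // lands in FontFaceSet::Add below
+  //   fonts.has(face);   // true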
+ data->fonts = cppFonts; + exports.Set("fonts", jsFonts); +} + +Napi::Value +FontFaceSet::Add(const Napi::CallbackInfo& info) { + InstanceData *data = env.GetInstanceData(); + + if (info.Length() == 0) { + Napi::TypeError::New(env, "face argument is required").ThrowAsJavaScriptException(); + return env.Undefined(); + } + + bool isFontFace; + Napi::Object obj; + + if (!info[0].IsObject()) { + isFontFace = false; + } else { + obj = info[0].As(); + if (!obj.InstanceOf(data->FontFaceCtor.Value()).UnwrapTo(&isFontFace)) return env.Undefined(); + } + if (!isFontFace) { + Napi::TypeError::New(env, "Expected instance of FontFace").ThrowAsJavaScriptException(); + return env.Undefined(); + } + + FontFace* face = FontFace::Unwrap(obj); + + if (!facesHash.contains(face->id)) { + facesHash.insert({face->id, facesData.size()}); + facesData.push_back({Napi::Persistent(obj), face}); + } + + return info.This(); // TODO not technically right +} + +Napi::Value +FontFaceSet::Has(const Napi::CallbackInfo& info) { + InstanceData *data = env.GetInstanceData(); + + if (info.Length() == 0) { + Napi::TypeError::New(env, "face argument is required").ThrowAsJavaScriptException(); + return env.Undefined(); + } + + bool isFontFace; + Napi::Object obj; + + if (!info[0].IsObject()) { + isFontFace = false; + } else { + obj = info[0].As(); + if (!obj.InstanceOf(data->FontFaceCtor.Value()).UnwrapTo(&isFontFace)) return env.Undefined(); + } + if (!isFontFace) { + Napi::TypeError::New(env, "Expected instance of FontFace").ThrowAsJavaScriptException(); + return env.Undefined(); + } + + FontFace* face = FontFace::Unwrap(obj); + + return facesHash.find(face->id) != facesHash.end() + ? Napi::Boolean::New(env, true) + : Napi::Boolean::New(env, false); +} + +void +FontFaceSet::Clear(const Napi::CallbackInfo& info) { + for (auto& entry : facesData) { + entry.face = nullptr; + entry.ref.Reset(); + } + facesHash.clear(); +} + +Napi::Value +FontFaceSet::Delete(const Napi::CallbackInfo& info) { + InstanceData *data = env.GetInstanceData(); + + if (info.Length() == 0) { + Napi::TypeError::New(env, "face argument is required").ThrowAsJavaScriptException(); + return env.Undefined(); + } + + bool isFontFace; + Napi::Object obj; + + if (!info[0].IsObject()) { + isFontFace = false; + } else { + obj = info[0].As(); + if (!obj.InstanceOf(data->FontFaceCtor.Value()).UnwrapTo(&isFontFace)) return env.Undefined(); + } + if (!isFontFace) { + Napi::TypeError::New(env, "Expected instance of FontFace").ThrowAsJavaScriptException(); + return env.Undefined(); + } + + FontFace* face = FontFace::Unwrap(obj); + + if (auto it = facesHash.find(face->id); it != facesHash.end()) { + facesData[it->second].face = nullptr; + facesData[it->second].ref.Reset(); + facesHash.erase(it); + loaded.erase(face->id); + failed.erase(face->id); + return Napi::Boolean::New(env, true); + } else { + return Napi::Boolean::New(env, false); + } +} + +struct IteratorContext { + // There is only ever one FontFaceSet so this should always be valid + FontFaceSet* set; + size_t index; + + static void finalize(Napi::Env env, IteratorContext* ctx) { + delete ctx; + } + + static Napi::Value next(const Napi::CallbackInfo& info) { + Napi::Env env = info.Env(); + IteratorContext* ctx = static_cast(info.Data()); + FontFaceSet& set = *(ctx->set); + + Napi::Object ret = Napi::Object::New(env); + + // Skip past empty slots + for (size_t i = ctx->index; i < set.facesData.size(); i++) { + auto& entry = set.facesData[ctx->index++]; + if (entry.face != nullptr) { + ret.Set("done", 
Napi::Boolean::New(env, false)); + ret.Set("value", entry.ref.Value()); + return ret; + } + } + + ret.Set("done", Napi::Boolean::New(env, true)); + return ret; + } +}; + +Napi::Value +FontFaceSet::Iterator(const Napi::CallbackInfo& info) { + Napi::Object iterator = Napi::Object::New(env); + IteratorContext* ctx = new IteratorContext{this, 0}; + iterator.AddFinalizer(IteratorContext::finalize, ctx); + Napi::Function next = Napi::Function::New(env, IteratorContext::next, "next", ctx); + iterator["next"] = next; + return iterator; +} + +Napi::Value +FontFaceSet::Size(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, facesHash.size()); +} diff --git a/miniprogram/node_modules/canvas/src/FontFaceSet.h b/miniprogram/node_modules/canvas/src/FontFaceSet.h new file mode 100644 index 00000000..035ff3ef --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontFaceSet.h @@ -0,0 +1,43 @@ +// Copyright (c) 2024 Caleb Hearon + +#pragma once + +#include +#include +#include + +#include "FontFace.h" + +static bool ref_compare(const FontFace* a, const FontFace* b) { + return a->id < b->id; +} + +struct FontFaceSetEntry { + Napi::ObjectReference ref; + FontFace* face; +}; + +class FontFaceSet : public Napi::ObjectWrap { + public: + FontFaceSet(Napi::CallbackInfo& info); + static void Initialize(Napi::Env& env, Napi::Object& exports); + + Napi::Value Add(const Napi::CallbackInfo& info); + Napi::Value Has(const Napi::CallbackInfo& info); + void Clear(const Napi::CallbackInfo& info); + Napi::Value Delete(const Napi::CallbackInfo& info); + Napi::Value Iterator(const Napi::CallbackInfo& info); + Napi::Value Size(const Napi::CallbackInfo& info); + + // Iteration of faces must be safe. We'll achieve this by iterating all keys + // ever inserted, which is how the EcmaScript standards say to iterate Sets. + std::unordered_map facesHash; + std::vector facesData; + + private: + std::set loading; + std::set loaded; + std::set failed; + Napi::Promise::Deferred ready; + Napi::Env env; +}; diff --git a/miniprogram/node_modules/canvas/src/FontManager.cc b/miniprogram/node_modules/canvas/src/FontManager.cc new file mode 100644 index 00000000..f7a84b80 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontManager.cc @@ -0,0 +1,197 @@ +#include +#include + +#include "FontManager.h" +#include "FontFaceSet.h" + +bool +compareFamilyNames(const char* str1, size_t len1, const char* str2, size_t len2) { + size_t start1 = 0; + size_t end1 = len1; + size_t start2 = 0; + size_t end2 = len2; + + while (start1 < len1 && std::isspace(str1[start1])) start1++; + while (end1 > start1 && std::isspace(str1[end1 - 1])) end1--; + + while (start2 < len2 && std::isspace(str2[start2])) start2++; + while (end2 > start2 && std::isspace(str2[end2 - 1])) end2--; + + if (end1 - start1 != end2 - start2) return false; + + for (size_t i = 0; i < end1 - start1; i++) { + if (std::tolower(str1[start1 + i]) != std::tolower(str2[start2 + i])) { + return false; + } + } + + return true; +} + +void +FontManager::narrowByStyle( + std::vector& fonts, + FontProperties& properties +) { + size_t nNormal = 0; + size_t nItalic = 0; + size_t nOblique = 0; + + assert(fonts.size() > 1 && "Precondition failed: 1 or 0 fonts in the set"); + + for (const FontDescriptor* font : fonts) { + switch (font->style) { + case FontStyle::Normal: nNormal++; break; + case FontStyle::Italic: nItalic++; break; + case FontStyle::Oblique: nOblique++; break; + } + } + + FontStyle choose; + switch (properties.style) { + case FontStyle::Normal: + choose = nNormal ? 
FontStyle::Normal : nOblique ? FontStyle::Oblique : FontStyle::Italic; + break; + case FontStyle::Italic: + choose = nItalic ? FontStyle::Italic : nOblique ? FontStyle::Oblique : FontStyle::Normal; + break; + case FontStyle::Oblique: + choose = nOblique ? FontStyle::Oblique : nItalic ? FontStyle::Italic : FontStyle::Normal; + break; + } + + for (size_t i = 0; i < fonts.size(); i++) { + if (fonts[i]->style != choose) { + std::swap(fonts[i], fonts[fonts.size() - 1]); + fonts.pop_back(); + } + } +} + +const FontDescriptor* +FontManager::narrowByWeight( + std::vector fonts, + FontProperties& properties +) { + std::sort( + fonts.begin(), + fonts.end(), + [](const FontDescriptor* a, const FontDescriptor* b) { + return a->weight < b->weight; + } + ); + + assert(fonts.size() && "Precondition failed: 1 or 0 fonts in the set"); + + for (const FontDescriptor* font : fonts) { + if (font->weight == properties.weight) { + return font; + } + } + + const FontDescriptor* bestBelow = nullptr; + size_t bestBelowDistance = 900; // max possible is 800 + const FontDescriptor* bestAbove = nullptr; + size_t bestAboveDistance = 900; + size_t divider = properties.weight == 400 ? 500 + : properties.weight == 500 ? 400 + : properties.weight; + + for (const FontDescriptor* font : fonts) { + size_t distance = font->weight < properties.weight + ? properties.weight - font->weight + : font->weight - properties.weight; + + if (font->weight < divider) { + if (distance <= bestBelowDistance) { + bestBelow = font; + bestBelowDistance = distance; + } + } else { + if (distance < bestAboveDistance) { + bestAbove = font; + bestAboveDistance = distance; + } + } + } + + if (bestBelow && bestAbove) { + return divider <= 500 ? bestBelow : bestAbove; + } else { + return bestBelow ? bestBelow : bestAbove; + } +} + +/** + * NOTE: the FontDescriptor is owned by the FontManager; do not use it again! 
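+ *
+ * Editorial worked example of the weight narrowing above (not upstream text):
+ * querying weight 400 against a family that only ships 300 and 700 faces
+ * returns the 300 face, because desired weights up to 500 prefer the best
+ * candidate below the divider before anything heavier; querying 600 against
+ * the same family returns 700, since desired weights above 500 prefer the
+ * heavier side first.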
+ */ +std::vector +FontManager::query( + FontProperties& properties, + FontFaceSet* registered, + std::vector& fallbacks +) { + std::vector allFamilyResults; + std::vector familyResults; + + if (!system_fonts_loaded) { + readSystemFonts(system_fonts); + system_fonts_loaded = true; + } + + auto maybeAdd = [&](const std::string& family, const FontDescriptor* desc) { + if ( + compareFamilyNames( + family.c_str(), + family.size(), + desc->family.get(), + strlen(desc->family.get()) + ) && std::find( + familyResults.begin(), + familyResults.end(), + desc + ) == familyResults.end() + ) familyResults.push_back(desc); + }; + + for (const std::string& family : properties.families) { + auto genericFamilies = getGenericList(family); + if (genericFamilies) { + for (const std::string& family : **genericFamilies) { + for (const FontDescriptor& desc : system_fonts) { + maybeAdd(family, &desc); + } + } + } else { + for (auto& entry : registered->facesData) { + if (entry.face != nullptr) maybeAdd(family, &(entry.face->descriptor)); + } + for (const FontDescriptor& desc : system_fonts) { + maybeAdd(family, &desc); + } + } + + if (familyResults.size() == 1) { + allFamilyResults.push_back(familyResults[0]); + familyResults.clear(); + } else if (familyResults.size() > 1) { + narrowByStyle(familyResults, properties); + allFamilyResults.push_back(narrowByWeight(familyResults, properties)); + familyResults.clear(); + } + } + + for (const std::string& fallback : fallbacks) { + for (const FontDescriptor& desc : system_fonts) { + maybeAdd(fallback, &desc); + } + + if (familyResults.size() > 1) { + narrowByStyle(familyResults, properties); + allFamilyResults.push_back(narrowByWeight(familyResults, properties)); + familyResults.clear(); + } + } + + return allFamilyResults; +} diff --git a/miniprogram/node_modules/canvas/src/FontManager.h b/miniprogram/node_modules/canvas/src/FontManager.h new file mode 100644 index 00000000..19086c3e --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontManager.h @@ -0,0 +1,45 @@ +#pragma once + +#include + +#include "Font.h" +#include "FontFaceSet.h" +#include "unicode.h" + +class FontManager { + public: + virtual ~FontManager() = default; + + virtual void readSystemFonts( + std::vector& results + ) = 0; + + virtual void populateFallbackFonts( + std::vector& families, + script_t script + ) = 0; + + virtual std::optional*> getGenericList( + const std::string& generic + ) = 0; + + std::vector query( + FontProperties& properties, + FontFaceSet* registered, + std::vector& fallbacks + ); + + private: + void narrowByStyle( + std::vector& fonts, + FontProperties& properties + ); + + const FontDescriptor* narrowByWeight( + std::vector fonts, + FontProperties& properties + ); + + bool system_fonts_loaded = false; + std::vector system_fonts; +}; diff --git a/miniprogram/node_modules/canvas/src/FontManagerMacos.cc b/miniprogram/node_modules/canvas/src/FontManagerMacos.cc new file mode 100644 index 00000000..20ac5010 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontManagerMacos.cc @@ -0,0 +1,640 @@ +// Copyright (c) 2025 Caleb Hearon +// +// References: +// - https://github.com/foliojs/font-manager +// - https://searchfox.org/firefox-main/rev/30ea9a2fd7271e9c731df414bd80e46edc3190eb/gfx/thebes/CoreTextFontList.cpp + +#include +#include +#include +#include +#include +#include + +#include "FontManagerMacos.h" +#include "Font.h" +#include "unicode.h" + +// Forward declarations for Objective-C types we need +typedef void NSString; +typedef void NSURL; +typedef void NSArray; + +const 
uint16_t MAX_STYLE_LENGTH = 128; // like "Bold Italic", so should never be big + +inline double round(double aNum) { + return aNum >= 0.0 ? std::floor(aNum + 0.5) : std::ceil(aNum - 0.5); +} + +// https://searchfox.org/firefox-main/rev/30ea9a2fd7271e9c731df414bd80e46edc3190eb/gfx/thebes/CoreTextFontList.cpp#770 +static uint32_t convertWeight(float aCTWeight) { + constexpr std::pair kCoreTextToCSSWeights[] = { + {-1.0, 1}, + {-0.8, 100}, + {-0.6, 200}, + {-0.4, 300}, + {0.0, 400}, // standard 'regular' weight + {0.23, 500}, + {0.3, 600}, + {0.4, 700}, // standard 'bold' weight + {0.56, 800}, + {0.62, 900}, // Core Text seems to return 0.62 for faces with both + // usWeightClass=800 and 900 in their OS/2 tables! + // We use 900 as there are also fonts that return 0.56, + // so we want an intermediate value for that. + {1.0, 1000} + }; + const auto* begin = &kCoreTextToCSSWeights[0]; + const auto* end = begin + std::size(kCoreTextToCSSWeights); + auto m = std::upper_bound( + begin, + end, + aCTWeight, + [](CGFloat aValue, const std::pair& aMapping) { + return aValue <= aMapping.first; + } + ); + + if (m == end) return 1000; + if (m->first == aCTWeight || m == begin) return m->second; + // Interpolate between the preceding and found entries: + const auto* prev = m - 1; + const auto t = (aCTWeight - prev->first) / (m->first - prev->first); + return round(prev->second * (1.0 - t) + m->second * t); +} + +void +create_font_descriptor( + std::vector& results, + CTFontDescriptorRef descriptor +) { + FontDescriptor desc; + + // TODO: these all need null-checked... + NSURL *nsUrl = (NSURL *) CTFontDescriptorCopyAttribute(descriptor, kCTFontURLAttribute); + CFStringRef nsPath = CFURLCopyFileSystemPath((CFURLRef)nsUrl, kCFURLPOSIXPathStyle); + NSString *nsFamily = (NSString *) CTFontDescriptorCopyAttribute(descriptor, kCTFontFamilyNameAttribute); + NSString *nsStyle = (NSString *) CTFontDescriptorCopyAttribute(descriptor, kCTFontStyleNameAttribute); + CFDictionaryRef nsTraits = (CFDictionaryRef) CTFontDescriptorCopyAttribute(descriptor, kCTFontTraitsAttribute); + + // weight + CFNumberRef weightVal = (CFNumberRef) CFDictionaryGetValue(nsTraits, kCTFontWeightTrait); + float weightValue; + CFNumberGetValue(weightVal, kCFNumberFloatType, &weightValue); + desc.weight = (uint32_t) convertWeight(weightValue); + + // file path + CFIndex pathLength = CFStringGetLength(nsPath) * 2 + 1; + desc.url = std::make_unique(pathLength); + CFStringGetCString(nsPath, desc.url.get(), pathLength, kCFStringEncodingUTF8); + + // family name + CFIndex familyLength = CFStringGetLength((CFStringRef)nsFamily) * 2 + 1; + std::unique_ptr family = std::make_unique(familyLength); + CFStringGetCString((CFStringRef)nsFamily, family.get(), familyLength, kCFStringEncodingUTF8); + desc.family = std::move(family); + + // style + CFNumberRef symbolicTraitsVal = (CFNumberRef)CFDictionaryGetValue(nsTraits, kCTFontSymbolicTrait); + unsigned int symbolicTraits; + CFNumberGetValue(symbolicTraitsVal, kCFNumberIntType, &symbolicTraits); + desc.style = FontStyle::Normal; + if (symbolicTraits & kCTFontItalicTrait) { + desc.style = FontStyle::Italic; + } else { + char styleBuffer[MAX_STYLE_LENGTH]; + CFStringGetCString((CFStringRef)nsStyle, styleBuffer, MAX_STYLE_LENGTH, kCFStringEncodingUTF8); + if (strstr(styleBuffer, "Oblique") != NULL) desc.style = FontStyle::Oblique; + } + + results.push_back(std::move(desc)); + + CFRelease(nsUrl); + CFRelease(nsPath); + CFRelease(nsFamily); + CFRelease(nsStyle); + CFRelease(nsTraits); +} + +void 
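To make the piecewise mapping in `convertWeight` concrete, a hand-worked check against the table above (arithmetic only, not taken from the project's tests): a Core Text weight of 0.35 lands between the (0.3, 600) and (0.4, 700) entries, so t = (0.35 - 0.3) / (0.4 - 0.3) = 0.5 and the interpolated CSS weight rounds to 650.

// Assuming convertWeight() is exercised from the same translation unit
// (it is file-static), the expected results are:
//   convertWeight(0.0f)  -> 400   (exact table entry, 'regular')
//   convertWeight(0.4f)  -> 700   (exact table entry, 'bold')
//   convertWeight(0.35f) -> 650   (linear interpolation between 600 and 700)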
FontManagerMacos::readSystemFonts(std::vector& results) { + static CTFontCollectionRef collection = NULL; + if (collection == NULL) collection = CTFontCollectionCreateFromAvailableFonts(NULL); + + NSArray *matches = (NSArray *) CTFontCollectionCreateMatchingFontDescriptors(collection); + CFIndex count = CFArrayGetCount((CFArrayRef) matches); + + results.reserve(count); + + for (CFIndex i = 0; i < count; i++) { + CTFontDescriptorRef match = (CTFontDescriptorRef)CFArrayGetValueAtIndex((CFArrayRef)matches, i); + create_font_descriptor(results, match); + } + + CFRelease(matches); +} + +void FontManagerMacos::populateFallbackFonts( + std::vector& families, + script_t script +) { + switch (script) { + case SCRIPT_COMMON: + case SCRIPT_INHERITED: + // In most cases, COMMON and INHERITED characters will be merged into + // their context, but if they occur without any specific script context + // we'll just try common default fonts here. + case SCRIPT_LATIN: + case SCRIPT_CYRILLIC: + case SCRIPT_GREEK: + families.push_back("Lucida Grande"); + break; + + // CJK-related script codes are a bit troublesome because of unification; + // we'll probably just get HAN much of the time, so the choice of which + // language font to try for fallback is rather arbitrary. Usually, though, + // we hope that font prefs will have handled this earlier. + case SCRIPT_BOPOMOFO: + case SCRIPT_HAN: + families.push_back("Songti SC"); + families.push_back("SimSun-ExtB"); + break; + + case SCRIPT_HIRAGANA: + case SCRIPT_KATAKANA: + families.push_back("Hiragino Sans"); + families.push_back("Hiragino Kaku Gothic ProN"); + break; + + case SCRIPT_HANGUL: + families.push_back("Nanum Gothic"); + families.push_back("Apple SD Gothic Neo"); + break; + + // For most other scripts, macOS comes with a default font we can use. 
+ case SCRIPT_ARABIC: + families.push_back("Geeza Pro"); + break; + case SCRIPT_ARMENIAN: + families.push_back("Mshtakan"); + break; + case SCRIPT_BENGALI: + families.push_back("Bangla Sangam MN"); + break; + case SCRIPT_CHEROKEE: + families.push_back("Plantagenet Cherokee"); + break; + case SCRIPT_COPTIC: + families.push_back("Noto Sans Coptic"); + break; + case SCRIPT_DESERET: + families.push_back("Baskerville"); + break; + case SCRIPT_DEVANAGARI: + families.push_back("Devanagari Sangam MN"); + break; + case SCRIPT_ETHIOPIC: + families.push_back("Kefa"); + break; + case SCRIPT_GEORGIAN: + families.push_back("Helvetica"); + break; + case SCRIPT_GOTHIC: + families.push_back("Noto Sans Gothic"); + break; + case SCRIPT_GUJARATI: + families.push_back("Gujarati Sangam MN"); + break; + case SCRIPT_GURMUKHI: + families.push_back("Gurmukhi MN"); + break; + case SCRIPT_HEBREW: + families.push_back("Lucida Grande"); + break; + case SCRIPT_KANNADA: + families.push_back("Kannada MN"); + break; + case SCRIPT_KHMER: + families.push_back("Khmer MN"); + break; + case SCRIPT_LAO: + families.push_back("Lao MN"); + break; + case SCRIPT_MALAYALAM: + families.push_back("Malayalam Sangam MN"); + break; + case SCRIPT_MONGOLIAN: + families.push_back("Noto Sans Mongolian"); + break; + case SCRIPT_MYANMAR: + families.push_back("Myanmar MN"); + break; + case SCRIPT_OGHAM: + families.push_back("Noto Sans Ogham"); + break; + case SCRIPT_OLD_ITALIC: + families.push_back("Noto Sans Old Italic"); + break; + case SCRIPT_ORIYA: + families.push_back("Oriya Sangam MN"); + break; + case SCRIPT_RUNIC: + families.push_back("Noto Sans Runic"); + break; + case SCRIPT_SINHALA: + families.push_back("Sinhala Sangam MN"); + break; + case SCRIPT_SYRIAC: + families.push_back("Noto Sans Syriac"); + break; + case SCRIPT_TAMIL: + families.push_back("Tamil MN"); + break; + case SCRIPT_TELUGU: + families.push_back("Telugu MN"); + break; + case SCRIPT_THAANA: + families.push_back("Noto Sans Thaana"); + break; + case SCRIPT_THAI: + families.push_back("Thonburi"); + break; + case SCRIPT_TIBETAN: + families.push_back("Kailasa"); + break; + case SCRIPT_CANADIAN_ABORIGINAL: + families.push_back("Euphemia UCAS"); + break; + case SCRIPT_YI: + families.push_back("Noto Sans Yi"); + families.push_back("STHeiti"); + break; + case SCRIPT_TAGALOG: + families.push_back("Noto Sans Tagalog"); + break; + case SCRIPT_HANUNOO: + families.push_back("Noto Sans Hanunoo"); + break; + case SCRIPT_BUHID: + families.push_back("Noto Sans Buhid"); + break; + case SCRIPT_TAGBANWA: + families.push_back("Noto Sans Tagbanwa"); + break; + case SCRIPT_BRAILLE: + families.push_back("Apple Braille"); + break; + case SCRIPT_CYPRIOT: + families.push_back("Noto Sans Cypriot"); + break; + case SCRIPT_LIMBU: + families.push_back("Noto Sans Limbu"); + break; + case SCRIPT_LINEAR_B: + families.push_back("Noto Sans Linear B"); + break; + case SCRIPT_OSMANYA: + families.push_back("Noto Sans Osmanya"); + break; + case SCRIPT_SHAVIAN: + families.push_back("Noto Sans Shavian"); + break; + case SCRIPT_TAI_LE: + families.push_back("Noto Sans Tai Le"); + break; + case SCRIPT_UGARITIC: + families.push_back("Noto Sans Ugaritic"); + break; + case SCRIPT_BUGINESE: + families.push_back("Noto Sans Buginese"); + break; + case SCRIPT_GLAGOLITIC: + families.push_back("Noto Sans Glagolitic"); + break; + case SCRIPT_KHAROSHTHI: + families.push_back("Noto Sans Kharoshthi"); + break; + case SCRIPT_SYLOTI_NAGRI: + families.push_back("Noto Sans Syloti Nagri"); + break; + case SCRIPT_NEW_TAI_LUE: + 
families.push_back("Noto Sans New Tai Lue"); + break; + case SCRIPT_TIFINAGH: + families.push_back("Noto Sans Tifinagh"); + break; + case SCRIPT_OLD_PERSIAN: + families.push_back("Noto Sans Old Persian"); + break; + case SCRIPT_BALINESE: + families.push_back("Noto Sans Balinese"); + break; + case SCRIPT_BATAK: + families.push_back("Noto Sans Batak"); + break; + case SCRIPT_BRAHMI: + families.push_back("Noto Sans Brahmi"); + break; + case SCRIPT_CHAM: + families.push_back("Noto Sans Cham"); + break; + case SCRIPT_EGYPTIAN_HIEROGLYPHS: + families.push_back("Noto Sans Egyptian Hieroglyphs"); + break; + case SCRIPT_PAHAWH_HMONG: + families.push_back("Noto Sans Pahawh Hmong"); + break; + case SCRIPT_OLD_HUNGARIAN: + families.push_back("Noto Sans Old Hungarian"); + break; + case SCRIPT_JAVANESE: + families.push_back("Noto Sans Javanese"); + break; + case SCRIPT_KAYAH_LI: + families.push_back("Noto Sans Kayah Li"); + break; + case SCRIPT_LEPCHA: + families.push_back("Noto Sans Lepcha"); + break; + case SCRIPT_LINEAR_A: + families.push_back("Noto Sans Linear A"); + break; + case SCRIPT_MANDAIC: + families.push_back("Noto Sans Mandaic"); + break; + case SCRIPT_NKO: + families.push_back("Noto Sans NKo"); + break; + case SCRIPT_OLD_TURKIC: + families.push_back("Noto Sans Old Turkic"); + break; + case SCRIPT_OLD_PERMIC: + families.push_back("Noto Sans Old Permic"); + break; + case SCRIPT_PHAGS_PA: + families.push_back("Noto Sans PhagsPa"); + break; + case SCRIPT_PHOENICIAN: + families.push_back("Noto Sans Phoenician"); + break; + case SCRIPT_MIAO: + families.push_back("Noto Sans Miao"); + break; + case SCRIPT_VAI: + families.push_back("Noto Sans Vai"); + break; + case SCRIPT_CUNEIFORM: + families.push_back("Noto Sans Cuneiform"); + break; + case SCRIPT_CARIAN: + families.push_back("Noto Sans Carian"); + break; + case SCRIPT_TAI_THAM: + families.push_back("Noto Sans Tai Tham"); + break; + case SCRIPT_LYCIAN: + families.push_back("Noto Sans Lycian"); + break; + case SCRIPT_LYDIAN: + families.push_back("Noto Sans Lydian"); + break; + case SCRIPT_OL_CHIKI: + families.push_back("Noto Sans Ol Chiki"); + break; + case SCRIPT_REJANG: + families.push_back("Noto Sans Rejang"); + break; + case SCRIPT_SAURASHTRA: + families.push_back("Noto Sans Saurashtra"); + break; + case SCRIPT_SUNDANESE: + families.push_back("Noto Sans Sundanese"); + break; + case SCRIPT_MEETEI_MAYEK: + families.push_back("Noto Sans Meetei Mayek"); + break; + case SCRIPT_IMPERIAL_ARAMAIC: + families.push_back("Noto Sans Imperial Aramaic"); + break; + case SCRIPT_AVESTAN: + families.push_back("Noto Sans Avestan"); + break; + case SCRIPT_CHAKMA: + families.push_back("Noto Sans Chakma"); + break; + case SCRIPT_KAITHI: + families.push_back("Noto Sans Kaithi"); + break; + case SCRIPT_MANICHAEAN: + families.push_back("Noto Sans Manichaean"); + break; + case SCRIPT_INSCRIPTIONAL_PAHLAVI: + families.push_back("Noto Sans Inscriptional Pahlavi"); + break; + case SCRIPT_PSALTER_PAHLAVI: + families.push_back("Noto Sans Psalter Pahlavi"); + break; + case SCRIPT_INSCRIPTIONAL_PARTHIAN: + families.push_back("Noto Sans Inscriptional Parthian"); + break; + case SCRIPT_SAMARITAN: + families.push_back("Noto Sans Samaritan"); + break; + case SCRIPT_TAI_VIET: + families.push_back("Noto Sans Tai Viet"); + break; + case SCRIPT_BAMUM: + families.push_back("Noto Sans Bamum"); + break; + case SCRIPT_LISU: + families.push_back("Noto Sans Lisu"); + break; + case SCRIPT_OLD_SOUTH_ARABIAN: + families.push_back("Noto Sans Old South Arabian"); + break; + case 
SCRIPT_BASSA_VAH: + families.push_back("Noto Sans Bassa Vah"); + break; + case SCRIPT_DUPLOYAN: + families.push_back("Noto Sans Duployan"); + break; + case SCRIPT_ELBASAN: + families.push_back("Noto Sans Elbasan"); + break; + case SCRIPT_GRANTHA: + families.push_back("Noto Sans Grantha"); + break; + case SCRIPT_MENDE_KIKAKUI: + families.push_back("Noto Sans Mende Kikakui"); + break; + case SCRIPT_MEROITIC_CURSIVE: + case SCRIPT_MEROITIC_HIEROGLYPHS: + families.push_back("Noto Sans Meroitic"); + break; + case SCRIPT_OLD_NORTH_ARABIAN: + families.push_back("Noto Sans Old North Arabian"); + break; + case SCRIPT_NABATAEAN: + families.push_back("Noto Sans Nabataean"); + break; + case SCRIPT_PALMYRENE: + families.push_back("Noto Sans Palmyrene"); + break; + case SCRIPT_KHUDAWADI: + families.push_back("Noto Sans Khudawadi"); + break; + case SCRIPT_WARANG_CITI: + families.push_back("Noto Sans Warang Citi"); + break; + case SCRIPT_MRO: + families.push_back("Noto Sans Mro"); + break; + case SCRIPT_SHARADA: + families.push_back("Noto Sans Sharada"); + break; + case SCRIPT_SORA_SOMPENG: + families.push_back("Noto Sans Sora Sompeng"); + break; + case SCRIPT_TAKRI: + families.push_back("Noto Sans Takri"); + break; + case SCRIPT_KHOJKI: + families.push_back("Noto Sans Khojki"); + break; + case SCRIPT_TIRHUTA: + families.push_back("Noto Sans Tirhuta"); + break; + case SCRIPT_CAUCASIAN_ALBANIAN: + families.push_back("Noto Sans Caucasian Albanian"); + break; + case SCRIPT_MAHAJANI: + families.push_back("Noto Sans Mahajani"); + break; + case SCRIPT_AHOM: + families.push_back("Noto Serif Ahom"); + break; + case SCRIPT_HATRAN: + families.push_back("Noto Sans Hatran"); + break; + case SCRIPT_MODI: + families.push_back("Noto Sans Modi"); + break; + case SCRIPT_MULTANI: + families.push_back("Noto Sans Multani"); + break; + case SCRIPT_PAU_CIN_HAU: + families.push_back("Noto Sans Pau Cin Hau"); + break; + case SCRIPT_SIDDHAM: + families.push_back("Noto Sans Siddham"); + break; + case SCRIPT_ADLAM: + families.push_back("Noto Sans Adlam"); + break; + case SCRIPT_BHAIKSUKI: + families.push_back("Noto Sans Bhaiksuki"); + break; + case SCRIPT_MARCHEN: + families.push_back("Noto Sans Marchen"); + break; + case SCRIPT_NEWA: + families.push_back("Noto Sans Newa"); + break; + case SCRIPT_OSAGE: + families.push_back("Noto Sans Osage"); + break; + case SCRIPT_HANIFI_ROHINGYA: + families.push_back("Noto Sans Hanifi Rohingya"); + break; + case SCRIPT_WANCHO: + families.push_back("Noto Sans Wancho"); + break; + + // Script codes for which no commonly-installed font is currently known. + // Probably future macOS versions will add Noto fonts for many of these, + // so we should watch for updates. 
+ case SCRIPT_NONE: + case SCRIPT_NUSHU: + case SCRIPT_TANGUT: + case SCRIPT_ANATOLIAN_HIEROGLYPHS: + case SCRIPT_MASARAM_GONDI: + case SCRIPT_SOYOMBO: + case SCRIPT_ZANABAZAR_SQUARE: + case SCRIPT_DOGRA: + case SCRIPT_GUNJALA_GONDI: + case SCRIPT_MAKASAR: + case SCRIPT_MEDEFAIDRIN: + case SCRIPT_SOGDIAN: + case SCRIPT_OLD_SOGDIAN: + case SCRIPT_ELYMAIC: + case SCRIPT_NYIAKENG_PUACHUE_HMONG: + case SCRIPT_NANDINAGARI: + case SCRIPT_CHORASMIAN: + case SCRIPT_DIVES_AKURU: + case SCRIPT_KHITAN_SMALL_SCRIPT: + case SCRIPT_YEZIDI: + case SCRIPT_CYPRO_MINOAN: + case SCRIPT_OLD_UYGHUR: + case SCRIPT_TANGSA: + case SCRIPT_TOTO: + case SCRIPT_VITHKUQI: + case SCRIPT_KAWI: + case SCRIPT_NAG_MUNDARI: + case SCRIPT_GARAY: + case SCRIPT_GURUNG_KHEMA: + case SCRIPT_KIRAT_RAI: + case SCRIPT_OL_ONAL: + case SCRIPT_SIGNWRITING: + case SCRIPT_SUNUWAR: + case SCRIPT_TODHRI: + case SCRIPT_TULU_TIGALARI: + break; + } + + // TODO: Color Emoji should depend on if the default presentation for the + // codepoint is color or if a VS16 selector is present. + + families.push_back("Apple Color Emoji"); + + // TODO: Firefox makes the middle these 6 conditional on the codepoint. + // When users try to paint text that isn't in the first few families, this + // is going to be slower than it needs to be. Original Firefox comment next... + // + // Symbols/dingbats are generally Script=COMMON but may be resolved to any + // surrounding script run. So we'll always append a couple of likely fonts + // for such characters. + families.push_back("Zapf Dingbats"); + families.push_back("Geneva"); + families.push_back("STIXGeneral"); + families.push_back("Apple Symbols"); + // Japanese fonts also cover a lot of miscellaneous symbols + families.push_back("Hiragino Sans"); + families.push_back("Hiragino Kaku Gothic ProN"); + + // Arial Unicode MS has lots of glyphs for obscure characters; try it as a + // last resort. 
+ families.push_back("Arial Unicode MS"); +} + +// See the preferences font.name-list.*.x-western in Firefox +const std::vector serif_fonts = {"Times", "Times New Roman"}; +const std::vector sans_serif_fonts = {"Helvetica", "Arial"}; +const std::vector monospace_fonts = {"Menlo"}; +const std::vector cursive_fonts = {"Apple Chancery"}; +const std::vector fantasy_fonts = {"Papyrus"}; + +std::optional*> +FontManagerMacos::getGenericList(const std::string& generic) { + if (generic == "serif") { + return &serif_fonts; + } else if (generic == "sans-serif") { + return &sans_serif_fonts; + } else if (generic == "monospace") { + return &monospace_fonts; + } else if (generic == "cursive") { + return &cursive_fonts; + } else if (generic == "fantasy") { + return &fantasy_fonts; + } else { + return std::nullopt; + } +} diff --git a/miniprogram/node_modules/canvas/src/FontManagerMacos.h b/miniprogram/node_modules/canvas/src/FontManagerMacos.h new file mode 100644 index 00000000..ac6696db --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontManagerMacos.h @@ -0,0 +1,11 @@ +#pragma once + +#include "FontManager.h" +#include + +class FontManagerMacos : public FontManager { + public: + void readSystemFonts(std::vector& properties) override; + void populateFallbackFonts(std::vector& families, script_t script) override; + std::optional*> getGenericList(const std::string& generic) override; +}; diff --git a/miniprogram/node_modules/canvas/src/FontParser.cc b/miniprogram/node_modules/canvas/src/FontParser.cc new file mode 100644 index 00000000..773502cb --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontParser.cc @@ -0,0 +1,605 @@ +// This is written to exactly parse the `font` shorthand in CSS2: +// https://www.w3.org/TR/CSS22/fonts.html#font-shorthand +// https://www.w3.org/TR/CSS22/syndata.html#tokenization +// +// We may want to update it for CSS 3 (e.g. font-stretch, or updated +// tokenization) but I've only ever seen one or two issues filed in node-canvas +// due to parsing in my 8 years on the project + +#include "FontParser.h" +#include "CharData.h" +#include +#include + +Token::Token(Type type, std::string value) : type_(type), value_(std::move(value)) {} + +Token::Token(Type type, double value) : type_(type), value_(value) {} + +Token::Token(Type type) : type_(type), value_(std::string{}) {} + +const std::string& +Token::getString() const { + static const std::string empty; + auto* str = std::get_if(&value_); + return str ? *str : empty; +} + +double +Token::getNumber() const { + auto* num = std::get_if(&value_); + return num ? *num : 0.0f; +} + +Tokenizer::Tokenizer(std::string_view input) : input_(input) {} + +std::string +Tokenizer::utf8Encode(uint32_t codepoint) { + std::string result; + + if (codepoint < 0x80) { + result += static_cast(codepoint); + } else if (codepoint < 0x800) { + result += static_cast((codepoint >> 6) | 0xc0); + result += static_cast((codepoint & 0x3f) | 0x80); + } else if (codepoint < 0x10000) { + result += static_cast((codepoint >> 12) | 0xe0); + result += static_cast(((codepoint >> 6) & 0x3f) | 0x80); + result += static_cast((codepoint & 0x3f) | 0x80); + } else { + result += static_cast((codepoint >> 18) | 0xf0); + result += static_cast(((codepoint >> 12) & 0x3f) | 0x80); + result += static_cast(((codepoint >> 6) & 0x3f) | 0x80); + result += static_cast((codepoint & 0x3f) | 0x80); + } + + return result; +} + +char +Tokenizer::peek() const { + return position_ < input_.length() ? 
input_[position_] : '\0'; +} + +char +Tokenizer::advance() { + return position_ < input_.length() ? input_[position_++] : '\0'; +} + +Token +Tokenizer::parseNumber() { + enum class State { + Start, + AfterSign, + Digits, + AfterDecimal, + AfterE, + AfterESign, + ExponentDigits + }; + + size_t start = position_; + size_t ePosition = 0; + State state = State::Start; + bool valid = false; + + while (position_ < input_.length()) { + char c = peek(); + uint8_t flags = charData[static_cast(c)]; + + switch (state) { + case State::Start: + if (flags & CharData::Sign) { + position_++; + state = State::AfterSign; + } else if (flags & CharData::Digit) { + position_++; + state = State::Digits; + valid = true; + } else if (c == '.') { + position_++; + state = State::AfterDecimal; + } else { + goto done; + } + break; + + case State::AfterSign: + if (flags & CharData::Digit) { + position_++; + state = State::Digits; + valid = true; + } else if (c == '.') { + position_++; + state = State::AfterDecimal; + } else { + goto done; + } + break; + + case State::Digits: + if (flags & CharData::Digit) { + position_++; + } else if (c == '.') { + position_++; + state = State::AfterDecimal; + } else if (c == 'e' || c == 'E') { + ePosition = position_; + position_++; + state = State::AfterE; + valid = false; + } else { + goto done; + } + break; + + case State::AfterDecimal: + if (flags & CharData::Digit) { + position_++; + valid = true; + state = State::Digits; + } else { + goto done; + } + break; + + case State::AfterE: + if (flags & CharData::Sign) { + position_++; + state = State::AfterESign; + } else if (flags & CharData::Digit) { + position_++; + valid = true; + state = State::ExponentDigits; + } else { + position_ = ePosition; + valid = true; + goto done; + } + break; + + case State::AfterESign: + if (flags & CharData::Digit) { + position_++; + valid = true; + state = State::ExponentDigits; + } else { + position_ = ePosition; + valid = true; + goto done; + } + break; + + case State::ExponentDigits: + if (flags & CharData::Digit) { + position_++; + } else { + goto done; + } + break; + } + } + +done: + if (!valid) { + position_ = start; + return Token(Token::Type::Invalid); + } + + std::string number_str(input_.substr(start, position_ - start)); + double value = std::stod(number_str); + return Token(Token::Type::Number, value); +} + +// Note that identifiers are always lower-case. This helps us make easier/more +// efficient comparisons, but means that font-families specified as identifiers +// will be lower-cased. Since font selection isn't case sensitive, this +// shouldn't ever be a problem. +Token +Tokenizer::parseIdentifier() { + std::string identifier; + auto flags = CharData::Nmstart; + auto start = position_; + + while (position_ < input_.length()) { + char c = peek(); + + if (c == '\\') { + advance(); + if (!parseEscape(identifier)) { + position_ = start; + return Token(Token::Type::Invalid); + } + flags = CharData::Nmchar; + } else if (charData[static_cast(c)] & flags) { + identifier += advance() + (c >= 'A' && c <= 'Z' ? 
32 : 0); + flags = CharData::Nmchar; + } else { + break; + } + } + + return Token(Token::Type::Identifier, identifier); +} + +uint32_t +Tokenizer::parseUnicode() { + uint32_t value = 0; + size_t count = 0; + + while (position_ < input_.length() && count < 6) { + char c = peek(); + uint32_t digit; + + if (c >= '0' && c <= '9') { + digit = c - '0'; + } else if (c >= 'a' && c <= 'f') { + digit = c - 'a' + 10; + } else if (c >= 'A' && c <= 'F') { + digit = c - 'A' + 10; + } else { + break; + } + + value = value * 16 + digit; + advance(); + count++; + } + + // Optional whitespace after hex escape + char c = peek(); + if (c == '\r') { + advance(); + if (peek() == '\n') advance(); + } else if (isWhitespace(c)) { + advance(); + } + + return value; +} + +bool +Tokenizer::parseEscape(std::string& str) { + char c = peek(); + auto flags = charData[static_cast(c)]; + + if (flags & CharData::Hex) { + str += utf8Encode(parseUnicode()); + return true; + } else if (!(flags & CharData::Newline) && !(flags & CharData::Hex)) { + str += advance(); + return true; + } + + return false; +} + +Token +Tokenizer::parseString(char quote) { + advance(); + std::string value; + auto start = position_; + + while (position_ < input_.length()) { + char c = peek(); + + if (c == quote) { + advance(); + return Token(Token::Type::QuotedString, value); + } else if (c == '\\') { + advance(); + c = peek(); + if (c == '\r') { + advance(); + if (peek() == '\n') advance(); + } else if (isNewline(c)) { + advance(); + } else { + if (!parseEscape(value)) { + position_ = start; + return Token(Token::Type::Invalid); + } + } + } else { + value += advance(); + } + } + + position_ = start; + return Token(Token::Type::Invalid); +} + +Token +Tokenizer::nextToken() { + if (position_ >= input_.length()) { + return Token(Token::Type::EndOfInput); + } + + char c = peek(); + auto flags = charData[static_cast(c)]; + + if (isWhitespace(c)) { + std::string whitespace; + while (position_ < input_.length() && isWhitespace(peek())) { + whitespace += advance(); + } + return Token(Token::Type::Whitespace, whitespace); + } + + if (flags & CharData::NumStart) { + Token token = parseNumber(); + if (token.type() != Token::Type::Invalid) return token; + } + + if (flags & CharData::Nmstart) { + Token token = parseIdentifier(); + if (token.type() != Token::Type::Invalid) return token; + } + + if (c == '"') { + Token token = parseString('"'); + if (token.type() != Token::Type::Invalid) return token; + } + + if (c == '\'') { + Token token = parseString('\''); + if (token.type() != Token::Type::Invalid) return token; + } + + switch (advance()) { + case '/': return Token(Token::Type::Slash); + case ',': return Token(Token::Type::Comma); + case '%': return Token(Token::Type::Percent); + default: return Token(Token::Type::Invalid); + } +} + +FontParser::FontParser(std::string_view input) + : tokenizer_(input) + , currentToken_(tokenizer_.nextToken()) + , nextToken_(tokenizer_.nextToken()) {} + +const std::unordered_map FontParser::weightMap = { + {"normal", 400}, + {"bold", 700}, + {"lighter", 100}, + {"bolder", 700} +}; + +const std::unordered_map FontParser::unitMap = { + {"cm", 37.8f}, + {"mm", 3.78f}, + {"in", 96.0f}, + {"pt", 96.0f / 72.0f}, + {"pc", 96.0f / 6.0f}, + {"em", 16.0f}, + {"px", 1.0f} +}; + +void +FontParser::advance() { + currentToken_ = nextToken_; + nextToken_ = tokenizer_.nextToken(); +} + +void +FontParser::skipWs() { + while (currentToken_.type() == Token::Type::Whitespace) advance(); +} + +bool +FontParser::check(Token::Type type) const { + 
return currentToken_.type() == type; +} + +bool +FontParser::checkWs() const { + return nextToken_.type() == Token::Type::Whitespace + || nextToken_.type() == Token::Type::EndOfInput; +} + +bool +FontParser::parseFontStyle(FontProperties& props) { + if (check(Token::Type::Identifier)) { + const auto& value = currentToken_.getString(); + if (value == "italic") { + props.fontStyle = FontStyle::Italic; + advance(); + return true; + } else if (value == "oblique") { + props.fontStyle = FontStyle::Oblique; + advance(); + return true; + } else if (value == "normal") { + props.fontStyle = FontStyle::Normal; + advance(); + return true; + } + } + + return false; +} + +bool +FontParser::parseFontVariant(FontProperties& props) { + if (check(Token::Type::Identifier)) { + const auto& value = currentToken_.getString(); + if (value == "small-caps") { + props.fontVariant = FontVariant::SmallCaps; + advance(); + return true; + } else if (value == "normal") { + props.fontVariant = FontVariant::Normal; + advance(); + return true; + } + } + + return false; +} + +bool +FontParser::parseFontWeight(FontProperties& props) { + if (check(Token::Type::Number)) { + double weightFloat = currentToken_.getNumber(); + int weight = static_cast(weightFloat); + if (weight < 1 || weight > 1000) return false; + props.fontWeight = static_cast(weight); + advance(); + return true; + } else if (check(Token::Type::Identifier)) { + const auto& value = currentToken_.getString(); + + if (auto it = weightMap.find(value); it != weightMap.end()) { + props.fontWeight = it->second; + advance(); + return true; + } + } + + return false; +} + +bool +FontParser::parseFontSize(FontProperties& props) { + if (!check(Token::Type::Number)) return false; + + props.fontSize = currentToken_.getNumber(); + advance(); + + double multiplier = 1.0f; + if (check(Token::Type::Identifier)) { + const auto& unit = currentToken_.getString(); + + if (auto it = unitMap.find(unit); it != unitMap.end()) { + multiplier = it->second; + advance(); + } else { + return false; + } + } else if (check(Token::Type::Percent)) { + multiplier = 16.0f / 100.0f; + advance(); + } else { + return false; + } + + // Technically if we consumed some tokens but couldn't parse the font-size, + // we should rewind the tokenizer, but I don't think the grammar allows for + // any valid alternates in this specific case + + props.fontSize *= multiplier; + return true; +} + +// line-height is not used by canvas ever, but should still parse +bool +FontParser::parseLineHeight(FontProperties& props) { + if (check(Token::Type::Slash)) { + advance(); + skipWs(); + if (check(Token::Type::Number)) { + advance(); + if (check(Token::Type::Percent)) { + advance(); + } else if (check(Token::Type::Identifier)) { + auto identifier = currentToken_.getString(); + if (auto it = unitMap.find(identifier); it != unitMap.end()) { + advance(); + } else { + return false; + } + } else { + return false; + } + } else if (check(Token::Type::Identifier) && currentToken_.getString() == "normal") { + advance(); + } else { + return false; + } + } + + return true; +} + +bool +FontParser::parseFontFamily(FontProperties& props) { + while (!check(Token::Type::EndOfInput)) { + std::string family = ""; + std::string trailingWs = ""; + bool found = false; + + while ( + check(Token::Type::QuotedString) || + check(Token::Type::Identifier) || + check(Token::Type::Whitespace) + ) { + if (check(Token::Type::Whitespace)) { + if (found) trailingWs += currentToken_.getString(); + } else { // Identifier, QuotedString + if (found) { + 
family += trailingWs; + trailingWs.clear(); + } + + family += currentToken_.getString(); + found = true; + } + + advance(); + } + + if (!found) return false; // only whitespace or non-id/string found + + props.fontFamily.push_back(family); + + if (check(Token::Type::Comma)) advance(); + } + + return true; +} + +FontProperties +FontParser::parse(const std::string& fontString, bool* success) { + FontParser parser(fontString); + auto result = parser.parseFont(); + if (success) *success = !parser.hasError_; + return result; +} + +FontProperties +FontParser::parseFont() { + FontProperties props; + uint8_t state = 0b111; + + skipWs(); + + for (size_t i = 0; i < 3 && checkWs(); i++) { + if ((state & 0b001) && parseFontStyle(props)) { + state &= 0b110; + goto match; + } + + if ((state & 0b010) && parseFontVariant(props)) { + state &= 0b101; + goto match; + } + + if ((state & 0b100) && parseFontWeight(props)) { + state &= 0b011; + goto match; + } + + break; // all attempts exhausted + match: skipWs(); // success: move to the next non-ws token + } + + if (parseFontSize(props)) { + skipWs(); + if (parseLineHeight(props) && parseFontFamily(props)) { + return props; + } + } + + hasError_ = true; + return props; +} diff --git a/miniprogram/node_modules/canvas/src/FontParser.h b/miniprogram/node_modules/canvas/src/FontParser.h new file mode 100644 index 00000000..c8880210 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/FontParser.h @@ -0,0 +1,115 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include "CharData.h" + +enum class FontStyle { + Normal, + Italic, + Oblique +}; + +enum class FontVariant { + Normal, + SmallCaps +}; + +struct FontProperties { + double fontSize{16.0f}; + std::vector fontFamily; + uint16_t fontWeight{400}; + FontVariant fontVariant{FontVariant::Normal}; + FontStyle fontStyle{FontStyle::Normal}; +}; + +class Token { + public: + enum class Type { + Invalid, + Number, + Percent, + Identifier, + Slash, + Comma, + QuotedString, + Whitespace, + EndOfInput + }; + + Token(Type type, std::string value); + Token(Type type, double value); + Token(Type type); + + Type type() const { return type_; } + + const std::string& getString() const; + double getNumber() const; + + private: + Type type_; + std::variant value_; +}; + +class Tokenizer { + public: + Tokenizer(std::string_view input); + Token nextToken(); + + private: + std::string_view input_; + size_t position_{0}; + + // Util + std::string utf8Encode(uint32_t codepoint); + inline bool isWhitespace(char c) const { + return charData[static_cast(c)] & CharData::Whitespace; + } + inline bool isNewline(char c) const { + return charData[static_cast(c)] & CharData::Newline; + } + + // Moving through the string + char peek() const; + char advance(); + + // Tokenize + Token parseNumber(); + Token parseIdentifier(); + uint32_t parseUnicode(); + bool parseEscape(std::string& str); + Token parseString(char quote); +}; + +class FontParser { + public: + static FontProperties parse(const std::string& fontString, bool* success = nullptr); + + private: + static const std::unordered_map weightMap; + static const std::unordered_map unitMap; + + FontParser(std::string_view input); + + void advance(); + void skipWs(); + bool check(Token::Type type) const; + bool checkWs() const; + + bool parseFontStyle(FontProperties& props); + bool parseFontVariant(FontProperties& props); + bool parseFontWeight(FontProperties& props); + bool parseFontSize(FontProperties& props); + bool parseLineHeight(FontProperties& props); + bool 
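`FontParser::parse` is the public entry point for the CSS `font` shorthand. A usage sketch, with the results traced by hand from the grammar above rather than taken from the package's test suite:

#include "FontParser.h"

// Illustrative caller (hypothetical, not part of the package):
static void fontShorthandExample() {
  bool ok = false;
  FontProperties props =
      FontParser::parse("italic bold 12pt/14px Helvetica, sans-serif", &ok);
  // ok                -> true
  // props.fontStyle   -> FontStyle::Italic
  // props.fontWeight  -> 700                 ("bold")
  // props.fontSize    -> 16.0                (12 pt * 96/72 px per pt)
  // props.fontVariant -> FontVariant::Normal
  // props.fontFamily  -> {"helvetica", "sans-serif"}  (identifiers are lower-cased)
  (void)props;
  (void)ok;
}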
parseFontFamily(FontProperties& props); + FontProperties parseFont(); + + Tokenizer tokenizer_; + Token currentToken_; + Token nextToken_; + bool hasError_{false}; +}; diff --git a/miniprogram/node_modules/canvas/src/Image.cc b/miniprogram/node_modules/canvas/src/Image.cc new file mode 100644 index 00000000..97373650 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/Image.cc @@ -0,0 +1,1720 @@ +// Copyright (c) 2010 LearnBoost + +#include "Image.h" +#include "InstanceData.h" + +#include "bmp/BMPParser.h" +#include "Canvas.h" +#include +#include +#include +#include +#include + +/* Cairo limit: + * https://lists.cairographics.org/archives/cairo/2010-December/021422.html + */ +static constexpr int canvas_max_side = (1 << 15) - 1; + +#ifdef HAVE_GIF +typedef struct { + uint8_t *buf; + unsigned len; + unsigned pos; +} gif_data_t; +#endif + +#ifdef HAVE_JPEG +#include + +struct canvas_jpeg_error_mgr: jpeg_error_mgr { + Image* image; + jmp_buf setjmp_buffer; +}; +#endif + +/* + * Read closure used by loadFromBuffer. + */ + +typedef struct { + Napi::Env* env; + unsigned len; + uint8_t *buf; +} read_closure_t; + +/* + * Initialize Image. + */ + +void +Image::Initialize(Napi::Env& env, Napi::Object& exports) { + InstanceData *data = env.GetInstanceData(); + Napi::HandleScope scope(env); + + Napi::Function ctor = DefineClass(env, "Image", { + InstanceAccessor<&Image::GetComplete>("complete", napi_default_jsproperty), + InstanceAccessor<&Image::GetWidth, &Image::SetWidth>("width", napi_default_jsproperty), + InstanceAccessor<&Image::GetHeight, &Image::SetHeight>("height", napi_default_jsproperty), + InstanceAccessor<&Image::GetNaturalWidth>("naturalWidth", napi_default_jsproperty), + InstanceAccessor<&Image::GetNaturalHeight>("naturalHeight", napi_default_jsproperty), + InstanceAccessor<&Image::GetDataMode, &Image::SetDataMode>("dataMode", napi_default_jsproperty), + StaticValue("MODE_IMAGE", Napi::Number::New(env, DATA_IMAGE), napi_default_jsproperty), + StaticValue("MODE_MIME", Napi::Number::New(env, DATA_MIME), napi_default_jsproperty) + }); + + // Used internally in lib/image.js + exports.Set("GetSource", Napi::Function::New(env, &GetSource)); + exports.Set("SetSource", Napi::Function::New(env, &SetSource)); + + data->ImageCtor = Napi::Persistent(ctor); + exports.Set("Image", ctor); +} + +/* + * Initialize a new Image. + */ + +Image::Image(const Napi::CallbackInfo& info) : ObjectWrap(info), env(info.Env()) { + data_mode = DATA_IMAGE; + info.This().ToObject().Unwrap().Set("onload", env.Null()); + info.This().ToObject().Unwrap().Set("onerror", env.Null()); + filename = NULL; + _data = nullptr; + _data_len = 0; + _surface = NULL; + width = height = 0; + naturalWidth = naturalHeight = 0; + state = DEFAULT; +#ifdef HAVE_RSVG + _rsvg = NULL; + _is_svg = false; + _svg_last_width = _svg_last_height = 0; +#endif +} + +/* + * Get complete boolean. + */ + +Napi::Value +Image::GetComplete(const Napi::CallbackInfo& info) { + return Napi::Boolean::New(env, true); +} + +/* + * Get dataMode. + */ + +Napi::Value +Image::GetDataMode(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, data_mode); +} + +/* + * Set dataMode. + */ + +void +Image::SetDataMode(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (value.IsNumber()) { + int mode = value.As().Uint32Value(); + data_mode = (data_mode_t) mode; + } +} + +/* + * Get natural width + */ + +Napi::Value +Image::GetNaturalWidth(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, naturalWidth); +} + +/* + * Get width. 
+ */ + +Napi::Value +Image::GetWidth(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, width); +} + +/* + * Set width. + */ + +void +Image::SetWidth(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (value.IsNumber()) { + width = value.As().Uint32Value(); + } +} + +/* + * Get natural height + */ + +Napi::Value +Image::GetNaturalHeight(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, naturalHeight); +} + +/* + * Get height. + */ + +Napi::Value +Image::GetHeight(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, height); +} +/* + * Set height. + */ + +void +Image::SetHeight(const Napi::CallbackInfo& info, const Napi::Value& value) { + if (value.IsNumber()) { + height = value.As().Uint32Value(); + } +} + +/* + * Get src path. + */ + +Napi::Value +Image::GetSource(const Napi::CallbackInfo& info){ + Napi::Env env = info.Env(); + Image *img = Image::Unwrap(info.This().As()); + return Napi::String::New(env, img->filename ? img->filename : ""); +} + +/* + * Clean up assets and variables. + */ + +void +Image::clearData() { + if (_surface) { + cairo_surface_destroy(_surface); + Napi::MemoryManagement::AdjustExternalMemory(env, -_data_len); + _data_len = 0; + _surface = NULL; + } + + delete[] _data; + _data = nullptr; + + free(filename); + filename = NULL; + +#ifdef HAVE_RSVG + if (_rsvg != NULL) { + g_object_unref(_rsvg); + _rsvg = NULL; + } +#endif + + width = height = 0; + naturalWidth = naturalHeight = 0; + state = DEFAULT; +} + +/* + * Set src path. + */ + +void +Image::SetSource(const Napi::CallbackInfo& info){ + Napi::Env env = info.Env(); + Napi::Object This = info.This().As(); + Image *img = Image::Unwrap(This); + + cairo_status_t status = CAIRO_STATUS_READ_ERROR; + + Napi::Value value = info[0]; + + img->clearData(); + // Clear errno in case some unrelated previous syscall failed + errno = 0; + + // url string + if (value.IsString()) { + std::string src = value.As().Utf8Value(); + if (img->filename) free(img->filename); + img->filename = strdup(src.c_str()); + status = img->load(); + // Buffer + } else if (value.IsBuffer()) { + uint8_t *buf = value.As>().Data(); + unsigned len = value.As>().Length(); + status = img->loadFromBuffer(buf, len); + } + + if (status) { + Napi::Value onerrorFn; + if (This.Get("onerror").UnwrapTo(&onerrorFn) && onerrorFn.IsFunction()) { + Napi::Error arg; + if (img->errorInfo.empty()) { + arg = Napi::Error::New(env, Napi::String::New(env, cairo_status_to_string(status))); + } else { + arg = img->errorInfo.toError(env); + } + onerrorFn.As().Call({ arg.Value() }); + } + } else { + img->loaded(); + Napi::Value onloadFn; + if (This.Get("onload").UnwrapTo(&onloadFn) && onloadFn.IsFunction()) { + onloadFn.As().Call({}); + } + } +} + +/* + * Load image data from `buf` by sniffing + * the bytes to determine format. + */ + +cairo_status_t +Image::loadFromBuffer(uint8_t *buf, unsigned len) { + uint8_t data[4] = {0}; + memcpy(data, buf, (len < 4 ? 
len : 4) * sizeof(uint8_t)); + + if (isPNG(data)) return loadPNGFromBuffer(buf); + + if (isGIF(data)) { +#ifdef HAVE_GIF + return loadGIFFromBuffer(buf, len); +#else + this->errorInfo.set("node-canvas was built without GIF support"); + return CAIRO_STATUS_READ_ERROR; +#endif + } + + if (isJPEG(data)) { +#ifdef HAVE_JPEG + if (DATA_IMAGE == data_mode) return loadJPEGFromBuffer(buf, len); + if (DATA_MIME == data_mode) return decodeJPEGBufferIntoMimeSurface(buf, len); + if ((DATA_IMAGE | DATA_MIME) == data_mode) { + cairo_status_t status; + status = loadJPEGFromBuffer(buf, len); + if (status) return status; + return assignDataAsMime(buf, len, CAIRO_MIME_TYPE_JPEG); + } +#else // HAVE_JPEG + this->errorInfo.set("node-canvas was built without JPEG support"); + return CAIRO_STATUS_READ_ERROR; +#endif + } + + // confirm svg using first 1000 chars + // if a very long comment precedes the root tag, isSVG returns false + unsigned head_len = (len < 1000 ? len : 1000); + if (isSVG(buf, head_len)) { +#ifdef HAVE_RSVG + return loadSVGFromBuffer(buf, len); +#else + this->errorInfo.set("node-canvas was built without SVG support"); + return CAIRO_STATUS_READ_ERROR; +#endif + } + + if (isBMP(buf, len)) + return loadBMPFromBuffer(buf, len); + + this->errorInfo.set("Unsupported image type"); + return CAIRO_STATUS_READ_ERROR; +} + +/* + * Load PNG data from `buf`. + */ + +cairo_status_t +Image::loadPNGFromBuffer(uint8_t *buf) { + read_closure_t closure; + closure.len = 0; + closure.buf = buf; + closure.env = &env; + _surface = cairo_image_surface_create_from_png_stream(readPNG, &closure); + cairo_status_t status = cairo_surface_status(_surface); + if (status) return status; + return CAIRO_STATUS_SUCCESS; +} + +/* + * Read PNG data. + */ + +cairo_status_t +Image::readPNG(void *c, uint8_t *data, unsigned int len) { + read_closure_t *closure = (read_closure_t *) c; + memcpy(data, closure->buf + closure->len, len); + closure->len += len; + return CAIRO_STATUS_SUCCESS; +} + +/* + * Destroy image and associated surface. + */ + +Image::~Image() { + clearData(); +} + +/* + * Initiate image loading. + */ + +cairo_status_t +Image::load() { + if (LOADING != state) { + state = LOADING; + return loadSurface(); + } + return CAIRO_STATUS_READ_ERROR; +} + +/* + * Set state, assign dimensions. + */ + +void +Image::loaded() { + Napi::HandleScope scope(env); + state = COMPLETE; + + width = naturalWidth = cairo_image_surface_get_width(_surface); + height = naturalHeight = cairo_image_surface_get_height(_surface); + _data_len = naturalHeight * cairo_image_surface_get_stride(_surface); + Napi::MemoryManagement::AdjustExternalMemory(env, _data_len); +} + +/* + * Returns this image's surface. + */ +cairo_surface_t *Image::surface() { +#ifdef HAVE_RSVG + if (_is_svg && (_svg_last_width != width || _svg_last_height != height)) { + if (_surface != NULL) { + cairo_surface_destroy(_surface); + _surface = NULL; + } + + cairo_status_t status = renderSVGToSurface(); + if (status != CAIRO_STATUS_SUCCESS) { + g_object_unref(_rsvg); + Napi::Error::New(env, cairo_status_to_string(status)).ThrowAsJavaScriptException(); + + return NULL; + } + } +#endif + return _surface; +} + +/* + * Load cairo surface from the image src. 
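Both `loadFromBuffer` and `loadSurface` sniff the leading bytes to choose a decoder; the `isPNG`/`isGIF`/`isJPEG`/`isBMP` predicates live elsewhere in this package and are not part of this hunk. For orientation, the well-known signatures such checks typically look at are sketched below (standard file magics, not the package's exact predicates):

#include <cstdint>
#include <cstring>

// Standard magic numbers; the real helpers may differ in detail.
static bool looksLikePNG(const uint8_t* b)  { return b[0] == 0x89 && b[1] == 'P' && b[2] == 'N' && b[3] == 'G'; }
static bool looksLikeGIF(const uint8_t* b)  { return std::memcmp(b, "GIF8", 4) == 0; }  // GIF87a / GIF89a
static bool looksLikeJPEG(const uint8_t* b) { return b[0] == 0xFF && b[1] == 0xD8 && b[2] == 0xFF; }
static bool looksLikeBMP(const uint8_t* b)  { return b[0] == 'B' && b[1] == 'M'; }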
+ * + * TODO: support more formats + * TODO: use node IO or at least thread pool + */ + +cairo_status_t +Image::loadSurface() { + FILE *stream = fopen(filename, "rb"); + if (!stream) { + this->errorInfo.set(NULL, "fopen", errno, filename); + return CAIRO_STATUS_READ_ERROR; + } + uint8_t buf[5]; + if (1 != fread(&buf, 5, 1, stream)) { + fclose(stream); + return CAIRO_STATUS_READ_ERROR; + } + rewind(stream); + + // png + if (isPNG(buf)) { + fclose(stream); + return loadPNG(); + } + + + if (isGIF(buf)) { +#ifdef HAVE_GIF + return loadGIF(stream); +#else + this->errorInfo.set("node-canvas was built without GIF support"); + return CAIRO_STATUS_READ_ERROR; +#endif + } + + if (isJPEG(buf)) { +#ifdef HAVE_JPEG + return loadJPEG(stream); +#else + this->errorInfo.set("node-canvas was built without JPEG support"); + return CAIRO_STATUS_READ_ERROR; +#endif + } + + // confirm svg using first 1000 chars + // if a very long comment precedes the root tag, isSVG returns false + uint8_t head[1000] = {0}; + fseek(stream, 0 , SEEK_END); + long len = ftell(stream); + unsigned head_len = (len < 1000 ? len : 1000); + unsigned head_size = head_len * sizeof(uint8_t); + rewind(stream); + if (head_size != fread(&head, 1, head_size, stream)) { + fclose(stream); + return CAIRO_STATUS_READ_ERROR; + } + rewind(stream); + if (isSVG(head, head_len)) { +#ifdef HAVE_RSVG + return loadSVG(stream); +#else + this->errorInfo.set("node-canvas was built without SVG support"); + return CAIRO_STATUS_READ_ERROR; +#endif + } + + if (isBMP(buf, 2)) + return loadBMP(stream); + + fclose(stream); + + this->errorInfo.set("Unsupported image type"); + return CAIRO_STATUS_READ_ERROR; +} + +/* + * Load PNG. + */ + +cairo_status_t +Image::loadPNG() { + _surface = cairo_image_surface_create_from_png(filename); + return cairo_surface_status(_surface); +} + +// GIF support + +#ifdef HAVE_GIF + +/* + * Return the alpha color for `gif` at `frame`, or -1. + */ + +int +get_gif_transparent_color(GifFileType *gif, int frame) { + ExtensionBlock *ext = gif->SavedImages[frame].ExtensionBlocks; + int len = gif->SavedImages[frame].ExtensionBlockCount; + for (int x = 0; x < len; ++x, ++ext) { + if ((ext->Function == GRAPHICS_EXT_FUNC_CODE) && (ext->Bytes[0] & 1)) { + return ext->Bytes[3] == 0 ? 0 : (uint8_t) ext->Bytes[3]; + } + } + return -1; +} + +/* + * Memory GIF reader callback. + */ + +int +read_gif_from_memory(GifFileType *gif, GifByteType *buf, int len) { + gif_data_t *data = (gif_data_t *) gif->UserData; + if ((data->pos + len) > data->len) len = data->len - data->pos; + memcpy(buf, data->pos + data->buf, len); + data->pos += len; + return len; +} + +/* + * Load GIF. + */ + +cairo_status_t +Image::loadGIF(FILE *stream) { + struct stat s; + int fd = fileno(stream); + + // stat + if (fstat(fd, &s) < 0) { + fclose(stream); + return CAIRO_STATUS_READ_ERROR; + } + + uint8_t *buf = (uint8_t *) malloc(s.st_size); + + if (!buf) { + fclose(stream); + this->errorInfo.set(NULL, "malloc", errno); + return CAIRO_STATUS_NO_MEMORY; + } + + size_t read = fread(buf, s.st_size, 1, stream); + fclose(stream); + + cairo_status_t result = CAIRO_STATUS_READ_ERROR; + if (1 == read) result = loadGIFFromBuffer(buf, s.st_size); + free(buf); + + return result; +} + +/* + * Load give from `buf` and the given `len`. 
+ */ + +cairo_status_t +Image::loadGIFFromBuffer(uint8_t *buf, unsigned len) { + int i = 0; + GifFileType* gif; + + gif_data_t gifd = { buf, len, 0 }; + +#if GIFLIB_MAJOR >= 5 + int errorcode; + if ((gif = DGifOpen((void*) &gifd, read_gif_from_memory, &errorcode)) == NULL) + return CAIRO_STATUS_READ_ERROR; +#else + if ((gif = DGifOpen((void*) &gifd, read_gif_from_memory)) == NULL) + return CAIRO_STATUS_READ_ERROR; +#endif + + if (GIF_OK != DGifSlurp(gif)) { + GIF_CLOSE_FILE(gif); + return CAIRO_STATUS_READ_ERROR; + } + + if (gif->SWidth > canvas_max_side || gif->SHeight > canvas_max_side) { + GIF_CLOSE_FILE(gif); + return CAIRO_STATUS_INVALID_SIZE; + } + + width = naturalWidth = gif->SWidth; + height = naturalHeight = gif->SHeight; + + uint8_t *data = new uint8_t[naturalWidth * naturalHeight * 4]; + if (!data) { + GIF_CLOSE_FILE(gif); + this->errorInfo.set(NULL, "malloc", errno); + return CAIRO_STATUS_NO_MEMORY; + } + + GifImageDesc *img = &gif->SavedImages[i].ImageDesc; + + // local colormap takes precedence over global + ColorMapObject *colormap = img->ColorMap + ? img->ColorMap + : gif->SColorMap; + + if (colormap == nullptr) { + GIF_CLOSE_FILE(gif); + return CAIRO_STATUS_READ_ERROR; + } + + int bgColor = 0; + int alphaColor = get_gif_transparent_color(gif, i); + if (gif->SColorMap) bgColor = (uint8_t) gif->SBackGroundColor; + else if(alphaColor >= 0) bgColor = alphaColor; + + uint8_t *src_data = (uint8_t*) gif->SavedImages[i].RasterBits; + uint32_t *dst_data = (uint32_t*) data; + + if (!gif->Image.Interlace) { + if (naturalWidth == img->Width && naturalHeight == img->Height) { + for (int y = 0; y < naturalHeight; ++y) { + for (int x = 0; x < naturalWidth; ++x) { + *dst_data = ((*src_data == alphaColor) ? 0 : 255) << 24 + | colormap->Colors[*src_data].Red << 16 + | colormap->Colors[*src_data].Green << 8 + | colormap->Colors[*src_data].Blue; + + dst_data++; + src_data++; + } + } + } else { + // Image does not take up whole "screen" so we need to fill-in the background + int bottom = img->Top + img->Height; + int right = img->Left + img->Width; + + uint32_t bgPixel = + ((bgColor == alphaColor) ? 0 : 255) << 24 + | colormap->Colors[bgColor].Red << 16 + | colormap->Colors[bgColor].Green << 8 + | colormap->Colors[bgColor].Blue; + + for (int y = 0; y < naturalHeight; ++y) { + for (int x = 0; x < naturalWidth; ++x) { + if (y < img->Top || y >= bottom || x < img->Left || x >= right) { + *dst_data = bgPixel; + dst_data++; + } else { + *dst_data = ((*src_data == alphaColor) ? 0 : 255) << 24 + | colormap->Colors[*src_data].Red << 16 + | colormap->Colors[*src_data].Green << 8 + | colormap->Colors[*src_data].Blue; + dst_data++; + src_data++; + } + } + } + } + } else { + // Image is interlaced so that it streams nice over 14.4k and 28.8k modems :) + // We first load in 1/8 of the image, followed by another 1/8, followed by + // 1/4 and finally the remaining 1/2. + int ioffs[] = { 0, 4, 2, 1 }; + int ijumps[] = { 8, 8, 4, 2 }; + + uint8_t *src_ptr = src_data; + uint32_t *dst_ptr; + + for(int z = 0; z < 4; z++) { + for(int y = ioffs[z]; y < naturalHeight; y += ijumps[z]) { + dst_ptr = dst_data + naturalWidth * y; + for(int x = 0; x < naturalWidth; ++x) { + *dst_ptr = ((*src_ptr == alphaColor) ? 
0 : 255) << 24 + | (colormap->Colors[*src_ptr].Red) << 16 + | (colormap->Colors[*src_ptr].Green) << 8 + | (colormap->Colors[*src_ptr].Blue); + + dst_ptr++; + src_ptr++; + } + } + } + } + + GIF_CLOSE_FILE(gif); + + // New image surface + _surface = cairo_image_surface_create_for_data( + data + , CAIRO_FORMAT_ARGB32 + , naturalWidth + , naturalHeight + , cairo_format_stride_for_width(CAIRO_FORMAT_ARGB32, naturalWidth)); + + cairo_status_t status = cairo_surface_status(_surface); + + if (status) { + delete[] data; + return status; + } + + _data = data; + + return CAIRO_STATUS_SUCCESS; +} +#endif /* HAVE_GIF */ + +// JPEG support + +#ifdef HAVE_JPEG + +// libjpeg 6.2 does not have jpeg_mem_src; define it ourselves here unless +// libjpeg 8 is installed. +#if JPEG_LIB_VERSION < 80 && !defined(MEM_SRCDST_SUPPORTED) + +/* Read JPEG image from a memory segment */ +static void +init_source(j_decompress_ptr cinfo) {} + +static boolean +fill_input_buffer(j_decompress_ptr cinfo) { + ERREXIT(cinfo, JERR_INPUT_EMPTY); + return TRUE; +} +static void +skip_input_data(j_decompress_ptr cinfo, long num_bytes) { + struct jpeg_source_mgr* src = (struct jpeg_source_mgr*) cinfo->src; + if (num_bytes > 0) { + src->next_input_byte += (size_t) num_bytes; + src->bytes_in_buffer -= (size_t) num_bytes; + } +} + +static void term_source (j_decompress_ptr cinfo) {} +static void jpeg_mem_src (j_decompress_ptr cinfo, void* buffer, long nbytes) { + struct jpeg_source_mgr* src; + + if (cinfo->src == NULL) { + cinfo->src = (struct jpeg_source_mgr *) + (*cinfo->mem->alloc_small) ((j_common_ptr) cinfo, JPOOL_PERMANENT, + sizeof(struct jpeg_source_mgr)); + } + + src = (struct jpeg_source_mgr*) cinfo->src; + src->init_source = init_source; + src->fill_input_buffer = fill_input_buffer; + src->skip_input_data = skip_input_data; + src->resync_to_restart = jpeg_resync_to_restart; /* use default method */ + src->term_source = term_source; + src->bytes_in_buffer = nbytes; + src->next_input_byte = (JOCTET*)buffer; +} + +#endif + +class BufferReader : public Image::Reader { +public: + BufferReader(uint8_t* buf, unsigned len) : _buf(buf), _len(len), _idx(0) {} + + bool hasBytes(unsigned n) const override { return (_idx + n - 1 < _len); } + + uint8_t getNext() override { + return _buf[_idx++]; + } + + void skipBytes(unsigned n) override { _idx += n; } + +private: + uint8_t* _buf; // we do not own this + unsigned _len; + unsigned _idx; +}; + +class StreamReader : public Image::Reader { +public: + StreamReader(FILE *stream) : _stream(stream), _len(0), _idx(0) { + fseek(_stream, 0, SEEK_END); + _len = ftell(_stream); + fseek(_stream, 0, SEEK_SET); + } + + bool hasBytes(unsigned n) const override { return (_idx + n - 1 < _len); } + + uint8_t getNext() override { + ++_idx; + return getc(_stream); + } + + void skipBytes(unsigned n) override { + _idx += n; + fseek(_stream, _idx, SEEK_SET); + } + +private: + FILE* _stream; + unsigned _len; + unsigned _idx; +}; + +void Image::jpegToARGB(jpeg_decompress_struct* args, uint8_t* data, uint8_t* src, JPEGDecodeL decode) { + int stride = naturalWidth * 4; + for (int y = 0; y < naturalHeight; ++y) { + jpeg_read_scanlines(args, &src, 1); + uint32_t *row = (uint32_t*)(data + stride * y); + for (int x = 0; x < naturalWidth; ++x) { + int bx = args->output_components * x; + row[x] = decode(src + bx); + } + } +} + +/* + * Takes an initialised jpeg_decompress_struct and decodes the + * data into _surface. 
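The four passes in the interlaced branch above (offsets {0, 4, 2, 1}, strides {8, 8, 4, 2}) follow the standard GIF interlace order. A small standalone sketch that prints the row order for a 10-row image makes the fill pattern visible:

#include <cstdio>

int main() {
  const int height = 10;
  const int offsets[] = {0, 4, 2, 1};
  const int strides[] = {8, 8, 4, 2};
  for (int pass = 0; pass < 4; ++pass)
    for (int y = offsets[pass]; y < height; y += strides[pass])
      std::printf("pass %d -> row %d\n", pass + 1, y);
  // pass 1 -> rows 0, 8;  pass 2 -> row 4;
  // pass 3 -> rows 2, 6;  pass 4 -> rows 1, 3, 5, 7, 9
}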
+ */ + +cairo_status_t +Image::decodeJPEGIntoSurface(jpeg_decompress_struct *args, Orientation orientation) { + const int channels = 4; + cairo_status_t status = CAIRO_STATUS_SUCCESS; + + uint8_t *data = new uint8_t[naturalWidth * naturalHeight * channels]; + if (!data) { + jpeg_abort_decompress(args); + jpeg_destroy_decompress(args); + this->errorInfo.set(NULL, "malloc", errno); + return CAIRO_STATUS_NO_MEMORY; + } + + uint8_t *src = new uint8_t[naturalWidth * args->output_components]; + if (!src) { + free(data); + jpeg_abort_decompress(args); + jpeg_destroy_decompress(args); + this->errorInfo.set(NULL, "malloc", errno); + return CAIRO_STATUS_NO_MEMORY; + } + + // These are the three main cases to handle. libjpeg converts YCCK to CMYK + // and YCbCr to RGB by default. + switch (args->out_color_space) { + case JCS_CMYK: + jpegToARGB(args, data, src, [](uint8_t const* src) { + uint16_t k = static_cast(src[3]); + uint8_t r = k * src[0] / 255; + uint8_t g = k * src[1] / 255; + uint8_t b = k * src[2] / 255; + return 255 << 24 | r << 16 | g << 8 | b; + }); + break; + case JCS_RGB: + jpegToARGB(args, data, src, [](uint8_t const* src) { + uint8_t r = src[0], g = src[1], b = src[2]; + return 255 << 24 | r << 16 | g << 8 | b; + }); + break; + case JCS_GRAYSCALE: + jpegToARGB(args, data, src, [](uint8_t const* src) { + uint8_t v = src[0]; + return 255 << 24 | v << 16 | v << 8 | v; + }); + break; + default: + this->errorInfo.set("Unsupported JPEG encoding"); + status = CAIRO_STATUS_READ_ERROR; + break; + } + + updateDimensionsForOrientation(orientation); + + if (!status) { + _surface = cairo_image_surface_create_for_data( + data + , CAIRO_FORMAT_ARGB32 + , naturalWidth + , naturalHeight + , cairo_format_stride_for_width(CAIRO_FORMAT_ARGB32, naturalWidth)); + } + + jpeg_finish_decompress(args); + jpeg_destroy_decompress(args); + status = cairo_surface_status(_surface); + + rotatePixels(data, naturalWidth, naturalHeight, channels, orientation); + + delete[] src; + + if (status) { + delete[] data; + return status; + } + + _data = data; + + return CAIRO_STATUS_SUCCESS; +} + +/* + * Callback to recover from jpeg errors + */ + +static void canvas_jpeg_error_exit(j_common_ptr cinfo) { + canvas_jpeg_error_mgr *cjerr = static_cast(cinfo->err); + cjerr->output_message(cinfo); + // Return control to the setjmp point + longjmp(cjerr->setjmp_buffer, 1); +} + +// Capture libjpeg errors instead of writing stdout +static void canvas_jpeg_output_message(j_common_ptr cinfo) { + canvas_jpeg_error_mgr *cjerr = static_cast(cinfo->err); + char buff[JMSG_LENGTH_MAX]; + cjerr->format_message(cinfo, buff); + // (Only the last message will be returned to JS land.) + cjerr->image->errorInfo.set(buff); +} + +/* + * Takes a jpeg data buffer and assigns it as mime data to a + * dummy surface + */ + +cairo_status_t +Image::decodeJPEGBufferIntoMimeSurface(uint8_t *buf, unsigned len) { + // TODO: remove this duplicate logic + // JPEG setup + struct jpeg_decompress_struct args; + struct canvas_jpeg_error_mgr err; + + err.image = this; + args.err = jpeg_std_error(&err); + args.err->error_exit = canvas_jpeg_error_exit; + args.err->output_message = canvas_jpeg_output_message; + + // Establish the setjmp return context for canvas_jpeg_error_exit to use + if (setjmp(err.setjmp_buffer)) { + // If we get here, the JPEG code has signaled an error. + // We need to clean up the JPEG object, close the input file, and return. 
+ jpeg_destroy_decompress(&args); + return CAIRO_STATUS_READ_ERROR; + } + + jpeg_create_decompress(&args); + + jpeg_mem_src(&args, buf, len); + + jpeg_read_header(&args, 1); + jpeg_start_decompress(&args); + width = naturalWidth = args.output_width; + height = naturalHeight = args.output_height; + + // Data alloc + // 8 pixels per byte using Alpha Channel format to reduce memory requirement. + int buf_size = naturalHeight * cairo_format_stride_for_width(CAIRO_FORMAT_A1, naturalWidth); + uint8_t *data = new uint8_t[buf_size]; + if (!data) { + this->errorInfo.set(NULL, "malloc", errno); + return CAIRO_STATUS_NO_MEMORY; + } + + BufferReader reader(buf, len); + Orientation orientation = getExifOrientation(reader); + updateDimensionsForOrientation(orientation); + + // New image surface + _surface = cairo_image_surface_create_for_data( + data + , CAIRO_FORMAT_A1 + , naturalWidth + , naturalHeight + , cairo_format_stride_for_width(CAIRO_FORMAT_A1, naturalWidth)); + + // Cleanup + jpeg_abort_decompress(&args); + jpeg_destroy_decompress(&args); + cairo_status_t status = cairo_surface_status(_surface); + + if (status) { + delete[] data; + return status; + } + + rotatePixels(data, naturalWidth, naturalHeight, 1, orientation); + + _data = data; + + return assignDataAsMime(buf, len, CAIRO_MIME_TYPE_JPEG); +} + +/* + * Helper function for disposing of a mime data closure. + */ + +void +clearMimeData(void *closure) { + Napi::MemoryManagement::AdjustExternalMemory( + *static_cast(closure)->env, + -static_cast((static_cast(closure)->len))); + free(static_cast(closure)->buf); + free(closure); +} + +/* + * Assign a given buffer as mime data against the surface. + * The provided buffer will be copied, and the copy will + * be automatically freed when the surface is destroyed. + */ + +cairo_status_t +Image::assignDataAsMime(uint8_t *data, int len, const char *mime_type) { + uint8_t *mime_data = (uint8_t *) malloc(len); + if (!mime_data) { + this->errorInfo.set(NULL, "malloc", errno); + return CAIRO_STATUS_NO_MEMORY; + } + + read_closure_t *mime_closure = (read_closure_t *) malloc(sizeof(read_closure_t)); + if (!mime_closure) { + free(mime_data); + this->errorInfo.set(NULL, "malloc", errno); + return CAIRO_STATUS_NO_MEMORY; + } + + memcpy(mime_data, data, len); + + mime_closure->env = &env; + mime_closure->buf = mime_data; + mime_closure->len = len; + + Napi::MemoryManagement::AdjustExternalMemory(env, len); + + return cairo_surface_set_mime_data(_surface + , mime_type + , mime_data + , len + , clearMimeData + , mime_closure); +} + +/* + * Load jpeg from buffer. + */ + +cairo_status_t +Image::loadJPEGFromBuffer(uint8_t *buf, unsigned len) { + BufferReader reader(buf, len); + Orientation orientation = getExifOrientation(reader); + + // TODO: remove this duplicate logic + // JPEG setup + struct jpeg_decompress_struct args; + struct canvas_jpeg_error_mgr err; + + err.image = this; + args.err = jpeg_std_error(&err); + args.err->error_exit = canvas_jpeg_error_exit; + args.err->output_message = canvas_jpeg_output_message; + + // Establish the setjmp return context for canvas_jpeg_error_exit to use + if (setjmp(err.setjmp_buffer)) { + // If we get here, the JPEG code has signaled an error. + // We need to clean up the JPEG object, close the input file, and return. 
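// ---------------------------------------------------------------------------
// [Editor's note - illustrative sketch, not part of the vendored canvas code]
// assignDataAsMime() above copies the original compressed JPEG bytes and hands
// them to cairo via cairo_surface_set_mime_data(), so vector backends (PDF/SVG)
// can embed the JPEG stream directly instead of re-encoding decoded pixels.
// A minimal standalone use of that cairo call; attach_jpeg and free_owned_copy
// are hypothetical names.
//
//   #include <cairo.h>
//   #include <cstdint>
//   #include <cstdlib>
//   #include <cstring>
//
//   static void free_owned_copy(void *closure) { free(closure); }
//
//   static cairo_status_t attach_jpeg(cairo_surface_t *surface,
//                                     const uint8_t *jpeg, unsigned long len) {
//     uint8_t *copy = static_cast<uint8_t *>(malloc(len));
//     if (!copy) return CAIRO_STATUS_NO_MEMORY;
//     memcpy(copy, jpeg, len);
//     // cairo takes ownership; free_owned_copy runs when the surface lets go.
//     return cairo_surface_set_mime_data(surface, CAIRO_MIME_TYPE_JPEG,
//                                        copy, len, free_owned_copy, copy);
//   }
// ---------------------------------------------------------------------------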
+ jpeg_destroy_decompress(&args); + return CAIRO_STATUS_READ_ERROR; + } + + jpeg_create_decompress(&args); + + jpeg_mem_src(&args, buf, len); + + jpeg_read_header(&args, 1); + jpeg_start_decompress(&args); + width = naturalWidth = args.output_width; + height = naturalHeight = args.output_height; + + return decodeJPEGIntoSurface(&args, orientation); +} + +/* + * Load JPEG, convert RGB to ARGB. + */ + +cairo_status_t +Image::loadJPEG(FILE *stream) { + cairo_status_t status; + +#if defined(_MSC_VER) + if (false) { // Force using loadJPEGFromBuffer +#else + if (data_mode == DATA_IMAGE) { // Can lazily read in the JPEG. +#endif + Orientation orientation = NORMAL; + { + StreamReader reader(stream); + orientation = getExifOrientation(reader); + rewind(stream); + } + + // JPEG setup + struct jpeg_decompress_struct args; + struct canvas_jpeg_error_mgr err; + + err.image = this; + args.err = jpeg_std_error(&err); + args.err->error_exit = canvas_jpeg_error_exit; + args.err->output_message = canvas_jpeg_output_message; + + // Establish the setjmp return context for canvas_jpeg_error_exit to use + if (setjmp(err.setjmp_buffer)) { + // If we get here, the JPEG code has signaled an error. + // We need to clean up the JPEG object, close the input file, and return. + jpeg_destroy_decompress(&args); + return CAIRO_STATUS_READ_ERROR; + } + + jpeg_create_decompress(&args); + + jpeg_stdio_src(&args, stream); + + jpeg_read_header(&args, 1); + jpeg_start_decompress(&args); + + if (args.output_width > canvas_max_side || args.output_height > canvas_max_side) { + jpeg_destroy_decompress(&args); + return CAIRO_STATUS_INVALID_SIZE; + } + + width = naturalWidth = args.output_width; + height = naturalHeight = args.output_height; + + status = decodeJPEGIntoSurface(&args, orientation); + fclose(stream); + } else { // We'll need the actual source jpeg data, so read fully. 
+ uint8_t *buf; + unsigned len; + + fseek(stream, 0, SEEK_END); + len = ftell(stream); + fseek(stream, 0, SEEK_SET); + + buf = (uint8_t *) malloc(len); + if (!buf) { + this->errorInfo.set(NULL, "malloc", errno); + return CAIRO_STATUS_NO_MEMORY; + } + + if (fread(buf, len, 1, stream) != 1) { + status = CAIRO_STATUS_READ_ERROR; + } else if ((DATA_IMAGE | DATA_MIME) == data_mode) { + status = loadJPEGFromBuffer(buf, len); + if (!status) status = assignDataAsMime(buf, len, CAIRO_MIME_TYPE_JPEG); + } else if (DATA_MIME == data_mode) { + status = decodeJPEGBufferIntoMimeSurface(buf, len); + } +#if defined(_MSC_VER) + else if (DATA_IMAGE == data_mode) { + status = loadJPEGFromBuffer(buf, len); + } +#endif + else { + status = CAIRO_STATUS_READ_ERROR; + } + + fclose(stream); + free(buf); + } + + return status; +} + +/* + * Returns the Exif orientation if one exists, otherwise returns NORMAL + */ + +Image::Orientation +Image::getExifOrientation(Reader& jpeg) { + static const char kJpegStartOfImage = (char)0xd8; + static const char kJpegStartOfFrameBaseline = (char)0xc0; + static const char kJpegStartOfFrameProgressive = (char)0xc2; + static const char kJpegHuffmanTable = (char)0xc4; + static const char kJpegQuantizationTable = (char)0xdb; + static const char kJpegRestartInterval = (char)0xdd; + static const char kJpegComment = (char)0xfe; + static const char kJpegStartOfScan = (char)0xda; + static const char kJpegApp0 = (char)0xe0; + static const char kJpegApp1 = (char)0xe1; + + // Find the Exif tag (if it exists) + int exif_len = 0; + bool done = false; + while (!done && jpeg.hasBytes(1)) { + while (jpeg.hasBytes(1) && jpeg.getNext() != 0xff) { + // noop + } + if (jpeg.hasBytes(1)) { + char tag = jpeg.getNext(); + switch (tag) { + case kJpegStartOfImage: + break; // beginning of file, no extra bytes + case kJpegRestartInterval: + jpeg.skipBytes(4); + break; + case kJpegStartOfFrameBaseline: + case kJpegStartOfFrameProgressive: + case kJpegHuffmanTable: + case kJpegQuantizationTable: + case kJpegComment: + case kJpegApp0: + case kJpegApp1: { + if (jpeg.hasBytes(2)) { + uint16_t tag_len = 0; + tag_len |= jpeg.getNext() << 8; + tag_len |= jpeg.getNext(); + // The tag length includes the two bytes for the length + uint16_t tag_content_len = std::max(0, tag_len - 2); + if (tag != kJpegApp1 || !jpeg.hasBytes(tag_content_len)) { + jpeg.skipBytes(tag_content_len); // skip JPEG tags we ignore. + } else if (!jpeg.hasBytes(6)) { + jpeg.skipBytes(tag_content_len); // too short to have "Exif\0\0" + } else { + if (jpeg.getNext() == 'E' && jpeg.getNext() == 'x' && + jpeg.getNext() == 'i' && jpeg.getNext() == 'f' && + jpeg.getNext() == '\0' && jpeg.getNext() == '\0') { + exif_len = tag_content_len - 6; + done = true; + } else { + jpeg.skipBytes(tag_content_len); // too short to have "Exif\0\0" + } + } + } else { + done = true; // shouldn't happen: corrupt file or we have a bug + } + break; + } + case kJpegStartOfScan: + default: + done = true; // got to the image, apparently no exif tags here + break; + } + } + } + + // Parse exif if it exists. 
If it does, we have already checked that jpeglen + // is longer than exifStart + exifLen, so we can safely index the data + if (exif_len > 0) { + // The first two bytes of TIFF header are "II" if little-endian ("Intel") + // and "MM" if big-endian ("Motorola") + const bool isLE = (jpeg.getNext() == 'I'); + jpeg.skipBytes(3); // +1 for the other I/M, +2 for 0x002a + + auto readUint16Little = [](Reader &jpeg) -> uint32_t { + uint16_t val = uint16_t(jpeg.getNext()); + val |= uint16_t(jpeg.getNext()) << 8; + return val; + }; + auto readUint32Little = [](Reader &jpeg) -> uint32_t { + uint32_t val = uint32_t(jpeg.getNext()); + val |= uint32_t(jpeg.getNext()) << 8; + val |= uint32_t(jpeg.getNext()) << 16; + val |= uint32_t(jpeg.getNext()) << 24; + return val; + }; + auto readUint16Big = [](Reader &jpeg) -> uint32_t { + uint16_t val = uint16_t(jpeg.getNext()) << 8; + val |= uint16_t(jpeg.getNext()); + return val; + }; + auto readUint32Big = [](Reader &jpeg) -> uint32_t { + uint32_t val = uint32_t(jpeg.getNext()) << 24; + val |= uint32_t(jpeg.getNext()) << 16; + val |= uint32_t(jpeg.getNext()) << 8; + val |= uint32_t(jpeg.getNext()); + return val; + }; + // The first two bytes of TIFF header are "II" if little-endian ("Intel") + // and "MM" if big-endian ("Motorola") + auto readUint32 = [readUint32Little, readUint32Big, isLE](Reader &jpeg) -> uint32_t { + return isLE ? readUint32Little(jpeg) : readUint32Big(jpeg); + }; + auto readUint16 = [readUint16Little, readUint16Big, isLE](Reader &jpeg) -> uint32_t { + return isLE ? readUint16Little(jpeg) : readUint16Big(jpeg); + }; + // offset to the IFD0 (offset from beginning of TIFF header, II/MM, + // which is 8 bytes before where we are after reading the uint32) + jpeg.skipBytes(readUint32(jpeg) - 8); + + // Read the IFD0 ("Image File Directory 0") + // | NN | n entries in directory (2 bytes) + // | TT | tt | nnnn | vvvv | entry: tag (2b), data type (2b), + // n components (4b), value/offset (4b) + if (jpeg.hasBytes(2)) { + uint16_t nEntries = readUint16(jpeg); + for (uint16_t i = 0; i < nEntries && jpeg.hasBytes(2); ++i) { + uint16_t tag = readUint16(jpeg); + // The entry is 12 bytes. We already read the 2 bytes for the tag. + jpeg.skipBytes(6); // skip 2 for the data type, skip 4 n components. + if (tag == 0x112) { + switch (readUint16(jpeg)) { // orientation tag is always one uint16 + case 1: return NORMAL; + case 2: return MIRROR_HORIZ; + case 3: return ROTATE_180; + case 4: return MIRROR_VERT; + case 5: return MIRROR_HORIZ_AND_ROTATE_270_CW; + case 6: return ROTATE_90_CW; + case 7: return MIRROR_HORIZ_AND_ROTATE_90_CW; + case 8: return ROTATE_270_CW; + default: return NORMAL; + } + } else { + jpeg.skipBytes(4); // skip the four bytes for the value + } + } + } + } + + return NORMAL; +} + +/* + * Updates the dimensions of the bitmap according to the orientation + */ + +void Image::updateDimensionsForOrientation(Orientation orientation) { + switch (orientation) { + case ROTATE_90_CW: + case ROTATE_270_CW: + case MIRROR_HORIZ_AND_ROTATE_90_CW: + case MIRROR_HORIZ_AND_ROTATE_270_CW: { + int tmp = naturalWidth; + naturalWidth = naturalHeight; + naturalHeight = tmp; + tmp = width; + width = height; + height = tmp; + break; + } + case NORMAL: + case MIRROR_HORIZ: + case MIRROR_VERT: + case ROTATE_180: + default: { + break; + } + } +} + +/* + * Rotates the pixels to the correct orientation. 
+ */ + +void +Image::rotatePixels(uint8_t* pixels, int width, int height, int channels, + Orientation orientation) { + auto swapPixel = [channels](uint8_t* pixels, int src_idx, int dst_idx) { + uint8_t tmp; + for (int i = 0; i < channels; ++i) { + tmp = pixels[src_idx + i]; + pixels[src_idx + i] = pixels[dst_idx + i]; + pixels[dst_idx + i] = tmp; + } + }; + + auto mirrorHoriz = [swapPixel](uint8_t* pixels, int width, int height, int channels) { + int midX = width / 2; // ok to truncate if odd, since we don't swap a center pixel + for (int y = 0; y < height; ++y) { + for (int x = 0; x < midX; ++x) { + int orig_idx = (y * width + x) * channels; + int new_idx = (y * width + width - 1 - x) * channels; + swapPixel(pixels, orig_idx, new_idx); + } + } + }; + + auto mirrorVert = [swapPixel](uint8_t* pixels, int width, int height, int channels) { + int midY = height / 2; // ok to truncate if odd, since we don't swap a center pixel + for (int y = 0; y < midY; ++y) { + for (int x = 0; x < width; ++x) { + int orig_idx = (y * width + x) * channels; + int new_idx = ((height - y - 1) * width + x) * channels; + swapPixel(pixels, orig_idx, new_idx); + } + } + }; + + auto rotate90 = [](uint8_t* pixels, int width, int height, int channels) { + const int n_bytes = width * height * channels; + uint8_t *unrotated = new uint8_t[n_bytes]; + if (!unrotated) { + return; + } + std::memcpy(unrotated, pixels, n_bytes); + for (int y = 0; y < height; ++y) { + for (int x = 0; x < width ; ++x) { + int orig_idx = (y * width + x) * channels; + int new_idx = (x * height + height - 1 - y) * channels; + std::memcpy(pixels + new_idx, unrotated + orig_idx, channels); + } + } + }; + + auto rotate270 = [](uint8_t* pixels, int width, int height, int channels) { + const int n_bytes = width * height * channels; + uint8_t *unrotated = new uint8_t[n_bytes]; + if (!unrotated) { + return; + } + std::memcpy(unrotated, pixels, n_bytes); + for (int y = 0; y < height; ++y) { + for (int x = 0; x < width ; ++x) { + int orig_idx = (y * width + x) * channels; + int new_idx = ((width - 1 - x) * height + y) * channels; + std::memcpy(pixels + new_idx, unrotated + orig_idx, channels); + } + } + }; + + switch (orientation) { + case MIRROR_HORIZ: + mirrorHoriz(pixels, width, height, channels); + break; + case MIRROR_VERT: + mirrorVert(pixels, width, height, channels); + break; + case ROTATE_180: + mirrorHoriz(pixels, width, height, channels); + mirrorVert(pixels, width, height, channels); + break; + case ROTATE_90_CW: + rotate90(pixels, height, width, channels); // swap w/h because we need orig w/h + break; + case ROTATE_270_CW: + rotate270(pixels, height, width, channels); // swap w/h because we need orig w/h + break; + case MIRROR_HORIZ_AND_ROTATE_90_CW: + mirrorHoriz(pixels, height, width, channels); // swap w/h because we need orig w/h + rotate90(pixels, height, width, channels); + break; + case MIRROR_HORIZ_AND_ROTATE_270_CW: + mirrorHoriz(pixels, height, width, channels); // swap w/h because we need orig w/h + rotate270(pixels, height, width, channels); + break; + case NORMAL: + default: + break; + } +} + +#endif /* HAVE_JPEG */ + +#ifdef HAVE_RSVG + +/* + * Load SVG from buffer + */ + +cairo_status_t +Image::loadSVGFromBuffer(uint8_t *buf, unsigned len) { + _is_svg = true; + + if (NULL == (_rsvg = rsvg_handle_new_from_data(buf, len, nullptr))) { + return CAIRO_STATUS_READ_ERROR; + } + + double d_width; + double d_height; + + rsvg_handle_get_intrinsic_size_in_pixels(_rsvg, &d_width, &d_height); + + width = naturalWidth = d_width; + height = 
naturalHeight = d_height; + + if (width <= 0 || height <= 0) { + this->errorInfo.set("Width and height must be set on the svg element"); + return CAIRO_STATUS_READ_ERROR; + } + + return renderSVGToSurface(); +} + +/* + * Renders the Rsvg handle to this image's surface + */ +cairo_status_t +Image::renderSVGToSurface() { + cairo_status_t status; + + _surface = cairo_image_surface_create(CAIRO_FORMAT_ARGB32, width, height); + + status = cairo_surface_status(_surface); + if (status != CAIRO_STATUS_SUCCESS) { + g_object_unref(_rsvg); + return status; + } + + cairo_t *cr = cairo_create(_surface); + status = cairo_status(cr); + if (status != CAIRO_STATUS_SUCCESS) { + g_object_unref(_rsvg); + return status; + } + + RsvgRectangle viewport = { + 0, // x + 0, // y + static_cast(width), + static_cast(height) + }; + gboolean render_ok = rsvg_handle_render_document(_rsvg, cr, &viewport, nullptr); + if (!render_ok) { + g_object_unref(_rsvg); + cairo_destroy(cr); + return CAIRO_STATUS_READ_ERROR; // or WRITE? + } + + cairo_destroy(cr); + + _svg_last_width = width; + _svg_last_height = height; + + return status; +} + +/* + * Load SVG + */ + +cairo_status_t +Image::loadSVG(FILE *stream) { + _is_svg = true; + + struct stat s; + int fd = fileno(stream); + + // stat + if (fstat(fd, &s) < 0) { + fclose(stream); + return CAIRO_STATUS_READ_ERROR; + } + + uint8_t *buf = (uint8_t *) malloc(s.st_size); + + if (!buf) { + fclose(stream); + return CAIRO_STATUS_NO_MEMORY; + } + + size_t read = fread(buf, s.st_size, 1, stream); + fclose(stream); + + cairo_status_t result = CAIRO_STATUS_READ_ERROR; + if (1 == read) result = loadSVGFromBuffer(buf, s.st_size); + free(buf); + + return result; +} + +#endif /* HAVE_RSVG */ + +/* + * Load BMP from buffer. + */ + +cairo_status_t Image::loadBMPFromBuffer(uint8_t *buf, unsigned len){ + BMPParser::Parser parser; + + // Reversed ARGB32 with pre-multiplied alpha + uint8_t pixFmt[5] = {2, 1, 0, 3, 1}; + parser.parse(buf, len, pixFmt); + + if (parser.getStatus() != BMPParser::Status::OK) { + errorInfo.reset(); + errorInfo.message = parser.getErrMsg(); + return CAIRO_STATUS_READ_ERROR; + } + + width = naturalWidth = parser.getWidth(); + height = naturalHeight = parser.getHeight(); + uint8_t *data = parser.getImgd(); + + _surface = cairo_image_surface_create_for_data( + data, + CAIRO_FORMAT_ARGB32, + width, + height, + cairo_format_stride_for_width(CAIRO_FORMAT_ARGB32, width) + ); + + // No need to delete the data + cairo_status_t status = cairo_surface_status(_surface); + if (status) return status; + + _data = data; + parser.clearImgd(); + + return CAIRO_STATUS_SUCCESS; +} + +/* + * Load BMP. + */ + +cairo_status_t Image::loadBMP(FILE *stream){ + struct stat s; + int fd = fileno(stream); + + // Stat + if (fstat(fd, &s) < 0) { + fclose(stream); + return CAIRO_STATUS_READ_ERROR; + } + + uint8_t *buf = new uint8_t[s.st_size]; + + if (!buf) { + fclose(stream); + errorInfo.set(NULL, "malloc", errno); + return CAIRO_STATUS_NO_MEMORY; + } + + size_t read = fread(buf, s.st_size, 1, stream); + fclose(stream); + + cairo_status_t result = CAIRO_STATUS_READ_ERROR; + if (read == 1) result = loadBMPFromBuffer(buf, s.st_size); + delete[] buf; + + return result; +} + +/* + * Return UNKNOWN, SVG, GIF, JPEG, or PNG based on the filename. 
+ */ + +Image::type +Image::extension(const char *filename) { + size_t len = strlen(filename); + filename += len; + if (len >= 5 && 0 == strcmp(".jpeg", filename - 5)) return Image::JPEG; + if (len >= 4 && 0 == strcmp(".gif", filename - 4)) return Image::GIF; + if (len >= 4 && 0 == strcmp(".jpg", filename - 4)) return Image::JPEG; + if (len >= 4 && 0 == strcmp(".png", filename - 4)) return Image::PNG; + if (len >= 4 && 0 == strcmp(".svg", filename - 4)) return Image::SVG; + return Image::UNKNOWN; +} + +/* + * Sniff bytes 0..1 for JPEG's magic number ff d8. + */ + +int +Image::isJPEG(uint8_t *data) { + return 0xff == data[0] && 0xd8 == data[1]; +} + +/* + * Sniff bytes 0..2 for "GIF". + */ + +int +Image::isGIF(uint8_t *data) { + return 'G' == data[0] && 'I' == data[1] && 'F' == data[2]; +} + +/* + * Sniff bytes 1..3 for "PNG". + */ + +int +Image::isPNG(uint8_t *data) { + return 'P' == data[1] && 'N' == data[2] && 'G' == data[3]; +} + +/* + * Skip " + +#pragma once + +#include +#include "CanvasError.h" +#include +#include +#include // node < 7 uses libstdc++ on macOS which lacks complete c++11 + +#ifdef HAVE_JPEG +#include +#include +#endif + +#ifdef HAVE_GIF +#include + + #if GIFLIB_MAJOR > 5 || GIFLIB_MAJOR == 5 && GIFLIB_MINOR >= 1 + #define GIF_CLOSE_FILE(gif) DGifCloseFile(gif, NULL) + #else + #define GIF_CLOSE_FILE(gif) DGifCloseFile(gif) + #endif +#endif + +#ifdef HAVE_RSVG +#include + // librsvg <= 2.36.1, identified by undefined macro, needs an extra include + #ifndef LIBRSVG_CHECK_VERSION + #include + #endif +#endif + +using JPEGDecodeL = std::function; + +class Image : public Napi::ObjectWrap { + public: + char *filename; + int width, height; + int naturalWidth, naturalHeight; + Napi::Env env; + static Napi::FunctionReference constructor; + static void Initialize(Napi::Env& env, Napi::Object& target); + Image(const Napi::CallbackInfo& info); + Napi::Value GetComplete(const Napi::CallbackInfo& info); + Napi::Value GetWidth(const Napi::CallbackInfo& info); + Napi::Value GetHeight(const Napi::CallbackInfo& info); + Napi::Value GetNaturalWidth(const Napi::CallbackInfo& info); + Napi::Value GetNaturalHeight(const Napi::CallbackInfo& info); + Napi::Value GetDataMode(const Napi::CallbackInfo& info); + void SetDataMode(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetWidth(const Napi::CallbackInfo& info, const Napi::Value& value); + void SetHeight(const Napi::CallbackInfo& info, const Napi::Value& value); + static Napi::Value GetSource(const Napi::CallbackInfo& info); + static void SetSource(const Napi::CallbackInfo& info); + inline uint8_t *data(){ return cairo_image_surface_get_data(_surface); } + inline int stride(){ return cairo_image_surface_get_stride(_surface); } + static int isPNG(uint8_t *data); + static int isJPEG(uint8_t *data); + static int isGIF(uint8_t *data); + static int isSVG(uint8_t *data, unsigned len); + static int isBMP(uint8_t *data, unsigned len); + static cairo_status_t readPNG(void *closure, unsigned char *data, unsigned len); + inline int isComplete(){ return COMPLETE == state; } + cairo_surface_t *surface(); + cairo_status_t loadSurface(); + cairo_status_t loadFromBuffer(uint8_t *buf, unsigned len); + cairo_status_t loadPNGFromBuffer(uint8_t *buf); + cairo_status_t loadPNG(); + void clearData(); +#ifdef HAVE_RSVG + cairo_status_t loadSVGFromBuffer(uint8_t *buf, unsigned len); + cairo_status_t loadSVG(FILE *stream); + cairo_status_t renderSVGToSurface(); +#endif +#ifdef HAVE_GIF + cairo_status_t loadGIFFromBuffer(uint8_t *buf, unsigned len); + 
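// ---------------------------------------------------------------------------
// [Editor's note - illustrative sketch, not part of the vendored canvas code]
// JPEGDecodeL above is the per-pixel callback type used by jpegToARGB() in
// Image.cc: it takes a pointer to one source pixel and returns it packed as
// native-endian ARGB32, the layout CAIRO_FORMAT_ARGB32 expects. A hedged
// sketch of such a callback for 3-byte RGB input (DecodeFn and rgbToARGB are
// hypothetical names):
//
//   #include <cstdint>
//   #include <functional>
//
//   using DecodeFn = std::function<uint32_t (const uint8_t *)>;
//
//   DecodeFn rgbToARGB = [](const uint8_t *p) -> uint32_t {
//     // alpha = 255 (opaque), so no premultiplication is needed here
//     return 255u << 24 | uint32_t(p[0]) << 16 | uint32_t(p[1]) << 8 | p[2];
//   };
//   // r=0x11, g=0x22, b=0x33 packs to 0xFF112233.
// ---------------------------------------------------------------------------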
cairo_status_t loadGIF(FILE *stream); +#endif +#ifdef HAVE_JPEG + enum Orientation { + NORMAL, + MIRROR_HORIZ, + MIRROR_VERT, + ROTATE_180, + ROTATE_90_CW, + ROTATE_270_CW, + MIRROR_HORIZ_AND_ROTATE_90_CW, + MIRROR_HORIZ_AND_ROTATE_270_CW + }; + cairo_status_t loadJPEGFromBuffer(uint8_t *buf, unsigned len); + cairo_status_t loadJPEG(FILE *stream); + void jpegToARGB(jpeg_decompress_struct* args, uint8_t* data, uint8_t* src, JPEGDecodeL decode); + cairo_status_t decodeJPEGIntoSurface(jpeg_decompress_struct *info, Orientation orientation); + cairo_status_t decodeJPEGBufferIntoMimeSurface(uint8_t *buf, unsigned len); + cairo_status_t assignDataAsMime(uint8_t *data, int len, const char *mime_type); + + class Reader { + public: + virtual bool hasBytes(unsigned n) const = 0; + virtual uint8_t getNext() = 0; + virtual void skipBytes(unsigned n) = 0; + }; + Orientation getExifOrientation(Reader& jpeg); + void updateDimensionsForOrientation(Orientation orientation); + void rotatePixels(uint8_t* pixels, int width, int height, int channels, Orientation orientation); +#endif + cairo_status_t loadBMPFromBuffer(uint8_t *buf, unsigned len); + cairo_status_t loadBMP(FILE *stream); + CanvasError errorInfo; + void loaded(); + cairo_status_t load(); + ~Image(); + + enum { + DEFAULT + , LOADING + , COMPLETE + } state; + + enum data_mode_t { + DATA_IMAGE = 1 + , DATA_MIME = 2 + } data_mode; + + typedef enum { + UNKNOWN + , GIF + , JPEG + , PNG + , SVG + } type; + + static type extension(const char *filename); + + private: + cairo_surface_t *_surface; + uint8_t *_data = nullptr; + int _data_len; +#ifdef HAVE_RSVG + RsvgHandle *_rsvg; + bool _is_svg; + int _svg_last_width; + int _svg_last_height; +#endif +}; diff --git a/miniprogram/node_modules/canvas/src/ImageData.cc b/miniprogram/node_modules/canvas/src/ImageData.cc new file mode 100644 index 00000000..d334ca89 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/ImageData.cc @@ -0,0 +1,132 @@ +// Copyright (c) 2010 LearnBoost + +#include "ImageData.h" +#include "InstanceData.h" + +/* + * Initialize ImageData. + */ + +void +ImageData::Initialize(Napi::Env& env, Napi::Object& exports) { + Napi::HandleScope scope(env); + + InstanceData *data = env.GetInstanceData(); + + Napi::Function ctor = DefineClass(env, "ImageData", { + InstanceAccessor<&ImageData::GetWidth>("width", napi_default_jsproperty), + InstanceAccessor<&ImageData::GetHeight>("height", napi_default_jsproperty) + }); + + exports.Set("ImageData", ctor); + data->ImageDataCtor = Napi::Persistent(ctor); +} + +/* + * Initialize a new ImageData object. + */ + +ImageData::ImageData(const Napi::CallbackInfo& info) : Napi::ObjectWrap(info), env(info.Env()) { + Napi::TypedArray dataArray; + uint32_t width; + uint32_t height; + int length; + + if (info[0].IsNumber() && info[1].IsNumber()) { + width = info[0].As().Uint32Value(); + if (width == 0) { + Napi::RangeError::New(env, "The source width is zero.").ThrowAsJavaScriptException(); + return; + } + height = info[1].As().Uint32Value(); + if (height == 0) { + Napi::RangeError::New(env, "The source height is zero.").ThrowAsJavaScriptException(); + return; + } + length = width * height * 4; // ImageData(w, h) constructor assumes 4 BPP; documented. 
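// ---------------------------------------------------------------------------
// [Editor's note - illustrative sketch, not part of the vendored canvas code]
// RGBA storage is 4 bytes per pixel, which is where the width * height * 4
// above comes from; the (data, width) constructor branch further below inverts
// the same relation to recover the height. Worked through as compile-time
// checks (rgbaByteLength and rgbaHeightFor are hypothetical helpers):
//
//   #include <cstdint>
//
//   constexpr uint32_t rgbaByteLength(uint32_t w, uint32_t h) { return w * h * 4; }
//   constexpr uint32_t rgbaHeightFor(uint32_t byteLength, uint32_t w) {
//     return (byteLength / 4) / w;   // integer division, as in the code below
//   }
//   static_assert(rgbaByteLength(640, 480) == 1228800, "640x480 RGBA buffer");
//   static_assert(rgbaHeightFor(1228800, 640) == 480, "height round-trips");
// ---------------------------------------------------------------------------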
+ + dataArray = Napi::Uint8Array::New(env, length, napi_uint8_clamped_array); + } else if ( + info[0].IsTypedArray() && + info[0].As().TypedArrayType() == napi_uint8_clamped_array && + info[1].IsNumber() + ) { + dataArray = info[0].As(); + + length = dataArray.ElementLength(); + if (length == 0) { + Napi::RangeError::New(env, "The input data has a zero byte length.").ThrowAsJavaScriptException(); + return; + } + + // Don't assert that the ImageData length is a multiple of four because some + // data formats are not 4 BPP. + + width = info[1].As().Uint32Value(); + if (width == 0) { + Napi::RangeError::New(env, "The source width is zero.").ThrowAsJavaScriptException(); + return; + } + + // Don't assert that the byte length is a multiple of 4 * width, ditto. + + if (info[2].IsNumber()) { // Explicit height given + height = info[2].As().Uint32Value(); + } else { // Calculate height assuming 4 BPP + int size = length / 4; + height = size / width; + } + } else if ( + info[0].IsTypedArray() && + info[0].As().TypedArrayType() == napi_uint16_array && + info[1].IsNumber() + ) { // Intended for RGB16_565 format + dataArray = info[0].As(); + + length = dataArray.ElementLength(); + if (length == 0) { + Napi::RangeError::New(env, "The input data has a zero byte length.").ThrowAsJavaScriptException(); + return; + } + + width = info[1].As().Uint32Value(); + if (width == 0) { + Napi::RangeError::New(env, "The source width is zero.").ThrowAsJavaScriptException(); + return; + } + + if (info[2].IsNumber()) { // Explicit height given + height = info[2].As().Uint32Value(); + } else { // Calculate height assuming 2 BPP + int size = length / 2; + height = size / width; + } + } else { + Napi::TypeError::New(env, "Expected (Uint8ClampedArray, width[, height]), (Uint16Array, width[, height]) or (width, height)").ThrowAsJavaScriptException(); + return; + } + + _width = width; + _height = height; + _data = dataArray.As().Data(); + + info.This().As().Set("data", dataArray); +} + +/* + * Get width. + */ + +Napi::Value +ImageData::GetWidth(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, width()); +} + +/* + * Get height. 
+ */ + +Napi::Value +ImageData::GetHeight(const Napi::CallbackInfo& info) { + return Napi::Number::New(env, height()); +} diff --git a/miniprogram/node_modules/canvas/src/ImageData.h b/miniprogram/node_modules/canvas/src/ImageData.h new file mode 100644 index 00000000..32d6037d --- /dev/null +++ b/miniprogram/node_modules/canvas/src/ImageData.h @@ -0,0 +1,26 @@ +// Copyright (c) 2010 LearnBoost + +#pragma once + +#include +#include // node < 7 uses libstdc++ on macOS which lacks complete c++11 + +class ImageData : public Napi::ObjectWrap { + public: + static void Initialize(Napi::Env& env, Napi::Object& exports); + ImageData(const Napi::CallbackInfo& info); + Napi::Value GetWidth(const Napi::CallbackInfo& info); + Napi::Value GetHeight(const Napi::CallbackInfo& info); + + inline int width() { return _width; } + inline int height() { return _height; } + inline uint8_t *data() { return _data; } + + Napi::Env env; + + private: + int _width; + int _height; + uint8_t *_data; + +}; diff --git a/miniprogram/node_modules/canvas/src/InstanceData.h b/miniprogram/node_modules/canvas/src/InstanceData.h new file mode 100644 index 00000000..939f2a48 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/InstanceData.h @@ -0,0 +1,15 @@ +#include + +struct InstanceData { + Napi::FunctionReference ImageBackendCtor; + Napi::FunctionReference PdfBackendCtor; + Napi::FunctionReference SvgBackendCtor; + Napi::FunctionReference CanvasCtor; + Napi::FunctionReference CanvasGradientCtor; + Napi::FunctionReference DOMMatrixCtor; + Napi::FunctionReference ImageCtor; + Napi::FunctionReference parseFont; + Napi::FunctionReference Context2dCtor; + Napi::FunctionReference ImageDataCtor; + Napi::FunctionReference CanvasPatternCtor; +}; diff --git a/miniprogram/node_modules/canvas/src/JPEGStream.h b/miniprogram/node_modules/canvas/src/JPEGStream.h new file mode 100644 index 00000000..43c74f13 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/JPEGStream.h @@ -0,0 +1,157 @@ +#pragma once + +#include "closure.h" +#include +#include + +/* + * Expanded data destination object for closure output, + * inspired by IJG's jdatadst.c + */ + +struct closure_destination_mgr { + jpeg_destination_mgr pub; + JpegClosure* closure; + JOCTET *buffer; + int bufsize; +}; + +void +init_closure_destination(j_compress_ptr cinfo){ + // we really don't have to do anything here +} + +boolean +empty_closure_output_buffer(j_compress_ptr cinfo){ + closure_destination_mgr *dest = (closure_destination_mgr *) cinfo->dest; + Napi::Env env = dest->closure->canvas->Env(); + Napi::HandleScope scope(env); + Napi::AsyncContext async(env, "canvas:empty_closure_output_buffer"); + + Napi::Object buf = Napi::Buffer::New(env, (char *)dest->buffer, dest->bufsize); + + // emit "data" + dest->closure->cb.MakeCallback(env.Global(), {env.Null(), buf}, async); + + dest->buffer = (JOCTET *)malloc(dest->bufsize); + cinfo->dest->next_output_byte = dest->buffer; + cinfo->dest->free_in_buffer = dest->bufsize; + return true; +} + +void +term_closure_destination(j_compress_ptr cinfo){ + closure_destination_mgr *dest = (closure_destination_mgr *) cinfo->dest; + Napi::Env env = dest->closure->canvas->Env(); + Napi::HandleScope scope(env); + Napi::AsyncContext async(env, "canvas:term_closure_destination"); + + /* emit remaining data */ + Napi::Object buf = Napi::Buffer::New(env, (char *)dest->buffer, dest->bufsize - dest->pub.free_in_buffer); + + dest->closure->cb.MakeCallback(env.Global(), {env.Null(), buf}, async); + + // emit "end" + 
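// ---------------------------------------------------------------------------
// [Editor's note - illustrative sketch, not part of the vendored canvas code]
// These three hooks implement libjpeg's jpeg_destination_mgr contract:
// init_destination() is called by jpeg_start_compress(); empty_output_buffer()
// is called whenever free_in_buffer reaches zero (the whole buffer counts as
// written, and next_output_byte/free_in_buffer must be reset before returning
// TRUE); term_destination() is called by jpeg_finish_compress() to flush the
// final (bufsize - free_in_buffer) bytes. A minimal in-memory destination
// following the same contract (mem_dst and its functions are hypothetical):
//
//   #include <vector>
//   #include <jpeglib.h>
//
//   struct mem_dst { jpeg_destination_mgr pub; std::vector<JOCTET> out; JOCTET tmp[4096]; };
//   static void mem_init(j_compress_ptr c) {
//     mem_dst *d = reinterpret_cast<mem_dst *>(c->dest);
//     d->pub.next_output_byte = d->tmp; d->pub.free_in_buffer = sizeof d->tmp;
//   }
//   static boolean mem_empty(j_compress_ptr c) {
//     mem_dst *d = reinterpret_cast<mem_dst *>(c->dest);
//     d->out.insert(d->out.end(), d->tmp, d->tmp + sizeof d->tmp);   // full buffer
//     d->pub.next_output_byte = d->tmp; d->pub.free_in_buffer = sizeof d->tmp;
//     return TRUE;
//   }
//   static void mem_term(j_compress_ptr c) {
//     mem_dst *d = reinterpret_cast<mem_dst *>(c->dest);
//     d->out.insert(d->out.end(), d->tmp, d->tmp + (sizeof d->tmp - d->pub.free_in_buffer));
//   }
//
// The MakeCallback call just below is this term_destination's final step: it
// emits the stream's trailing "end" event.
// ---------------------------------------------------------------------------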
dest->closure->cb.MakeCallback(env.Global(), {env.Null(), env.Null()}, async); +} + +void +jpeg_closure_dest(j_compress_ptr cinfo, JpegClosure* closure, int bufsize){ + closure_destination_mgr * dest; + + /* The destination object is made permanent so that multiple JPEG images + * can be written to the same buffer without re-executing jpeg_mem_dest. + */ + if (cinfo->dest == NULL) { /* first time for this JPEG object? */ + cinfo->dest = (struct jpeg_destination_mgr *) + (*cinfo->mem->alloc_small) ((j_common_ptr) cinfo, JPOOL_PERMANENT, + sizeof(closure_destination_mgr)); + } + + dest = (closure_destination_mgr *) cinfo->dest; + + cinfo->dest->init_destination = &init_closure_destination; + cinfo->dest->empty_output_buffer = &empty_closure_output_buffer; + cinfo->dest->term_destination = &term_closure_destination; + + dest->closure = closure; + dest->bufsize = bufsize; + dest->buffer = (JOCTET *)malloc(bufsize); + + cinfo->dest->next_output_byte = dest->buffer; + cinfo->dest->free_in_buffer = dest->bufsize; +} + +void encode_jpeg(jpeg_compress_struct cinfo, cairo_surface_t *surface, int quality, bool progressive, int chromaHSampFactor, int chromaVSampFactor) { + int w = cairo_image_surface_get_width(surface); + int h = cairo_image_surface_get_height(surface); + + cinfo.in_color_space = JCS_RGB; + cinfo.input_components = 3; + cinfo.image_width = w; + cinfo.image_height = h; + jpeg_set_defaults(&cinfo); + if (progressive) + jpeg_simple_progression(&cinfo); + jpeg_set_quality(&cinfo, quality, (quality < 25) ? 0 : 1); + cinfo.comp_info[0].h_samp_factor = chromaHSampFactor; + cinfo.comp_info[0].v_samp_factor = chromaVSampFactor; + + JSAMPROW slr; + jpeg_start_compress(&cinfo, TRUE); + unsigned char *dst; + unsigned int *src = (unsigned int *)cairo_image_surface_get_data(surface); + int sl = 0; + dst = (unsigned char *)malloc(w * 3); + while (sl < h) { + unsigned char *dp = dst; + int x = 0; + while (x < w) { + dp[0] = (*src >> 16) & 255; + dp[1] = (*src >> 8) & 255; + dp[2] = *src & 255; + src++; + dp += 3; + x++; + } + slr = dst; + jpeg_write_scanlines(&cinfo, &slr, 1); + sl++; + } + free(dst); + jpeg_finish_compress(&cinfo); + jpeg_destroy_compress(&cinfo); +} + +void +write_to_jpeg_stream(cairo_surface_t *surface, int bufsize, JpegClosure* closure) { + jpeg_compress_struct cinfo; + jpeg_error_mgr jerr; + cinfo.err = jpeg_std_error(&jerr); + jpeg_create_compress(&cinfo); + jpeg_closure_dest(&cinfo, closure, bufsize); + encode_jpeg( + cinfo, + surface, + closure->quality, + closure->progressive, + closure->chromaSubsampling, + closure->chromaSubsampling); +} + +void +write_to_jpeg_buffer(cairo_surface_t* surface, JpegClosure* closure) { + jpeg_compress_struct cinfo; + jpeg_error_mgr jerr; + cinfo.err = jpeg_std_error(&jerr); + jpeg_create_compress(&cinfo); + cinfo.client_data = closure; + cinfo.dest = closure->jpeg_dest_mgr; + encode_jpeg( + cinfo, + surface, + closure->quality, + closure->progressive, + closure->chromaSubsampling, + closure->chromaSubsampling); +} diff --git a/miniprogram/node_modules/canvas/src/PNG.h b/miniprogram/node_modules/canvas/src/PNG.h new file mode 100644 index 00000000..30b88f85 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/PNG.h @@ -0,0 +1,292 @@ +#pragma once + +#include +#include "closure.h" +#include // round +#include +#include +#include +#include + +#if defined(__GNUC__) && (__GNUC__ > 2) && defined(__OPTIMIZE__) +#define likely(expr) (__builtin_expect (!!(expr), 1)) +#define unlikely(expr) (__builtin_expect (!!(expr), 0)) +#else +#define 
likely(expr) (expr) +#define unlikely(expr) (expr) +#endif + +static void canvas_png_flush(png_structp png_ptr) { + /* Do nothing; fflush() is said to be just a waste of energy. */ + (void) png_ptr; /* Stifle compiler warning */ +} + +/* Converts native endian xRGB => RGBx bytes */ +static void canvas_convert_data_to_bytes(png_structp png, png_row_infop row_info, png_bytep data) { + unsigned int i; + + for (i = 0; i < row_info->rowbytes; i += 4) { + uint8_t *b = &data[i]; + uint32_t pixel; + + memcpy(&pixel, b, sizeof (uint32_t)); + + b[0] = (pixel & 0xff0000) >> 16; + b[1] = (pixel & 0x00ff00) >> 8; + b[2] = (pixel & 0x0000ff) >> 0; + b[3] = 0; + } +} + +/* Unpremultiplies data and converts native endian ARGB => RGBA bytes */ +static void canvas_unpremultiply_data(png_structp png, png_row_infop row_info, png_bytep data) { + unsigned int i; + + for (i = 0; i < row_info->rowbytes; i += 4) { + uint8_t *b = &data[i]; + uint32_t pixel; + uint8_t alpha; + + memcpy(&pixel, b, sizeof (uint32_t)); + alpha = (pixel & 0xff000000) >> 24; + if (alpha == 0) { + b[0] = b[1] = b[2] = b[3] = 0; + } else { + b[0] = (((pixel & 0xff0000) >> 16) * 255 + alpha / 2) / alpha; + b[1] = (((pixel & 0x00ff00) >> 8) * 255 + alpha / 2) / alpha; + b[2] = (((pixel & 0x0000ff) >> 0) * 255 + alpha / 2) / alpha; + b[3] = alpha; + } + } +} + +/* Converts RGB16_565 format data to RGBA32 */ +static void canvas_convert_565_to_888(png_structp png, png_row_infop row_info, png_bytep data) { + // Loop in reverse to unpack in-place. + for (ptrdiff_t col = row_info->width - 1; col >= 0; col--) { + uint8_t* src = &data[col * sizeof(uint16_t)]; + uint8_t* dst = &data[col * 3]; + uint16_t pixel; + + memcpy(&pixel, src, sizeof(uint16_t)); + + // Convert and rescale to the full 0-255 range + // See http://stackoverflow.com/a/29326693 + const uint8_t red5 = (pixel & 0xF800) >> 11; + const uint8_t green6 = (pixel & 0x7E0) >> 5; + const uint8_t blue5 = (pixel & 0x001F); + + dst[0] = ((red5 * 255 + 15) / 31); + dst[1] = ((green6 * 255 + 31) / 63); + dst[2] = ((blue5 * 255 + 15) / 31); + } +} + +struct canvas_png_write_closure_t { + cairo_write_func_t write_func; + PngClosure* closure; +}; + +#ifdef PNG_SETJMP_SUPPORTED +bool setjmp_wrapper(png_structp png) { + return setjmp(png_jmpbuf(png)); +} +#endif + +static cairo_status_t canvas_write_png(cairo_surface_t *surface, png_rw_ptr write_func, canvas_png_write_closure_t *closure) { + unsigned int i; + cairo_status_t status = CAIRO_STATUS_SUCCESS; + uint8_t *data; + png_structp png; + png_infop info; + png_bytep *volatile rows = NULL; + png_color_16 white; + int png_color_type; + int bpc; + unsigned int width = cairo_image_surface_get_width(surface); + unsigned int height = cairo_image_surface_get_height(surface); + + data = cairo_image_surface_get_data(surface); + if (data == NULL) { + status = CAIRO_STATUS_SURFACE_TYPE_MISMATCH; + return status; + } + cairo_surface_flush(surface); + + if (width == 0 || height == 0) { + status = CAIRO_STATUS_WRITE_ERROR; + return status; + } + + rows = (png_bytep *) malloc(height * sizeof (png_byte*)); + if (unlikely(rows == NULL)) { + status = CAIRO_STATUS_NO_MEMORY; + return status; + } + + int stride = cairo_image_surface_get_stride(surface); + for (i = 0; i < height; i++) { + rows[i] = (png_byte *) data + i * stride; + } + +#ifdef PNG_USER_MEM_SUPPORTED + png = png_create_write_struct_2(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL, NULL, NULL, NULL); +#else + png = png_create_write_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL); +#endif + + if (unlikely(png == 
NULL)) { + status = CAIRO_STATUS_NO_MEMORY; + free(rows); + return status; + } + + info = png_create_info_struct (png); + if (unlikely(info == NULL)) { + status = CAIRO_STATUS_NO_MEMORY; + png_destroy_write_struct(&png, &info); + free(rows); + return status; + + } + +#ifdef PNG_SETJMP_SUPPORTED + if (setjmp_wrapper(png)) { + png_destroy_write_struct(&png, &info); + free(rows); + return status; + } +#endif + + png_set_write_fn(png, closure, write_func, canvas_png_flush); + png_set_compression_level(png, closure->closure->compressionLevel); + png_set_filter(png, 0, closure->closure->filters); + if (closure->closure->resolution != 0) { + uint32_t res = static_cast(round(static_cast(closure->closure->resolution) * 39.3701)); + png_set_pHYs(png, info, res, res, PNG_RESOLUTION_METER); + } + + cairo_format_t format = cairo_image_surface_get_format(surface); + + switch (format) { + case CAIRO_FORMAT_ARGB32: + bpc = 8; + png_color_type = PNG_COLOR_TYPE_RGB_ALPHA; + break; +#ifdef CAIRO_FORMAT_RGB30 + case CAIRO_FORMAT_RGB30: + bpc = 10; + png_color_type = PNG_COLOR_TYPE_RGB; + break; +#endif + case CAIRO_FORMAT_RGB24: + bpc = 8; + png_color_type = PNG_COLOR_TYPE_RGB; + break; + case CAIRO_FORMAT_A8: + bpc = 8; + png_color_type = PNG_COLOR_TYPE_GRAY; + break; + case CAIRO_FORMAT_A1: + bpc = 1; + png_color_type = PNG_COLOR_TYPE_GRAY; +#ifndef WORDS_BIGENDIAN + png_set_packswap(png); +#endif + break; + case CAIRO_FORMAT_RGB16_565: + bpc = 8; // 565 gets upconverted to 888 + png_color_type = PNG_COLOR_TYPE_RGB; + break; + case CAIRO_FORMAT_INVALID: + default: + status = CAIRO_STATUS_INVALID_FORMAT; + png_destroy_write_struct(&png, &info); + free(rows); + return status; + } + + if ((format == CAIRO_FORMAT_A8 || format == CAIRO_FORMAT_A1) && + closure->closure->palette != NULL) { + png_color_type = PNG_COLOR_TYPE_PALETTE; + } + + png_set_IHDR(png, info, width, height, bpc, png_color_type, PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_DEFAULT, PNG_FILTER_TYPE_DEFAULT); + + if (png_color_type == PNG_COLOR_TYPE_PALETTE) { + size_t nColors = closure->closure->nPaletteColors; + uint8_t* colors = closure->closure->palette; + uint8_t backgroundIndex = closure->closure->backgroundIndex; + png_colorp pngPalette = (png_colorp)png_malloc(png, nColors * sizeof(png_colorp)); + png_bytep transparency = (png_bytep)png_malloc(png, nColors * sizeof(png_bytep)); + for (i = 0; i < nColors; i++) { + pngPalette[i].red = colors[4 * i]; + pngPalette[i].green = colors[4 * i + 1]; + pngPalette[i].blue = colors[4 * i + 2]; + transparency[i] = colors[4 * i + 3]; + } + png_set_PLTE(png, info, pngPalette, nColors); + png_set_tRNS(png, info, transparency, nColors, NULL); + png_set_packing(png); // pack pixels + // have libpng free palette and trans: + png_data_freer(png, info, PNG_DESTROY_WILL_FREE_DATA, PNG_FREE_PLTE | PNG_FREE_TRNS); + png_color_16 bkg; + bkg.index = backgroundIndex; + png_set_bKGD(png, info, &bkg); + } + + if (png_color_type != PNG_COLOR_TYPE_PALETTE) { + white.gray = (1 << bpc) - 1; + white.red = white.blue = white.green = white.gray; + png_set_bKGD(png, info, &white); + } + + /* We have to call png_write_info() before setting up the write + * transformation, since it stores data internally in 'png' + * that is needed for the write transformation functions to work. 
+ */ + png_write_info(png, info); + if (png_color_type == PNG_COLOR_TYPE_RGB_ALPHA) { + png_set_write_user_transform_fn(png, canvas_unpremultiply_data); + } else if (format == CAIRO_FORMAT_RGB16_565) { + png_set_write_user_transform_fn(png, canvas_convert_565_to_888); + } else if (png_color_type == PNG_COLOR_TYPE_RGB) { + png_set_write_user_transform_fn(png, canvas_convert_data_to_bytes); + png_set_filler(png, 0, PNG_FILLER_AFTER); + } + + png_write_image(png, rows); + png_write_end(png, info); + + png_destroy_write_struct(&png, &info); + free(rows); + return status; +} + +static void canvas_stream_write_func(png_structp png, png_bytep data, png_size_t size) { + cairo_status_t status; + struct canvas_png_write_closure_t *png_closure; + + png_closure = (struct canvas_png_write_closure_t *) png_get_io_ptr(png); + status = png_closure->write_func(png_closure->closure, data, size); + if (unlikely(status)) { + cairo_status_t *error = (cairo_status_t *) png_get_error_ptr(png); + if (*error == CAIRO_STATUS_SUCCESS) { + *error = status; + } + png_error(png, NULL); + } +} + +static cairo_status_t canvas_write_to_png_stream(cairo_surface_t *surface, cairo_write_func_t write_func, PngClosure* closure) { + struct canvas_png_write_closure_t png_closure; + + if (cairo_surface_status(surface)) { + return cairo_surface_status(surface); + } + + png_closure.write_func = write_func; + png_closure.closure = closure; + + return canvas_write_png(surface, canvas_stream_write_func, &png_closure); +} diff --git a/miniprogram/node_modules/canvas/src/Point.h b/miniprogram/node_modules/canvas/src/Point.h new file mode 100644 index 00000000..a61f8b1b --- /dev/null +++ b/miniprogram/node_modules/canvas/src/Point.h @@ -0,0 +1,11 @@ +// Copyright (c) 2010 LearnBoost +#pragma once + +template +class Point { + public: + T x, y; + Point(T x=0, T y=0): x(x), y(y) {} + Point(const Point&) = default; + Point& operator=(const Point&) = default; +}; diff --git a/miniprogram/node_modules/canvas/src/Util.h b/miniprogram/node_modules/canvas/src/Util.h new file mode 100644 index 00000000..0e6d1d89 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/Util.h @@ -0,0 +1,9 @@ +#pragma once + +#include + +inline bool streq_casein(std::string& str1, std::string& str2) { + return str1.size() == str2.size() && std::equal(str1.begin(), str1.end(), str2.begin(), [](char& c1, char& c2) { + return c1 == c2 || std::toupper(c1) == std::toupper(c2); + }); +} diff --git a/miniprogram/node_modules/canvas/src/backend/Backend.cc b/miniprogram/node_modules/canvas/src/backend/Backend.cc new file mode 100644 index 00000000..4607fb64 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/backend/Backend.cc @@ -0,0 +1,73 @@ +#include "Backend.h" +#include +#include + +Backend::Backend(std::string name, Napi::CallbackInfo& info) : name(name), env(info.Env()) { + int width = 0; + int height = 0; + if (info[0].IsNumber()) width = info[0].As().Int32Value(); + if (info[1].IsNumber()) height = info[1].As().Int32Value(); + this->width = width; + this->height = height; +} + +void Backend::setCanvas(Canvas* _canvas) +{ + this->canvas = _canvas; +} + + + +std::string Backend::getName() +{ + return name; +} + +int Backend::getWidth() +{ + return this->width; +} +void Backend::setWidth(int width_) +{ + this->destroySurface(); + this->width = width_; +} + +int Backend::getHeight() +{ + return this->height; +} +void Backend::setHeight(int height_) +{ + this->destroySurface(); + this->height = height_; +} + +bool Backend::isSurfaceValid() { + bool isValid = true; + + 
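// ---------------------------------------------------------------------------
// [Editor's note - illustrative sketch, not part of the vendored canvas code]
// cairo reports failure through object status rather than exceptions or NULL
// returns: creation always yields an object, and a broken one carries an error
// code that cairo_status_to_string() can describe, which is exactly what the
// check just below does. A standalone example of the same idiom:
//
//   #include <cairo.h>
//   #include <cstdio>
//
//   int main() {
//     cairo_surface_t *s = cairo_image_surface_create(CAIRO_FORMAT_ARGB32, 200, 100);
//     cairo_status_t st = cairo_surface_status(s);
//     if (st != CAIRO_STATUS_SUCCESS)
//       fprintf(stderr, "surface error: %s\n", cairo_status_to_string(st));
//     cairo_surface_destroy(s);
//     return st == CAIRO_STATUS_SUCCESS ? 0 : 1;
//   }
// ---------------------------------------------------------------------------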
cairo_status_t status = cairo_surface_status(ensureSurface()); + + if (status != CAIRO_STATUS_SUCCESS) { + error = cairo_status_to_string(status); + isValid = false; + } + + return isValid; +} + + +BackendOperationNotAvailable::BackendOperationNotAvailable(Backend* backend, + std::string operation_name) + : operation_name(operation_name) +{ + msg = "operation " + operation_name + + " not supported by backend " + backend->getName(); +}; + +BackendOperationNotAvailable::~BackendOperationNotAvailable() throw() {}; + +const char* BackendOperationNotAvailable::what() const throw() +{ + return msg.c_str(); +}; diff --git a/miniprogram/node_modules/canvas/src/backend/Backend.h b/miniprogram/node_modules/canvas/src/backend/Backend.h new file mode 100644 index 00000000..d51eb760 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/backend/Backend.h @@ -0,0 +1,61 @@ +#pragma once + +#include +#include "../dll_visibility.h" +#include +#include +#include + +class Canvas; + +class Backend +{ + private: + const std::string name; + const char* error = NULL; + + protected: + int width; + int height; + Canvas* canvas = nullptr; + + Backend(std::string name, Napi::CallbackInfo& info); + + public: + Napi::Env env; + + void setCanvas(Canvas* canvas); + + virtual cairo_surface_t* ensureSurface() = 0; + virtual void destroySurface() = 0; + + DLL_PUBLIC std::string getName(); + + DLL_PUBLIC int getWidth(); + virtual void setWidth(int width); + + DLL_PUBLIC int getHeight(); + virtual void setHeight(int height); + + // Overridden by ImageBackend. SVG and PDF thus always return INVALID. + virtual cairo_format_t getFormat() { + return CAIRO_FORMAT_INVALID; + } + + bool isSurfaceValid(); + inline const char* getError(){ return error; } +}; + + +class BackendOperationNotAvailable: public std::exception +{ + private: + std::string operation_name; + std::string msg; + + public: + BackendOperationNotAvailable(Backend* backend, std::string operation_name); + ~BackendOperationNotAvailable() throw(); + + const char* what() const throw(); +}; diff --git a/miniprogram/node_modules/canvas/src/backend/ImageBackend.cc b/miniprogram/node_modules/canvas/src/backend/ImageBackend.cc new file mode 100644 index 00000000..1fede073 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/backend/ImageBackend.cc @@ -0,0 +1,67 @@ +#include "ImageBackend.h" +#include "../InstanceData.h" +#include +#include + +ImageBackend::ImageBackend(Napi::CallbackInfo& info) : Napi::ObjectWrap(info), Backend("image", info) {} + +ImageBackend::~ImageBackend() { + destroySurface(); +} + +// This returns an approximate value only, suitable for +// Napi::MemoryManagement:: AdjustExternalMemory. +// The formats that don't map to intrinsic types (RGB30, A1) round up. 
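// ---------------------------------------------------------------------------
// [Editor's note - illustrative sketch, not part of the vendored canvas code]
// Reporting the pixel buffer via AdjustExternalMemory() lets the JS engine's
// GC account for memory it cannot otherwise observe; the estimate only needs
// to be roughly right, but the + and - adjustments must stay symmetric or the
// reported total drifts. A hedged sketch of that pairing (approxSurfaceBytes
// is a hypothetical helper):
//
//   #include <cstdint>
//
//   inline int64_t approxSurfaceBytes(int bytesPerPixel, int w, int h) {
//     return static_cast<int64_t>(bytesPerPixel) * w * h;  // stride padding ignored
//   }
//   // on surface creation:
//   //   Napi::MemoryManagement::AdjustExternalMemory(env,  approxSurfaceBytes(bpp, w, h));
//   // on surface destruction (same formula, negated):
//   //   Napi::MemoryManagement::AdjustExternalMemory(env, -approxSurfaceBytes(bpp, w, h));
// ---------------------------------------------------------------------------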
+int32_t ImageBackend::approxBytesPerPixel() { + switch (format) { + case CAIRO_FORMAT_ARGB32: + case CAIRO_FORMAT_RGB24: + return 4; +#ifdef CAIRO_FORMAT_RGB30 + case CAIRO_FORMAT_RGB30: + return 3; +#endif + case CAIRO_FORMAT_RGB16_565: + return 2; + case CAIRO_FORMAT_A8: + case CAIRO_FORMAT_A1: + return 1; + default: + return 0; + } +} + +cairo_surface_t* ImageBackend::ensureSurface() { + if (!surface) { + surface = cairo_image_surface_create(format, width, height); + assert(surface); + Napi::MemoryManagement::AdjustExternalMemory(env, approxBytesPerPixel() * width * height); + } + return surface; +} + +void ImageBackend::destroySurface() { + if (surface) { + cairo_surface_destroy(surface); + surface = nullptr; + Napi::MemoryManagement::AdjustExternalMemory(env, -approxBytesPerPixel() * width * height); + } +} + +cairo_format_t ImageBackend::getFormat() { + return format; +} + +void ImageBackend::setFormat(cairo_format_t _format) { + this->destroySurface(); + this->format = _format; +} + +Napi::FunctionReference ImageBackend::constructor; + +void ImageBackend::Initialize(Napi::Object target) { + Napi::Env env = target.Env(); + Napi::Function ctor = DefineClass(env, "ImageBackend", {}); + InstanceData* data = env.GetInstanceData(); + data->ImageBackendCtor = Napi::Persistent(ctor); +} diff --git a/miniprogram/node_modules/canvas/src/backend/ImageBackend.h b/miniprogram/node_modules/canvas/src/backend/ImageBackend.h new file mode 100644 index 00000000..14946c7b --- /dev/null +++ b/miniprogram/node_modules/canvas/src/backend/ImageBackend.h @@ -0,0 +1,26 @@ +#pragma once + +#include "Backend.h" +#include + +class ImageBackend : public Napi::ObjectWrap, public Backend +{ + private: + cairo_surface_t* ensureSurface(); + void destroySurface(); + cairo_format_t format = DEFAULT_FORMAT; + cairo_surface_t* surface = nullptr; + + public: + ImageBackend(Napi::CallbackInfo& info); + ~ImageBackend(); + + cairo_format_t getFormat(); + void setFormat(cairo_format_t format); + + int32_t approxBytesPerPixel(); + + static Napi::FunctionReference constructor; + static void Initialize(Napi::Object target); + const static cairo_format_t DEFAULT_FORMAT = CAIRO_FORMAT_ARGB32; +}; diff --git a/miniprogram/node_modules/canvas/src/backend/PdfBackend.cc b/miniprogram/node_modules/canvas/src/backend/PdfBackend.cc new file mode 100644 index 00000000..4eb46168 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/backend/PdfBackend.cc @@ -0,0 +1,40 @@ +#include "PdfBackend.h" + +#include +#include +#include "../InstanceData.h" +#include "../Canvas.h" +#include "../closure.h" + +PdfBackend::PdfBackend(Napi::CallbackInfo& info) : Napi::ObjectWrap(info), Backend("pdf", info) {} + +PdfBackend::~PdfBackend() { + destroySurface(); +} + +cairo_surface_t* PdfBackend::ensureSurface() { + if (!surface) { + _closure = new PdfSvgClosure(canvas); + surface = cairo_pdf_surface_create_for_stream(PdfSvgClosure::writeVec, _closure, width, height); + } + return surface; +} + +void PdfBackend::destroySurface() { + if (surface) { + cairo_surface_finish(surface); + cairo_surface_destroy(surface); + surface = nullptr; + assert(_closure); + delete _closure; + _closure = nullptr; + } +} + +void +PdfBackend::Initialize(Napi::Object target) { + Napi::Env env = target.Env(); + InstanceData* data = env.GetInstanceData(); + Napi::Function ctor = DefineClass(env, "PdfBackend", {}); + data->PdfBackendCtor = Napi::Persistent(ctor); +} diff --git a/miniprogram/node_modules/canvas/src/backend/PdfBackend.h 
b/miniprogram/node_modules/canvas/src/backend/PdfBackend.h new file mode 100644 index 00000000..6ae8415c --- /dev/null +++ b/miniprogram/node_modules/canvas/src/backend/PdfBackend.h @@ -0,0 +1,24 @@ +#pragma once + +#include "Backend.h" +#include "../closure.h" +#include + +class PdfBackend : public Napi::ObjectWrap, public Backend +{ + private: + cairo_surface_t* ensureSurface(); + void destroySurface(); + cairo_surface_t* surface = nullptr; + + public: + PdfSvgClosure* _closure = NULL; + inline PdfSvgClosure* closure() { return _closure; } + + PdfBackend(Napi::CallbackInfo& info); + ~PdfBackend(); + + static Napi::FunctionReference constructor; + static void Initialize(Napi::Object target); + static Napi::Value New(const Napi::CallbackInfo& info); +}; diff --git a/miniprogram/node_modules/canvas/src/backend/SvgBackend.cc b/miniprogram/node_modules/canvas/src/backend/SvgBackend.cc new file mode 100644 index 00000000..475c07de --- /dev/null +++ b/miniprogram/node_modules/canvas/src/backend/SvgBackend.cc @@ -0,0 +1,44 @@ +#include "SvgBackend.h" + +#include +#include +#include "../Canvas.h" +#include "../closure.h" +#include "../InstanceData.h" +#include + +using namespace Napi; + +SvgBackend::SvgBackend(Napi::CallbackInfo& info) : Napi::ObjectWrap(info), Backend("svg", info) {} + +SvgBackend::~SvgBackend() { + destroySurface(); +} + +cairo_surface_t* SvgBackend::ensureSurface() { + if (!surface) { + assert(!_closure); + _closure = new PdfSvgClosure(canvas); + surface = cairo_svg_surface_create_for_stream(PdfSvgClosure::writeVec, _closure, width, height); + } + return surface; +} + +void SvgBackend::destroySurface() { + if (surface) { + cairo_surface_finish(surface); + cairo_surface_destroy(surface); + surface = nullptr; + assert(_closure); + delete _closure; + _closure = nullptr; + } + } + +void +SvgBackend::Initialize(Napi::Object target) { + Napi::Env env = target.Env(); + Napi::Function ctor = DefineClass(env, "SvgBackend", {}); + InstanceData* data = env.GetInstanceData(); + data->SvgBackendCtor = Napi::Persistent(ctor); +} diff --git a/miniprogram/node_modules/canvas/src/backend/SvgBackend.h b/miniprogram/node_modules/canvas/src/backend/SvgBackend.h new file mode 100644 index 00000000..f4484269 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/backend/SvgBackend.h @@ -0,0 +1,22 @@ +#pragma once + +#include "Backend.h" +#include "../closure.h" +#include + +class SvgBackend : public Napi::ObjectWrap, public Backend +{ + private: + cairo_surface_t* ensureSurface(); + void destroySurface(); + cairo_surface_t* surface = nullptr; + + public: + PdfSvgClosure* _closure = NULL; + inline PdfSvgClosure* closure() { return _closure; } + + SvgBackend(Napi::CallbackInfo& info); + ~SvgBackend(); + + static void Initialize(Napi::Object target); +}; diff --git a/miniprogram/node_modules/canvas/src/bmp/BMPParser.cc b/miniprogram/node_modules/canvas/src/bmp/BMPParser.cc new file mode 100644 index 00000000..7d623f65 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/bmp/BMPParser.cc @@ -0,0 +1,459 @@ +#include "BMPParser.h" + +#include +#include + +using namespace std; +using namespace BMPParser; + +#define MAX_IMG_SIZE 10000 + +#define E(cond, msg) if(cond) return setErr(msg) +#define EU(cond, msg) if(cond) return setErrUnsupported(msg) +#define EX(cond, msg) if(cond) return setErrUnknown(msg) + +#define I1() get() +#define U1() get() +#define I2() get() +#define U2() get() +#define I4() get() +#define U4() get() + +#define I1UC() get() +#define U1UC() get() +#define I2UC() get() +#define 
U2UC() get() +#define I4UC() get() +#define U4UC() get() + +#define CHECK_OVERRUN(ptr, size, type) \ + if((ptr) + (size) - data > len){ \ + setErr("unexpected end of file"); \ + return type(); \ + } + +Parser::~Parser(){ + data = nullptr; + ptr = nullptr; + + if(imgd){ + delete[] imgd; + imgd = nullptr; + } +} + +void Parser::parse(uint8_t *buf, int bufSize, uint8_t *format){ + assert(status == Status::EMPTY); + + data = ptr = buf; + len = bufSize; + + // Start parsing file header + setOp("file header"); + + // File header signature + string fhSig = getStr(2); + string temp = "file header signature"; + EU(fhSig == "BA", temp + " \"BA\""); + EU(fhSig == "CI", temp + " \"CI\""); + EU(fhSig == "CP", temp + " \"CP\""); + EU(fhSig == "IC", temp + " \"IC\""); + EU(fhSig == "PT", temp + " \"PT\""); + EX(fhSig != "BM", temp); // BM + + // Length of the file should not be larger than `len` + E(U4() > static_cast(len), "inconsistent file size"); + + // Skip unused values + skip(4); + + // Offset where the pixel array (bitmap data) can be found + auto imgdOffset = U4(); + + // Start parsing DIB header + setOp("DIB header"); + + // Prepare some variables in case they are needed + uint32_t compr = 0; + uint32_t redShift = 0, greenShift = 0, blueShift = 0, alphaShift = 0; + uint32_t redMask = 0, greenMask = 0, blueMask = 0, alphaMask = 0; + double redMultp = 0, greenMultp = 0, blueMultp = 0, alphaMultp = 0; + + /** + * Type of the DIB (device-independent bitmap) header + * is determined by its size. Most BMP files use BITMAPINFOHEADER. + */ + auto dibSize = U4(); + temp = "DIB header"; + EU(dibSize == 64, temp + " \"OS22XBITMAPHEADER\""); + EU(dibSize == 16, temp + " \"OS22XBITMAPHEADER\""); + + uint32_t infoHeader = dibSize == 40 ? 1 : + dibSize == 52 ? 2 : + dibSize == 56 ? 3 : + dibSize == 108 ? 4 : + dibSize == 124 ? 5 : 0; + + // BITMAPCOREHEADER, BITMAP*INFOHEADER, BITMAP*HEADER + auto isDibValid = dibSize == 12 || infoHeader; + EX(!isDibValid, temp); + + // Image width + w = dibSize == 12 ? U2() : I4(); + E(!w, "image width is 0"); + E(w < 0, "negative image width"); + E(w > MAX_IMG_SIZE, "too large image width"); + + // Image height (specification allows negative values) + h = dibSize == 12 ? 
U2() : I4(); + E(!h, "image height is 0"); + E(h > MAX_IMG_SIZE, "too large image height"); + + bool isHeightNegative = h < 0; + if(isHeightNegative) h = -h; + + // Number of color planes (must be 1) + E(U2() != 1, "number of color planes must be 1"); + + // Bits per pixel (color depth) + auto bpp = U2(); + auto isBppValid = bpp == 1 || bpp == 4 || bpp == 8 || bpp == 16 || bpp == 24 || bpp == 32; + EU(!isBppValid, "color depth"); + + // Calculate image data size and padding + uint32_t expectedImgdSize = (((w * bpp + 31) >> 5) << 2) * h; + uint32_t rowPadding = (-w * bpp & 31) >> 3; + uint32_t imgdSize = 0; + + // Color palette data + uint8_t* paletteStart = nullptr; + uint32_t palColNum = 0; + + if(infoHeader){ + // Compression type + compr = U4(); + temp = "compression type"; + EU(compr == 1, temp + " \"BI_RLE8\""); + EU(compr == 2, temp + " \"BI_RLE4\""); + EU(compr == 4, temp + " \"BI_JPEG\""); + EU(compr == 5, temp + " \"BI_PNG\""); + EU(compr == 6, temp + " \"BI_ALPHABITFIELDS\""); + EU(compr == 11, temp + " \"BI_CMYK\""); + EU(compr == 12, temp + " \"BI_CMYKRLE8\""); + EU(compr == 13, temp + " \"BI_CMYKRLE4\""); + + // BI_RGB and BI_BITFIELDS + auto isComprValid = compr == 0 || compr == 3; + EX(!isComprValid, temp); + + // Ensure that BI_BITFIELDS appears only with 16-bit or 32-bit color + E(compr == 3 && !(bpp == 16 || bpp == 32), "compression BI_BITFIELDS can be used only with 16-bit and 32-bit color depth"); + + // Size of the image data + imgdSize = U4(); + + // Horizontal and vertical resolution (ignored) + skip(8); + + // Number of colors in the palette or 0 if no palette is present + palColNum = U4(); + EU(palColNum && bpp > 8, "color palette and bit depth combination"); + if(palColNum) paletteStart = data + dibSize + 14; + + // Number of important colors used or 0 if all colors are important (generally ignored) + skip(4); + + if(infoHeader >= 2){ + // If BI_BITFIELDS are used, calculate masks, otherwise ignore them + if(compr == 3){ + calcMaskShift(redShift, redMask, redMultp); + calcMaskShift(greenShift, greenMask, greenMultp); + calcMaskShift(blueShift, blueMask, blueMultp); + if(infoHeader >= 3) calcMaskShift(alphaShift, alphaMask, alphaMultp); + if(status == Status::ERROR) return; + }else{ + skip(16); + } + + // Ensure that the color space is LCS_WINDOWS_COLOR_SPACE or sRGB + if(infoHeader >= 4 && !palColNum){ + string colSpace = getStr(4, 1); + EU(colSpace != "Win " && colSpace != "sRGB", "color space \"" + colSpace + "\""); + } + } + } + + // Skip to the image data (there may be other chunks between, but they are optional) + E(ptr - data > imgdOffset, "image data overlaps with another structure"); + ptr = data + imgdOffset; + + // Start parsing image data + setOp("image data"); + + if(!imgdSize){ + // Value 0 is allowed only for BI_RGB compression type + E(compr != 0, "missing image data size"); + imgdSize = expectedImgdSize; + }else{ + E(imgdSize < expectedImgdSize, "invalid image data size"); + } + + // Ensure that all image data is present + E(ptr - data + imgdSize > len, "not enough image data"); + + // Direction of reading rows + int yStart = h - 1; + int yEnd = -1; + int dy = isHeightNegative ? 
1 : -1; + + // In case of negative height, read rows backward + if(isHeightNegative){ + yStart = 0; + yEnd = h; + } + + // Allocate output image data array + int buffLen = w * h << 2; + imgd = new (nothrow) uint8_t[buffLen]; + E(!imgd, "unable to allocate memory"); + + // Prepare color values + uint8_t color[4] = {0}; + uint8_t &red = color[0]; + uint8_t &green = color[1]; + uint8_t &blue = color[2]; + uint8_t &alpha = color[3]; + + // Check if pre-multiplied alpha is used + bool premul = format ? format[4] : 0; + + // Main loop + for(int y = yStart; y != yEnd; y += dy){ + // Use in-byte offset for bpp < 8 + uint8_t colOffset = 0; + uint8_t cval = 0; + uint32_t val = 0; + + for(int x = 0; x != w; x++){ + // Index in the output image data + int i = (x + y * w) << 2; + + switch(compr){ + case 0: // BI_RGB + switch(bpp){ + case 1: + if(colOffset) ptr--; + cval = (U1UC() >> (7 - colOffset)) & 1; + + if(palColNum){ + uint8_t* entry = paletteStart + (cval << 2); + blue = get(entry); + green = get(entry + 1); + red = get(entry + 2); + if(status == Status::ERROR) return; + }else{ + red = green = blue = cval ? 255 : 0; + } + + alpha = 255; + colOffset = (colOffset + 1) & 7; + break; + + case 4: + if(colOffset) ptr--; + cval = (U1UC() >> (4 - colOffset)) & 15; + + if(palColNum){ + uint8_t* entry = paletteStart + (cval << 2); + blue = get(entry); + green = get(entry + 1); + red = get(entry + 2); + if(status == Status::ERROR) return; + }else{ + red = green = blue = cval << 4; + } + + alpha = 255; + colOffset = (colOffset + 4) & 7; + break; + + case 8: + cval = U1UC(); + + if(palColNum){ + uint8_t* entry = paletteStart + (cval << 2); + blue = get(entry); + green = get(entry + 1); + red = get(entry + 2); + if(status == Status::ERROR) return; + }else{ + red = green = blue = cval; + } + + alpha = 255; + break; + + case 16: + // RGB555 + val = U1UC(); + val |= U1UC() << 8; + red = (val >> 10) << 3; + green = (val >> 5) << 3; + blue = val << 3; + alpha = 255; + break; + + case 24: + blue = U1UC(); + green = U1UC(); + red = U1UC(); + alpha = 255; + break; + + case 32: + blue = U1UC(); + green = U1UC(); + red = U1UC(); + + if(infoHeader >= 3){ + alpha = U1UC(); + }else{ + alpha = 255; + skip(1); + } + break; + } + break; + + case 3: // BI_BITFIELDS + uint32_t col = bpp == 16 ? U2UC() : U4UC(); + red = ((col >> redShift) & redMask) * redMultp + .5; + green = ((col >> greenShift) & greenMask) * greenMultp + .5; + blue = ((col >> blueShift) & blueMask) * blueMultp + .5; + alpha = alphaMask ? 
((col >> alphaShift) & alphaMask) * alphaMultp + .5 : 255; + break; + } + + /** + * Pixel format: + * red, + * green, + * blue, + * alpha, + * is alpha pre-multiplied + * Default is [0, 1, 2, 3, 0] + */ + + if(premul && alpha != 255){ + double a = alpha / 255.; + red = static_cast(red * a + .5); + green = static_cast(green * a + .5); + blue = static_cast(blue * a + .5); + } + + if(format){ + imgd[i] = color[format[0]]; + imgd[i + 1] = color[format[1]]; + imgd[i + 2] = color[format[2]]; + imgd[i + 3] = color[format[3]]; + }else{ + imgd[i] = red; + imgd[i + 1] = green; + imgd[i + 2] = blue; + imgd[i + 3] = alpha; + } + } + + // Skip unused bytes in the current row + skip(rowPadding); + } + + if(status == Status::ERROR) return; + status = Status::OK; +}; + +void Parser::clearImgd(){ imgd = nullptr; } +int32_t Parser::getWidth() const{ return w; } +int32_t Parser::getHeight() const{ return h; } +uint8_t *Parser::getImgd() const{ return imgd; } +Status Parser::getStatus() const{ return status; } + +string Parser::getErrMsg() const{ + return "Error while processing " + getOp() + " - " + err; +} + +template inline T Parser::get(){ + if(check) + CHECK_OVERRUN(ptr, sizeof(T), T); + T val; + std::memcpy(&val, ptr, sizeof(T)); + ptr += sizeof(T); + return val; +} + +template inline T Parser::get(uint8_t* pointer){ + if(check) + CHECK_OVERRUN(pointer, sizeof(T), T); + T val = *(T*)pointer; + return val; +} + +string Parser::getStr(int size, bool reverse){ + CHECK_OVERRUN(ptr, size, string); + string val = ""; + + while(size--){ + if(reverse) val = string(1, static_cast(*ptr++)) + val; + else val += static_cast(*ptr++); + } + + return val; +} + +inline void Parser::skip(int size){ + CHECK_OVERRUN(ptr, size, void); + ptr += size; +} + +void Parser::calcMaskShift(uint32_t& shift, uint32_t& mask, double& multp){ + mask = U4(); + shift = 0; + + if(mask == 0) return; + + while(~mask & 1){ + mask >>= 1; + shift++; + } + + E(mask & (mask + 1), "invalid color mask"); + + multp = 255. 
/ mask; +} + +void Parser::setOp(string val){ + if(status != Status::EMPTY) return; + op = val; +} + +string Parser::getOp() const{ + return op; +} + +void Parser::setErrUnsupported(string msg){ + setErr("unsupported " + msg); +} + +void Parser::setErrUnknown(string msg){ + setErr("unknown " + msg); +} + +void Parser::setErr(string msg){ + if(status != Status::EMPTY) return; + err = msg; + status = Status::ERROR; +} + +string Parser::getErr() const{ + return err; +} diff --git a/miniprogram/node_modules/canvas/src/bmp/BMPParser.h b/miniprogram/node_modules/canvas/src/bmp/BMPParser.h new file mode 100644 index 00000000..c35f51a8 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/bmp/BMPParser.h @@ -0,0 +1,60 @@ +#pragma once + +#ifdef ERROR +#define ERROR_ ERROR +#undef ERROR +#endif + +#include // node < 7 uses libstdc++ on macOS which lacks complete c++11 +#include + +namespace BMPParser{ + enum Status{ + EMPTY, + OK, + ERROR, + }; + + class Parser{ + public: + Parser()=default; + ~Parser(); + void parse(uint8_t *buf, int bufSize, uint8_t *format=nullptr); + void clearImgd(); + int32_t getWidth() const; + int32_t getHeight() const; + uint8_t *getImgd() const; + Status getStatus() const; + std::string getErrMsg() const; + + private: + Status status = Status::EMPTY; + uint8_t *data = nullptr; + uint8_t *ptr = nullptr; + int len = 0; + int32_t w = 0; + int32_t h = 0; + uint8_t *imgd = nullptr; + std::string err = ""; + std::string op = ""; + + template inline T get(); + template inline T get(uint8_t* pointer); + std::string getStr(int len, bool reverse=false); + inline void skip(int len); + void calcMaskShift(uint32_t& shift, uint32_t& mask, double& multp); + + void setOp(std::string val); + std::string getOp() const; + + void setErrUnsupported(std::string msg); + void setErrUnknown(std::string msg); + void setErr(std::string msg); + std::string getErr() const; + }; +} + +#ifdef ERROR_ +#define ERROR ERROR_ +#undef ERROR_ +#endif diff --git a/miniprogram/node_modules/canvas/src/bmp/LICENSE.md b/miniprogram/node_modules/canvas/src/bmp/LICENSE.md new file mode 100644 index 00000000..6bb8a291 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/bmp/LICENSE.md @@ -0,0 +1,24 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
+ +For more information, please refer to \ No newline at end of file diff --git a/miniprogram/node_modules/canvas/src/closure.cc b/miniprogram/node_modules/canvas/src/closure.cc new file mode 100644 index 00000000..3290db2e --- /dev/null +++ b/miniprogram/node_modules/canvas/src/closure.cc @@ -0,0 +1,52 @@ +#include "closure.h" +#include "Canvas.h" + +#ifdef HAVE_JPEG +void JpegClosure::init_destination(j_compress_ptr cinfo) { + JpegClosure* closure = (JpegClosure*)cinfo->client_data; + closure->vec.resize(PAGE_SIZE); + closure->jpeg_dest_mgr->next_output_byte = &closure->vec[0]; + closure->jpeg_dest_mgr->free_in_buffer = closure->vec.size(); +} + +boolean JpegClosure::empty_output_buffer(j_compress_ptr cinfo) { + JpegClosure* closure = (JpegClosure*)cinfo->client_data; + size_t currentSize = closure->vec.size(); + closure->vec.resize(currentSize * 1.5); + closure->jpeg_dest_mgr->next_output_byte = &closure->vec[currentSize]; + closure->jpeg_dest_mgr->free_in_buffer = closure->vec.size() - currentSize; + return true; +} + +void JpegClosure::term_destination(j_compress_ptr cinfo) { + JpegClosure* closure = (JpegClosure*)cinfo->client_data; + size_t finalSize = closure->vec.size() - closure->jpeg_dest_mgr->free_in_buffer; + closure->vec.resize(finalSize); +} +#endif + +void +EncodingWorker::Init(void (*work_fn)(Closure*), Closure* closure) { + this->work_fn = work_fn; + this->closure = closure; +} + +void +EncodingWorker::Execute() { + this->work_fn(this->closure); +} + +void +EncodingWorker::OnWorkComplete(Napi::Env env, napi_status status) { + Napi::HandleScope scope(env); + + if (closure->status) { + closure->cb.Call({ closure->canvas->CairoError(closure->status).Value() }); + } else { + Napi::Object buf = Napi::Buffer::Copy(env, &closure->vec[0], closure->vec.size()); + closure->cb.Call({ env.Null(), buf }); + } + + closure->canvas->Unref(); + delete closure; +} diff --git a/miniprogram/node_modules/canvas/src/closure.h b/miniprogram/node_modules/canvas/src/closure.h new file mode 100644 index 00000000..ce5ec489 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/closure.h @@ -0,0 +1,93 @@ +// Copyright (c) 2010 LearnBoost + +#pragma once + +#include "Canvas.h" + +#ifdef HAVE_JPEG +#include +#endif + +#include +#include +#include // node < 7 uses libstdc++ on macOS which lacks complete c++11 +#include + +#ifndef PAGE_SIZE + #define PAGE_SIZE 4096 +#endif + +/* + * Image encoding closures. 
+ */ + +struct Closure { + std::vector vec; + Napi::FunctionReference cb; + Canvas* canvas = nullptr; + cairo_status_t status = CAIRO_STATUS_SUCCESS; + + static cairo_status_t writeVec(void *c, const uint8_t *odata, unsigned len) { + Closure* closure = static_cast(c); + try { + closure->vec.insert(closure->vec.end(), odata, odata + len); + } catch (const std::bad_alloc &) { + return CAIRO_STATUS_NO_MEMORY; + } + return CAIRO_STATUS_SUCCESS; + } + + Closure(Canvas* canvas) : canvas(canvas) {}; +}; + +struct PdfSvgClosure : Closure { + PdfSvgClosure(Canvas* canvas) : Closure(canvas) {}; +}; + +struct PngClosure : Closure { + uint32_t compressionLevel = 6; + uint32_t filters = PNG_ALL_FILTERS; + uint32_t resolution = 0; // 0 = unspecified + // Indexed PNGs: + uint32_t nPaletteColors = 0; + uint8_t* palette = nullptr; + uint8_t backgroundIndex = 0; + + PngClosure(Canvas* canvas) : Closure(canvas) {}; +}; + +#ifdef HAVE_JPEG +struct JpegClosure : Closure { + uint32_t quality = 75; + uint32_t chromaSubsampling = 2; + bool progressive = false; + jpeg_destination_mgr* jpeg_dest_mgr = nullptr; + + static void init_destination(j_compress_ptr cinfo); + static boolean empty_output_buffer(j_compress_ptr cinfo); + static void term_destination(j_compress_ptr cinfo); + + JpegClosure(Canvas* canvas) : Closure(canvas) { + jpeg_dest_mgr = new jpeg_destination_mgr; + jpeg_dest_mgr->init_destination = init_destination; + jpeg_dest_mgr->empty_output_buffer = empty_output_buffer; + jpeg_dest_mgr->term_destination = term_destination; + }; + + ~JpegClosure() { + delete jpeg_dest_mgr; + } +}; +#endif + +class EncodingWorker : public Napi::AsyncWorker { + public: + EncodingWorker(Napi::Env env): Napi::AsyncWorker(env) {}; + void Init(void (*work_fn)(Closure*), Closure* closure); + void Execute() override; + void OnWorkComplete(Napi::Env env, napi_status status) override; + + private: + void (*work_fn)(Closure*) = nullptr; + Closure* closure = nullptr; +}; diff --git a/miniprogram/node_modules/canvas/src/color.cc b/miniprogram/node_modules/canvas/src/color.cc new file mode 100644 index 00000000..f8262946 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/color.cc @@ -0,0 +1,796 @@ +// Copyright (c) 2010 LearnBoost + +#include "color.h" + +#include +#include +#include +#include +#include +#include +#include + +// Compatibility with Visual Studio versions prior to VS2015 +#if defined(_MSC_VER) && _MSC_VER < 1900 +#define snprintf _snprintf +#endif + +/* + * Parse integer value + */ + +template +static bool +parse_integer(const char** pStr, parsed_t *pParsed) { + parsed_t& c = *pParsed; + const char*& str = *pStr; + int8_t sign=1; + + c = 0; + if (*str == '-') { + sign=-1; + ++str; + } + else if (*str == '+') + ++str; + + if (*str >= '0' && *str <= '9') { + do { + c *= 10; + c += *str++ - '0'; + } while (*str >= '0' && *str <= '9'); + } else { + return false; + } + if (sign<0) + c=-c; + return true; +} + + +/* + * Parse CSS value + * Adapted from http://crackprogramming.blogspot.co.il/2012/10/implement-atof.html + */ + +template +static bool +parse_css_number(const char** pStr, parsed_t *pParsed) { + parsed_t &parsed = *pParsed; + const char*& str = *pStr; + const char* startStr = str; + if (!str || !*str) + return false; + parsed_t integerPart = 0; + parsed_t fractionPart = 0; + int divisorForFraction = 1; + int sign = 1; + int exponent = 0; + int digits = 0; + bool inFraction = false; + + if (*str == '-') { + ++str; + sign = -1; + } + else if (*str == '+') + ++str; + while (*str != '\0') { + if (*str >= '0' && 
*str <= '9') { + if (digits>=std::numeric_limits::digits10) { + if (!inFraction) + return false; + } + else { + ++digits; + + if (inFraction) { + fractionPart = fractionPart*10 + (*str - '0'); + divisorForFraction *= 10; + } + else { + integerPart = integerPart*10 + (*str - '0'); + } + } + } + else if (*str == '.') { + if (inFraction) + break; + else + inFraction = true; + } + else if (*str == 'e') { + ++str; + if (!parse_integer(&str, &exponent)) + return false; + break; + } + else + break; + ++str; + } + if (str != startStr) { + parsed = sign * (integerPart + fractionPart/divisorForFraction); + for (;exponent>0;--exponent) + parsed *= 10; + for (;exponent<0;++exponent) + parsed /= 10; + return true; + } + return false; +} + +/* + * Clip value to the range [minValue, maxValue] + */ + +template +static T +clip(T value, T minValue, T maxValue) { + if (value > maxValue) + value = maxValue; + if (value < minValue) + value = minValue; + return value; +} + +/* + * Wrap value to the range [0, limit] + */ + +template +static T +wrap_float(T value, T limit) { + return fmod(fmod(value, limit) + limit, limit); +} + +/* + * Wrap value to the range [0, limit] - currently-unused integer version of wrap_float + */ + +// template +// static T wrap_int(T value, T limit) { +// return (value % limit + limit) % limit; +// } + +/* + * Parse color channel value + */ + +static bool +parse_rgb_channel(const char** pStr, uint8_t *pChannel) { + float f_channel; + if (parse_css_number(pStr, &f_channel)) { + int channel = (int) ceil(f_channel); + *pChannel = clip(channel, 0, 255); + return true; + } + return false; +} + +/* + * Parse a value in degrees + */ + +static bool +parse_degrees(const char** pStr, float *pDegrees) { + float degrees; + if (parse_css_number(pStr, °rees)) { + *pDegrees = wrap_float(degrees, 360.0f); + return true; + } + return false; +} + +/* + * Parse and clip a percentage value. Returns a float in the range [0, 1]. + */ + +static bool +parse_clipped_percentage(const char** pStr, float *pFraction) { + float percentage; + bool result = parse_css_number(pStr,&percentage); + const char*& str = *pStr; + if (result) { + if (*str == '%') { + ++str; + *pFraction = clip(percentage, 0.0f, 100.0f) / 100.0f; + return result; + } + } + return false; +} + +/* + * Macros to help with parsing inside rgba_from_*_string + */ + +#define WHITESPACE \ + while (' ' == *str) ++str; + +#define WHITESPACE_OR_COMMA \ + while (' ' == *str || ',' == *str) ++str; + +#define WHITESPACE_OR_COMMA_OR_SLASH \ + while (' ' == *str || ',' == *str || '/' == *str) ++str; + +#define CHANNEL(NAME) \ + if (!parse_rgb_channel(&str, &NAME)) \ + return 0; \ + +#define HUE(NAME) \ + if (!parse_degrees(&str, &NAME)) \ + return 0; + +#define SATURATION(NAME) \ + if (!parse_clipped_percentage(&str, &NAME)) \ + return 0; + +#define LIGHTNESS(NAME) SATURATION(NAME) + +#define ALPHA(NAME) \ + if (*str >= '1' && *str <= '9') { \ + NAME = 0; \ + float n = .1f; \ + while(*str >='0' && *str <= '9') { \ + NAME += (*str - '0') * n; \ + str++; \ + } \ + while(*str == ' ')str++; \ + if(*str != '%') { \ + NAME = 1; \ + } \ + } else { \ + if ('0' == *str) { \ + NAME = 0; \ + ++str; \ + } \ + if ('.' == *str) { \ + ++str; \ + NAME = 0; \ + float n = .1f; \ + while (*str >= '0' && *str <= '9') { \ + NAME += (*str++ - '0') * n; \ + n *= .1f; \ + } \ + } \ + } \ + do {} while (0) // require trailing semicolon + +/* + * Named colors. 
+ */ +static const std::map named_colors = { + { "transparent", 0xFFFFFF00} + , { "aliceblue", 0xF0F8FFFF } + , { "antiquewhite", 0xFAEBD7FF } + , { "aqua", 0x00FFFFFF } + , { "aquamarine", 0x7FFFD4FF } + , { "azure", 0xF0FFFFFF } + , { "beige", 0xF5F5DCFF } + , { "bisque", 0xFFE4C4FF } + , { "black", 0x000000FF } + , { "blanchedalmond", 0xFFEBCDFF } + , { "blue", 0x0000FFFF } + , { "blueviolet", 0x8A2BE2FF } + , { "brown", 0xA52A2AFF } + , { "burlywood", 0xDEB887FF } + , { "cadetblue", 0x5F9EA0FF } + , { "chartreuse", 0x7FFF00FF } + , { "chocolate", 0xD2691EFF } + , { "coral", 0xFF7F50FF } + , { "cornflowerblue", 0x6495EDFF } + , { "cornsilk", 0xFFF8DCFF } + , { "crimson", 0xDC143CFF } + , { "cyan", 0x00FFFFFF } + , { "darkblue", 0x00008BFF } + , { "darkcyan", 0x008B8BFF } + , { "darkgoldenrod", 0xB8860BFF } + , { "darkgray", 0xA9A9A9FF } + , { "darkgreen", 0x006400FF } + , { "darkgrey", 0xA9A9A9FF } + , { "darkkhaki", 0xBDB76BFF } + , { "darkmagenta", 0x8B008BFF } + , { "darkolivegreen", 0x556B2FFF } + , { "darkorange", 0xFF8C00FF } + , { "darkorchid", 0x9932CCFF } + , { "darkred", 0x8B0000FF } + , { "darksalmon", 0xE9967AFF } + , { "darkseagreen", 0x8FBC8FFF } + , { "darkslateblue", 0x483D8BFF } + , { "darkslategray", 0x2F4F4FFF } + , { "darkslategrey", 0x2F4F4FFF } + , { "darkturquoise", 0x00CED1FF } + , { "darkviolet", 0x9400D3FF } + , { "deeppink", 0xFF1493FF } + , { "deepskyblue", 0x00BFFFFF } + , { "dimgray", 0x696969FF } + , { "dimgrey", 0x696969FF } + , { "dodgerblue", 0x1E90FFFF } + , { "firebrick", 0xB22222FF } + , { "floralwhite", 0xFFFAF0FF } + , { "forestgreen", 0x228B22FF } + , { "fuchsia", 0xFF00FFFF } + , { "gainsboro", 0xDCDCDCFF } + , { "ghostwhite", 0xF8F8FFFF } + , { "gold", 0xFFD700FF } + , { "goldenrod", 0xDAA520FF } + , { "gray", 0x808080FF } + , { "green", 0x008000FF } + , { "greenyellow", 0xADFF2FFF } + , { "grey", 0x808080FF } + , { "honeydew", 0xF0FFF0FF } + , { "hotpink", 0xFF69B4FF } + , { "indianred", 0xCD5C5CFF } + , { "indigo", 0x4B0082FF } + , { "ivory", 0xFFFFF0FF } + , { "khaki", 0xF0E68CFF } + , { "lavender", 0xE6E6FAFF } + , { "lavenderblush", 0xFFF0F5FF } + , { "lawngreen", 0x7CFC00FF } + , { "lemonchiffon", 0xFFFACDFF } + , { "lightblue", 0xADD8E6FF } + , { "lightcoral", 0xF08080FF } + , { "lightcyan", 0xE0FFFFFF } + , { "lightgoldenrodyellow", 0xFAFAD2FF } + , { "lightgray", 0xD3D3D3FF } + , { "lightgreen", 0x90EE90FF } + , { "lightgrey", 0xD3D3D3FF } + , { "lightpink", 0xFFB6C1FF } + , { "lightsalmon", 0xFFA07AFF } + , { "lightseagreen", 0x20B2AAFF } + , { "lightskyblue", 0x87CEFAFF } + , { "lightslategray", 0x778899FF } + , { "lightslategrey", 0x778899FF } + , { "lightsteelblue", 0xB0C4DEFF } + , { "lightyellow", 0xFFFFE0FF } + , { "lime", 0x00FF00FF } + , { "limegreen", 0x32CD32FF } + , { "linen", 0xFAF0E6FF } + , { "magenta", 0xFF00FFFF } + , { "maroon", 0x800000FF } + , { "mediumaquamarine", 0x66CDAAFF } + , { "mediumblue", 0x0000CDFF } + , { "mediumorchid", 0xBA55D3FF } + , { "mediumpurple", 0x9370DBFF } + , { "mediumseagreen", 0x3CB371FF } + , { "mediumslateblue", 0x7B68EEFF } + , { "mediumspringgreen", 0x00FA9AFF } + , { "mediumturquoise", 0x48D1CCFF } + , { "mediumvioletred", 0xC71585FF } + , { "midnightblue", 0x191970FF } + , { "mintcream", 0xF5FFFAFF } + , { "mistyrose", 0xFFE4E1FF } + , { "moccasin", 0xFFE4B5FF } + , { "navajowhite", 0xFFDEADFF } + , { "navy", 0x000080FF } + , { "oldlace", 0xFDF5E6FF } + , { "olive", 0x808000FF } + , { "olivedrab", 0x6B8E23FF } + , { "orange", 0xFFA500FF } + , { "orangered", 0xFF4500FF } + , { "orchid", 
0xDA70D6FF } + , { "palegoldenrod", 0xEEE8AAFF } + , { "palegreen", 0x98FB98FF } + , { "paleturquoise", 0xAFEEEEFF } + , { "palevioletred", 0xDB7093FF } + , { "papayawhip", 0xFFEFD5FF } + , { "peachpuff", 0xFFDAB9FF } + , { "peru", 0xCD853FFF } + , { "pink", 0xFFC0CBFF } + , { "plum", 0xDDA0DDFF } + , { "powderblue", 0xB0E0E6FF } + , { "purple", 0x800080FF } + , { "rebeccapurple", 0x663399FF } // Source: CSS Color Level 4 draft + , { "red", 0xFF0000FF } + , { "rosybrown", 0xBC8F8FFF } + , { "royalblue", 0x4169E1FF } + , { "saddlebrown", 0x8B4513FF } + , { "salmon", 0xFA8072FF } + , { "sandybrown", 0xF4A460FF } + , { "seagreen", 0x2E8B57FF } + , { "seashell", 0xFFF5EEFF } + , { "sienna", 0xA0522DFF } + , { "silver", 0xC0C0C0FF } + , { "skyblue", 0x87CEEBFF } + , { "slateblue", 0x6A5ACDFF } + , { "slategray", 0x708090FF } + , { "slategrey", 0x708090FF } + , { "snow", 0xFFFAFAFF } + , { "springgreen", 0x00FF7FFF } + , { "steelblue", 0x4682B4FF } + , { "tan", 0xD2B48CFF } + , { "teal", 0x008080FF } + , { "thistle", 0xD8BFD8FF } + , { "tomato", 0xFF6347FF } + , { "turquoise", 0x40E0D0FF } + , { "violet", 0xEE82EEFF } + , { "wheat", 0xF5DEB3FF } + , { "white", 0xFFFFFFFF } + , { "whitesmoke", 0xF5F5F5FF } + , { "yellow", 0xFFFF00FF } + , { "yellowgreen", 0x9ACD32FF } +}; + +/* + * Hex digit int val. + */ + +static int +h(char c) { + switch (c) { + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + return c - '0'; + case 'a': + case 'b': + case 'c': + case 'd': + case 'e': + case 'f': + return (c - 'a') + 10; + case 'A': + case 'B': + case 'C': + case 'D': + case 'E': + case 'F': + return (c - 'A') + 10; + } + return 0; +} + +/* + * Return rgba_t from rgba. + */ + +rgba_t +rgba_create(uint32_t rgba) { + rgba_t color; + color.r = (double) (rgba >> 24) / 255; + color.g = (double) (rgba >> 16 & 0xff) / 255; + color.b = (double) (rgba >> 8 & 0xff) / 255; + color.a = (double) (rgba & 0xff) / 255; + return color; +} + +/* + * Return a string representation of the color. + */ + +void +rgba_to_string(rgba_t rgba, char *buf, size_t len) { + if (1 == rgba.a) { + snprintf(buf, len, "#%.2x%.2x%.2x", + static_cast(round(rgba.r * 255)), + static_cast(round(rgba.g * 255)), + static_cast(round(rgba.b * 255))); + } else { + snprintf(buf, len, "rgba(%d, %d, %d, %.2f)", + static_cast(round(rgba.r * 255)), + static_cast(round(rgba.g * 255)), + static_cast(round(rgba.b * 255)), + rgba.a); + } +} + +/* + * Return rgba from (r,g,b,a). + */ + +static inline int32_t +rgba_from_rgba(uint8_t r, uint8_t g, uint8_t b, uint8_t a) { + return + r << 24 + | g << 16 + | b << 8 + | a; +} + +/* + * Helper function used in rgba_from_hsla(). + * Based on http://dev.w3.org/csswg/css-color-4/#hsl-to-rgb + */ + +static float +hue_to_rgb(float t1, float t2, float hue) { + if (hue < 0) + hue += 6; + if (hue >= 6) + hue -= 6; + + if (hue < 1) + return (t2 - t1) * hue + t1; + else if (hue < 3) + return t2; + else if (hue < 4) + return (t2 - t1) * (4 - hue) + t1; + else + return t1; +} + +/* + * Return rgba from (h,s,l,a). + * Expects h values in the range [0, 360), and s, l, a in the range [0, 1]. 
+ * Adapted from http://dev.w3.org/csswg/css-color-4/#hsl-to-rgb + */ + +static inline int32_t +rgba_from_hsla(float h_deg, float s, float l, float a) { + uint8_t r, g, b; + float h = (6 * h_deg) / 360.0f, m1, m2; + + if (l<=0.5) + m2=l*(s+1); + else + m2=l+s-l*s; + m1 = l*2 - m2; + + // Scale and round the RGB components + r = (uint8_t)floor(hue_to_rgb(m1, m2, h + 2) * 255 + 0.5); + g = (uint8_t)floor(hue_to_rgb(m1, m2, h ) * 255 + 0.5); + b = (uint8_t)floor(hue_to_rgb(m1, m2, h - 2) * 255 + 0.5); + + return rgba_from_rgba(r, g, b, (uint8_t) (a * 255)); +} + +/* + * Return rgba from (h,s,l). + * Expects h values in the range [0, 360), and s, l in the range [0, 1]. + */ + +static inline int32_t +rgba_from_hsl(float h_deg, float s, float l) { + return rgba_from_hsla(h_deg, s, l, 1.0); +} + + +/* + * Return rgba from (r,g,b). + */ + +static int32_t +rgba_from_rgb(uint8_t r, uint8_t g, uint8_t b) { + return rgba_from_rgba(r, g, b, 255); +} + +/* + * Return rgba from #RRGGBBAA + */ + +static int32_t +rgba_from_hex8_string(const char *str) { + return rgba_from_rgba( + (h(str[0]) << 4) + h(str[1]), + (h(str[2]) << 4) + h(str[3]), + (h(str[4]) << 4) + h(str[5]), + (h(str[6]) << 4) + h(str[7]) + ); +} + +/* + * Return rgb from "#RRGGBB". + */ + +static int32_t +rgba_from_hex6_string(const char *str) { + return rgba_from_rgb( + (h(str[0]) << 4) + h(str[1]) + , (h(str[2]) << 4) + h(str[3]) + , (h(str[4]) << 4) + h(str[5]) + ); +} + +/* +* Return rgba from #RGBA +*/ + +static int32_t +rgba_from_hex4_string(const char *str) { + return rgba_from_rgba( + (h(str[0]) << 4) + h(str[0]), + (h(str[1]) << 4) + h(str[1]), + (h(str[2]) << 4) + h(str[2]), + (h(str[3]) << 4) + h(str[3]) + ); +} + +/* + * Return rgb from "#RGB" + */ + +static int32_t +rgba_from_hex3_string(const char *str) { + return rgba_from_rgb( + (h(str[0]) << 4) + h(str[0]) + , (h(str[1]) << 4) + h(str[1]) + , (h(str[2]) << 4) + h(str[2]) + ); +} + +/* + * Return rgb from "rgb()" + */ + +static int32_t +rgba_from_rgb_string(const char *str, short *ok) { + if (str == strstr(str, "rgb(")) { + str += 4; + WHITESPACE; + uint8_t r = 0, g = 0, b = 0; + float a=1.f; + CHANNEL(r); + WHITESPACE_OR_COMMA; + CHANNEL(g); + WHITESPACE_OR_COMMA; + CHANNEL(b); + WHITESPACE_OR_COMMA_OR_SLASH; + ALPHA(a); + return *ok = 1, rgba_from_rgba(r, g, b, (int) (255 * a)); + } + return *ok = 0; +} + +/* + * Return rgb from "rgba()" + */ + +static int32_t +rgba_from_rgba_string(const char *str, short *ok) { + if (str == strstr(str, "rgba(")) { + str += 5; + WHITESPACE; + uint8_t r = 0, g = 0, b = 0; + float a = 1.f; + CHANNEL(r); + WHITESPACE_OR_COMMA; + CHANNEL(g); + WHITESPACE_OR_COMMA; + CHANNEL(b); + WHITESPACE_OR_COMMA_OR_SLASH; + ALPHA(a); + WHITESPACE; + return *ok = 1, rgba_from_rgba(r, g, b, (int) (a * 255)); + } + return *ok = 0; +} + +/* + * Return rgb from "hsla()" + */ + +static int32_t +rgba_from_hsla_string(const char *str, short *ok) { + if (str == strstr(str, "hsla(")) { + str += 5; + WHITESPACE; + float h_deg = 0; + float s = 0, l = 0; + float a = 0; + HUE(h_deg); + WHITESPACE_OR_COMMA; + SATURATION(s); + WHITESPACE_OR_COMMA; + LIGHTNESS(l); + WHITESPACE_OR_COMMA; + ALPHA(a); + WHITESPACE; + return *ok = 1, rgba_from_hsla(h_deg, s, l, a); + } + return *ok = 0; +} + +/* + * Return rgb from "hsl()" + */ + +static int32_t +rgba_from_hsl_string(const char *str, short *ok) { + if (str == strstr(str, "hsl(")) { + str += 4; + WHITESPACE; + float h_deg = 0; + float s = 0, l = 0; + HUE(h_deg); + WHITESPACE_OR_COMMA; + SATURATION(s); + WHITESPACE_OR_COMMA; + 
LIGHTNESS(l); + WHITESPACE; + return *ok = 1, rgba_from_hsl(h_deg, s, l); + } + return *ok = 0; +} + + +/* + * Return rgb from: + * + * - "#RGB" + * - "#RGBA" + * - "#RRGGBB" + * - "#RRGGBBAA" + * + */ + +static int32_t +rgba_from_hex_string(const char *str, short *ok) { + size_t len = strlen(str); + *ok = 1; + switch (len) { + case 8: return rgba_from_hex8_string(str); + case 6: return rgba_from_hex6_string(str); + case 4: return rgba_from_hex4_string(str); + case 3: return rgba_from_hex3_string(str); + } + return *ok = 0; +} + +/* + * Return named color value. + */ + +static int32_t +rgba_from_name_string(const char *str, short *ok) { + WHITESPACE; + std::string lowered(str); + std::transform(lowered.begin(), lowered.end(), lowered.begin(), tolower); + auto color = named_colors.find(lowered); + if (color != named_colors.end()) { + return *ok = 1, color->second; + } + return *ok = 0; +} + +/* + * Return rgb from: + * + * - #RGB + * - #RGBA + * - #RRGGBB + * - #RRGGBBAA + * - rgb(r,g,b) + * - rgba(r,g,b,a) + * - hsl(h,s,l) + * - hsla(h,s,l,a) + * - name + * + */ + +int32_t +rgba_from_string(const char *str, short *ok) { + WHITESPACE; + if ('#' == str[0]) + return rgba_from_hex_string(++str, ok); + if (str == strstr(str, "rgba")) + return rgba_from_rgba_string(str, ok); + if (str == strstr(str, "rgb")) + return rgba_from_rgb_string(str, ok); + if (str == strstr(str, "hsla")) + return rgba_from_hsla_string(str, ok); + if (str == strstr(str, "hsl")) + return rgba_from_hsl_string(str, ok); + return rgba_from_name_string(str, ok); +} + +/* + * Inspect the given rgba color. + */ + +void +rgba_inspect(int32_t rgba) { + printf("rgba(%d,%d,%d,%d)\n" + , rgba >> 24 & 0xff + , rgba >> 16 & 0xff + , rgba >> 8 & 0xff + , rgba & 0xff + ); +} diff --git a/miniprogram/node_modules/canvas/src/color.h b/miniprogram/node_modules/canvas/src/color.h new file mode 100644 index 00000000..137c1d6b --- /dev/null +++ b/miniprogram/node_modules/canvas/src/color.h @@ -0,0 +1,30 @@ +// Copyright (c) 2010 LearnBoost + +#pragma once + +#include // node < 7 uses libstdc++ on macOS which lacks complete c++11 +#include + +/* + * RGBA struct. + */ + +typedef struct { + double r, g, b, a; +} rgba_t; + +/* + * Prototypes. 
+ */ + +rgba_t +rgba_create(uint32_t rgba); + +int32_t +rgba_from_string(const char *str, short *ok); + +void +rgba_to_string(rgba_t rgba, char *buf, size_t len); + +void +rgba_inspect(int32_t rgba); diff --git a/miniprogram/node_modules/canvas/src/dll_visibility.h b/miniprogram/node_modules/canvas/src/dll_visibility.h new file mode 100644 index 00000000..7a1f9845 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/dll_visibility.h @@ -0,0 +1,20 @@ +#ifndef DLL_PUBLIC + +#if defined _WIN32 + #ifdef __GNUC__ + #define DLL_PUBLIC __attribute__ ((dllexport)) + #else + #define DLL_PUBLIC __declspec(dllexport) + #endif + #define DLL_LOCAL +#else + #if __GNUC__ >= 4 + #define DLL_PUBLIC __attribute__ ((visibility ("default"))) + #define DLL_LOCAL __attribute__ ((visibility ("hidden"))) + #else + #define DLL_PUBLIC + #define DLL_LOCAL + #endif +#endif + +#endif diff --git a/miniprogram/node_modules/canvas/src/init.cc b/miniprogram/node_modules/canvas/src/init.cc new file mode 100644 index 00000000..ad920784 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/init.cc @@ -0,0 +1,116 @@ +// Copyright (c) 2010 LearnBoost + +#include +#include + +#include +#if CAIRO_VERSION < CAIRO_VERSION_ENCODE(1, 10, 0) +// CAIRO_FORMAT_RGB16_565: undeprecated in v1.10.0 +// CAIRO_STATUS_INVALID_SIZE: v1.10.0 +// CAIRO_FORMAT_INVALID: v1.10.0 +// Lots of the compositing operators: v1.10.0 +// JPEG MIME tracking: v1.10.0 +// Note: CAIRO_FORMAT_RGB30 is v1.12.0 and still optional +#error("cairo v1.10.0 or later is required") +#endif + +#include "Backends.h" +#include "Canvas.h" +#include "CanvasGradient.h" +#include "CanvasPattern.h" +#include "CanvasRenderingContext2d.h" +#include "Image.h" +#include "ImageData.h" +#include "InstanceData.h" + +#include +#include FT_FREETYPE_H + +/* + * Save some external modules as private references. 
+ */ + +static void +setDOMMatrix(const Napi::CallbackInfo& info) { + InstanceData* data = info.Env().GetInstanceData(); + data->DOMMatrixCtor = Napi::Persistent(info[0].As()); +} + +static void +setParseFont(const Napi::CallbackInfo& info) { + InstanceData* data = info.Env().GetInstanceData(); + data->parseFont = Napi::Persistent(info[0].As()); +} + +// Compatibility with Visual Studio versions prior to VS2015 +#if defined(_MSC_VER) && _MSC_VER < 1900 +#define snprintf _snprintf +#endif + +Napi::Object init(Napi::Env env, Napi::Object exports) { + env.SetInstanceData(new InstanceData()); + + Backends::Initialize(env, exports); + Canvas::Initialize(env, exports); + Image::Initialize(env, exports); + ImageData::Initialize(env, exports); + Context2d::Initialize(env, exports); + Gradient::Initialize(env, exports); + Pattern::Initialize(env, exports); + + exports.Set("setDOMMatrix", Napi::Function::New(env, &setDOMMatrix)); + exports.Set("setParseFont", Napi::Function::New(env, &setParseFont)); + + exports.Set("cairoVersion", Napi::String::New(env, cairo_version_string())); +#ifdef HAVE_JPEG + +#ifndef JPEG_LIB_VERSION_MAJOR +#ifdef JPEG_LIB_VERSION +#define JPEG_LIB_VERSION_MAJOR (JPEG_LIB_VERSION / 10) +#else +#define JPEG_LIB_VERSION_MAJOR 0 +#endif +#endif + +#ifndef JPEG_LIB_VERSION_MINOR +#ifdef JPEG_LIB_VERSION +#define JPEG_LIB_VERSION_MINOR (JPEG_LIB_VERSION % 10) +#else +#define JPEG_LIB_VERSION_MINOR 0 +#endif +#endif + + char jpeg_version[10]; + static bool minor_gt_0 = JPEG_LIB_VERSION_MINOR > 0; + if (minor_gt_0) { + snprintf(jpeg_version, 10, "%d%c", JPEG_LIB_VERSION_MAJOR, JPEG_LIB_VERSION_MINOR + 'a' - 1); + } else { + snprintf(jpeg_version, 10, "%d", JPEG_LIB_VERSION_MAJOR); + } + exports.Set("jpegVersion", Napi::String::New(env, jpeg_version)); +#endif + +#ifdef HAVE_GIF +#ifndef GIF_LIB_VERSION + char gif_version[10]; + snprintf(gif_version, 10, "%d.%d.%d", GIFLIB_MAJOR, GIFLIB_MINOR, GIFLIB_RELEASE); + exports.Set("gifVersion", Napi::String::New(env, gif_version)); +#else + exports.Set("gifVersion", Napi::String::New(env, GIF_LIB_VERSION)); +#endif +#endif + +#ifdef HAVE_RSVG + exports.Set("rsvgVersion", Napi::String::New(env, LIBRSVG_VERSION)); +#endif + + exports.Set("pangoVersion", Napi::String::New(env, PANGO_VERSION_STRING)); + + char freetype_version[10]; + snprintf(freetype_version, 10, "%d.%d.%d", FREETYPE_MAJOR, FREETYPE_MINOR, FREETYPE_PATCH); + exports.Set("freetypeVersion", Napi::String::New(env, freetype_version)); + + return exports; +} + +NODE_API_MODULE(canvas, init); diff --git a/miniprogram/node_modules/canvas/src/itemize.cc b/miniprogram/node_modules/canvas/src/itemize.cc new file mode 100644 index 00000000..0e56c7c9 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/itemize.cc @@ -0,0 +1,228 @@ +#include +#include +#include +#include "itemize.h" + +void +bidi_iterator_next( + BidiIteratorState& state, + const std::vector& text_buffer +) { + if (state.done) return; + + const size_t text_length = text_buffer.size(); + state.level = state.levels[state.offset]; + + bool should_break = false; + while (state.offset < text_length && !should_break) { + while (state.offset < text_length) { + if (state.levels[state.offset] != state.level) { + should_break = true; + break; + } + + state.offset += 1; + } + } + + if (state.offset == text_length) state.done = true; +} + +static const uint32_t paired_chars[] = { + 0x0028, 0x0029, /* ascii paired punctuation */ + 0x003c, 0x003e, + 0x005b, 0x005d, + 0x007b, 0x007d, + 0x00ab, 0x00bb, /* guillemets */ + 0x0f3a, 
0x0f3b, /* tibetan */ + 0x0f3c, 0x0f3d, + 0x169b, 0x169c, /* ogham */ + 0x2018, 0x2019, /* general punctuation */ + 0x201c, 0x201d, + 0x2039, 0x203a, + 0x2045, 0x2046, + 0x207d, 0x207e, + 0x208d, 0x208e, + 0x27e6, 0x27e7, /* math */ + 0x27e8, 0x27e9, + 0x27ea, 0x27eb, + 0x27ec, 0x27ed, + 0x27ee, 0x27ef, + 0x2983, 0x2984, + 0x2985, 0x2986, + 0x2987, 0x2988, + 0x2989, 0x298a, + 0x298b, 0x298c, + 0x298d, 0x298e, + 0x298f, 0x2990, + 0x2991, 0x2992, + 0x2993, 0x2994, + 0x2995, 0x2996, + 0x2997, 0x2998, + 0x29fc, 0x29fd, + 0x2e02, 0x2e03, + 0x2e04, 0x2e05, + 0x2e09, 0x2e0a, + 0x2e0c, 0x2e0d, + 0x2e1c, 0x2e1d, + 0x2e20, 0x2e21, + 0x2e22, 0x2e23, + 0x2e24, 0x2e25, + 0x2e26, 0x2e27, + 0x2e28, 0x2e29, + 0x3008, 0x3009, /* chinese paired punctuation */ + 0x300a, 0x300b, + 0x300c, 0x300d, + 0x300e, 0x300f, + 0x3010, 0x3011, + 0x3014, 0x3015, + 0x3016, 0x3017, + 0x3018, 0x3019, + 0x301a, 0x301b, + 0xfe59, 0xfe5a, + 0xfe5b, 0xfe5c, + 0xfe5d, 0xfe5e, + 0xff08, 0xff09, + 0xff3b, 0xff3d, + 0xff5b, 0xff5d, + 0xff5f, 0xff60, + 0xff62, 0xff63 +}; + +static const size_t paired_chars_count = sizeof(paired_chars) / sizeof(paired_chars[0]); + +static int +get_pair_index(uint32_t ch) { + int lower = 0; + int upper = paired_chars_count - 1; + + while (lower <= upper) { + int mid = (lower + upper) / 2; + + if (ch < paired_chars[mid]) { + upper = mid - 1; + } else if (ch > paired_chars[mid]) { + lower = mid + 1; + } else { + return mid; + } + } + + return -1; +} + +void +script_iterator_next( + ScriptIteratorState& state, + const std::vector& text_buffer +) { + if (state.done) return; + + const size_t text_length = text_buffer.size(); + state.script = SCRIPT_COMMON; + + while (state.offset < text_length) { + uint32_t code = text_buffer[state.offset]; + int jump = 1; + + // Handle surrogate pairs + if (state.offset + 1 < text_length) { + uint32_t next = text_buffer[state.offset + 1]; + if ((0xd800 <= code && code <= 0xdbff) && (0xdc00 <= next && next <= 0xdfff)) { + jump = 2; + code = ((code - 0xd800) * 0x400) + (next - 0xdc00) + 0x10000; + } + } + + script_t script = get_script(code); + int pair_index = script != SCRIPT_COMMON ? -1 : get_pair_index(code); + + // Paired character handling: + // if it's an open character, push it onto the stack + // if it's a close character, find the matching open on the stack, and use + // that script code. Any non-matching open characters above it on the stack + // will be popped. + if (pair_index >= 0) { + if ((pair_index & 1) == 0) { + // Open character + state.parens.push_back({pair_index, state.script}); + } else if (!state.parens.empty()) { + // Close character + int pi = pair_index & ~1; + + while (!state.parens.empty() && state.parens.back().index != pi) { + state.parens.pop_back(); + } + + if (static_cast(state.parens.size()) - 1 < state.start_paren) { + state.start_paren = static_cast(state.parens.size()) - 1; + } + + if (!state.parens.empty()) { + script = state.parens.back().script; + } + } + } + + bool running_is_real = state.script != SCRIPT_COMMON && state.script != SCRIPT_INHERITED; + bool is_real = script != SCRIPT_COMMON && script != SCRIPT_INHERITED; + bool is_same = !running_is_real || !is_real || script == state.script; + + if (is_same) { + if (!running_is_real && is_real) { + state.script = script; + + // Now that we have a final script code, fix any open characters we + // pushed before we knew the real script code. 
+ while (state.start_paren + 1 < static_cast(state.parens.size())) { + state.parens[++state.start_paren].script = script; + } + + if (pair_index >= 0 && (pair_index & 1) && !state.parens.empty()) { + state.parens.pop_back(); + + if (static_cast(state.parens.size()) - 1 < state.start_paren) { + state.start_paren = static_cast(state.parens.size()) - 1; + } + } + } + + state.offset += jump; + } else { + state.start_paren = static_cast(state.parens.size()) - 1; + break; + } + } + + if (state.offset >= text_length) { + state.done = true; + } +} + +void +itemize_next( + ItemizeState& state, + const std::vector& text_buffer +) { + if (state.done) return; + + if (state.bidi_state.offset == state.offset) { + bidi_iterator_next(state.bidi_state, text_buffer); + } + + if (state.script_state.offset == state.offset) { + script_iterator_next(state.script_state, text_buffer); + } + + state.offset = std::min( + std::min( + state.bidi_state.offset, + state.script_state.offset + ), + text_buffer.size() + ); + + if (state.bidi_state.done && state.script_state.done) { + state.done = true; + } +} diff --git a/miniprogram/node_modules/canvas/src/itemize.h b/miniprogram/node_modules/canvas/src/itemize.h new file mode 100644 index 00000000..ca6dfe6a --- /dev/null +++ b/miniprogram/node_modules/canvas/src/itemize.h @@ -0,0 +1,70 @@ +#pragma once + +#include +#include +#include +#include "unicode.h" + +struct ParenInfo { + int index; + script_t script; +}; + +struct ScriptIteratorState { + // Output + size_t offset = 0; + script_t script = SCRIPT_COMMON; + bool done = false; + + // Private state + std::vector parens; + int start_paren = -1; +}; + +struct BidiIteratorState { + BidiIteratorState(const std::vector& text_buffer) { + SBCodepointSequence codepointSequence = { + SBStringEncodingUTF16, + text_buffer.data(), + text_buffer.size() + }; + algorithm = SBAlgorithmCreate(&codepointSequence); + paragraph = SBAlgorithmCreateParagraph( + algorithm, + offset, + text_buffer.size(), + initial_level + ); + levels = SBParagraphGetLevelsPtr(paragraph); + } + + ~BidiIteratorState() { + if (paragraph != nullptr) SBParagraphRelease(paragraph); + if (algorithm != nullptr) SBAlgorithmRelease(algorithm); + } + + // Output + size_t offset = 0; + uint8_t level = 0; + bool done = false; + + // Private state + SBAlgorithmRef algorithm = nullptr; + SBParagraphRef paragraph = nullptr; + const SBLevel* levels = nullptr; + uint8_t initial_level = 0; +}; + +struct ItemizeState { + ItemizeState(const std::vector& text_buffer) : bidi_state(text_buffer) {} + + // Output + size_t offset = 0; + bool done = false; + + // Private state + BidiIteratorState bidi_state; + ScriptIteratorState script_state; +}; + +void itemize_next(ItemizeState& state, const std::vector& text_buffer); \ No newline at end of file diff --git a/miniprogram/node_modules/canvas/src/register_font.cc b/miniprogram/node_modules/canvas/src/register_font.cc new file mode 100644 index 00000000..ae2ece58 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/register_font.cc @@ -0,0 +1,352 @@ +#include "register_font.h" + +#include +#include +#include + +#ifdef __APPLE__ +#include +#elif defined(_WIN32) +#include +#include +#else +#include +#endif + +#include +#include FT_FREETYPE_H +#include FT_TRUETYPE_TABLES_H +#include FT_SFNT_NAMES_H +#include FT_TRUETYPE_IDS_H +#ifndef FT_SFNT_OS2 +#define FT_SFNT_OS2 ft_sfnt_os2 +#endif + +// OSX seems to read the strings in MacRoman encoding and ignore Unicode entries. 
+// You can verify this by opening a TTF with both Unicode and Macroman on OSX. +// It uses the MacRoman name, while Fontconfig and Windows use Unicode +#ifdef __APPLE__ +#define PREFERRED_PLATFORM_ID TT_PLATFORM_MACINTOSH +#define PREFERRED_ENCODING_ID TT_MAC_ID_ROMAN +#else +#define PREFERRED_PLATFORM_ID TT_PLATFORM_MICROSOFT +#define PREFERRED_ENCODING_ID TT_MS_ID_UNICODE_CS +#endif + +#define IS_PREFERRED_ENC(X) \ + X.platform_id == PREFERRED_PLATFORM_ID && X.encoding_id == PREFERRED_ENCODING_ID + +#define GET_NAME_RANK(X) \ + (IS_PREFERRED_ENC(X) ? 1 : 0) + (X.name_id == TT_NAME_ID_PREFERRED_FAMILY ? 1 : 0) + +/* + * Return a UTF-8 encoded string given a TrueType name buf+len + * and its platform and encoding + */ + +char * +to_utf8(FT_Byte* buf, FT_UInt len, FT_UShort pid, FT_UShort eid) { + size_t ret_len = len * 4; // max chars in a utf8 string + char *ret = (char*)malloc(ret_len + 1); // utf8 string + null + + if (!ret) return NULL; + + // In my testing of hundreds of fonts from the Google Font repo, the two types + // of fonts are TT_PLATFORM_MICROSOFT with TT_MS_ID_UNICODE_CS encoding, or + // TT_PLATFORM_MACINTOSH with TT_MAC_ID_ROMAN encoding. Usually both, never neither + + char const *fromcode; + + if (pid == TT_PLATFORM_MACINTOSH && eid == TT_MAC_ID_ROMAN) { + fromcode = "MAC"; + } else if (pid == TT_PLATFORM_MICROSOFT && eid == TT_MS_ID_UNICODE_CS) { + fromcode = "UTF-16BE"; + } else { + free(ret); + return NULL; + } + + GIConv cd = g_iconv_open("UTF-8", fromcode); + + if (cd == (GIConv)-1) { + free(ret); + return NULL; + } + + size_t inbytesleft = len; + size_t outbytesleft = ret_len; + + size_t n_converted = g_iconv(cd, (char**)&buf, &inbytesleft, &ret, &outbytesleft); + + ret -= ret_len - outbytesleft; // rewind the pointers to their + buf -= len - inbytesleft; // original starting positions + + if (n_converted == (size_t)-1) { + free(ret); + return NULL; + } else { + ret[ret_len - outbytesleft] = '\0'; + return ret; + } +} + +/* + * Find a family name in the face's name table, preferring the one the + * system, fall back to the other + */ + +char * +get_family_name(FT_Face face) { + FT_SfntName name; + + int best_rank = -1; + char* best_buf = NULL; + + for (unsigned i = 0; i < FT_Get_Sfnt_Name_Count(face); ++i) { + FT_Get_Sfnt_Name(face, i, &name); + + if (name.name_id == TT_NAME_ID_FONT_FAMILY || name.name_id == TT_NAME_ID_PREFERRED_FAMILY) { + char *buf = to_utf8(name.string, name.string_len, name.platform_id, name.encoding_id); + + if (buf) { + int rank = GET_NAME_RANK(name); + if (rank > best_rank) { + best_rank = rank; + if (best_buf) free(best_buf); + best_buf = buf; + } else { + free(buf); + } + } + } + } + + return best_buf; +} + +PangoWeight +get_pango_weight(FT_UShort weight) { + switch (weight) { + case 100: return PANGO_WEIGHT_THIN; + case 200: return PANGO_WEIGHT_ULTRALIGHT; + case 300: return PANGO_WEIGHT_LIGHT; + #if PANGO_VERSION >= PANGO_VERSION_ENCODE(1, 36, 7) + case 350: return PANGO_WEIGHT_SEMILIGHT; + #endif + case 380: return PANGO_WEIGHT_BOOK; + case 400: return PANGO_WEIGHT_NORMAL; + case 500: return PANGO_WEIGHT_MEDIUM; + case 600: return PANGO_WEIGHT_SEMIBOLD; + case 700: return PANGO_WEIGHT_BOLD; + case 800: return PANGO_WEIGHT_ULTRABOLD; + case 900: return PANGO_WEIGHT_HEAVY; + case 1000: return PANGO_WEIGHT_ULTRAHEAVY; + default: return PANGO_WEIGHT_NORMAL; + } +} + +PangoStretch +get_pango_stretch(FT_UShort width) { + switch (width) { + case 1: return PANGO_STRETCH_ULTRA_CONDENSED; + case 2: return PANGO_STRETCH_EXTRA_CONDENSED; + case 3: 
return PANGO_STRETCH_CONDENSED; + case 4: return PANGO_STRETCH_SEMI_CONDENSED; + case 5: return PANGO_STRETCH_NORMAL; + case 6: return PANGO_STRETCH_SEMI_EXPANDED; + case 7: return PANGO_STRETCH_EXPANDED; + case 8: return PANGO_STRETCH_EXTRA_EXPANDED; + case 9: return PANGO_STRETCH_ULTRA_EXPANDED; + default: return PANGO_STRETCH_NORMAL; + } +} + +PangoStyle +get_pango_style(FT_Long flags) { + if (flags & FT_STYLE_FLAG_ITALIC) { + return PANGO_STYLE_ITALIC; + } else { + return PANGO_STYLE_NORMAL; + } +} + +#ifdef _WIN32 +std::unique_ptr +u8ToWide(const char* str) { + int iBufferSize = MultiByteToWideChar(CP_UTF8, 0, str, -1, (wchar_t*)NULL, 0); + if(!iBufferSize){ + return nullptr; + } + std::unique_ptr wpBufWString = std::unique_ptr{ new wchar_t[static_cast(iBufferSize)] }; + if(!MultiByteToWideChar(CP_UTF8, 0, str, -1, wpBufWString.get(), iBufferSize)){ + return nullptr; + } + return wpBufWString; +} + +static unsigned long +stream_read_func(FT_Stream stream, unsigned long offset, unsigned char* buffer, unsigned long count){ + HANDLE hFile = reinterpret_cast(stream->descriptor.pointer); + DWORD numberOfBytesRead; + OVERLAPPED overlapped; + overlapped.Offset = offset; + overlapped.OffsetHigh = 0; + overlapped.hEvent = NULL; + if(!ReadFile(hFile, buffer, count, &numberOfBytesRead, &overlapped)){ + return 0; + } + return numberOfBytesRead; +}; + +static void +stream_close_func(FT_Stream stream){ + HANDLE hFile = reinterpret_cast(stream->descriptor.pointer); + CloseHandle(hFile); +} +#endif + +/* + * Return a PangoFontDescription that will resolve to the font file + */ + +PangoFontDescription * +get_pango_font_description(unsigned char* filepath) { + FT_Library library; + FT_Face face; + PangoFontDescription *desc = pango_font_description_new(); +#ifdef _WIN32 + // FT_New_Face use fopen. + // Unable to find the file when supplied the multibyte string path on the Windows platform and throw error "Could not parse font file". + // This workaround fixes this by reading the font file uses win32 wide character API. 
+ std::unique_ptr wFilepath = u8ToWide((char*)filepath); + if(!wFilepath){ + return NULL; + } + HANDLE hFile = CreateFileW( + wFilepath.get(), + GENERIC_READ, + FILE_SHARE_READ, + NULL, + OPEN_EXISTING, + FILE_ATTRIBUTE_NORMAL, + NULL + ); + if(!hFile){ + return NULL; + } + LARGE_INTEGER liSize; + if(!GetFileSizeEx(hFile, &liSize)) { + CloseHandle(hFile); + return NULL; + } + FT_Open_Args args; + args.flags = FT_OPEN_STREAM; + FT_StreamRec stream; + stream.base = NULL; + stream.size = liSize.QuadPart; + stream.pos = 0; + stream.descriptor.pointer = hFile; + stream.read = stream_read_func; + stream.close = stream_close_func; + args.stream = &stream; + if ( + !FT_Init_FreeType(&library) && + !FT_Open_Face(library, &args, 0, &face)) { +#else + if (!FT_Init_FreeType(&library) && !FT_New_Face(library, (const char*)filepath, 0, &face)) { +#endif + TT_OS2 *table = (TT_OS2*)FT_Get_Sfnt_Table(face, FT_SFNT_OS2); + if (table) { + char *family = get_family_name(face); + + if (!family) { + pango_font_description_free(desc); + FT_Done_Face(face); + FT_Done_FreeType(library); + + return NULL; + } + + pango_font_description_set_family(desc, family); + free(family); + pango_font_description_set_weight(desc, get_pango_weight(table->usWeightClass)); + pango_font_description_set_stretch(desc, get_pango_stretch(table->usWidthClass)); + pango_font_description_set_style(desc, get_pango_style(face->style_flags)); + + FT_Done_Face(face); + FT_Done_FreeType(library); + + return desc; + } + } + pango_font_description_free(desc); + + return NULL; +} + +/* + * Register font with the OS + */ + +bool +register_font(unsigned char *filepath) { + bool success; + + #ifdef __APPLE__ + CFURLRef filepathUrl = CFURLCreateFromFileSystemRepresentation(NULL, filepath, strlen((char*)filepath), false); + success = CTFontManagerRegisterFontsForURL(filepathUrl, kCTFontManagerScopeProcess, NULL); + #elif defined(_WIN32) + std::unique_ptr wFilepath = u8ToWide((char*)filepath); + if(wFilepath){ + success = AddFontResourceExW(wFilepath.get(), FR_PRIVATE, 0) != 0; + }else{ + success = false; + } + + #else + success = FcConfigAppFontAddFile(FcConfigGetCurrent(), (FcChar8 *)(filepath)); + #endif + + if (!success) return false; + + // Tell Pango to throw away the current FontMap and create a new one. This + // has the effect of registering the new font in Pango by re-looking up all + // font families. + pango_cairo_font_map_set_default(NULL); + + return true; +} + +/* + * Deregister font from the OS + * Note that Linux (FontConfig) can only dereregister ALL fonts at once. + */ + +bool +deregister_font(unsigned char *filepath) { + bool success; + + #ifdef __APPLE__ + CFURLRef filepathUrl = CFURLCreateFromFileSystemRepresentation(NULL, filepath, strlen((char*)filepath), false); + success = CTFontManagerUnregisterFontsForURL(filepathUrl, kCTFontManagerScopeProcess, NULL); + #elif defined(_WIN32) + std::unique_ptr wFilepath = u8ToWide((char*)filepath); + if(wFilepath){ + success = RemoveFontResourceExW(wFilepath.get(), FR_PRIVATE, 0) != 0; + }else{ + success = false; + } + #else + FcConfigAppFontClear(FcConfigGetCurrent()); + success = true; + #endif + + if (!success) return false; + + // Tell Pango to throw away the current FontMap and create a new one. This + // has the effect of deregistering the font in Pango by re-looking up all + // font families. 
+ pango_cairo_font_map_set_default(NULL); + + return true; +} diff --git a/miniprogram/node_modules/canvas/src/register_font.h b/miniprogram/node_modules/canvas/src/register_font.h new file mode 100644 index 00000000..a4fcd598 --- /dev/null +++ b/miniprogram/node_modules/canvas/src/register_font.h @@ -0,0 +1,7 @@ +#pragma once + +#include + +PangoFontDescription *get_pango_font_description(unsigned char *filepath); +bool register_font(unsigned char *filepath); +bool deregister_font(unsigned char *filepath); diff --git a/miniprogram/node_modules/canvas/src/unicode.h b/miniprogram/node_modules/canvas/src/unicode.h new file mode 100644 index 00000000..a01e1f3b --- /dev/null +++ b/miniprogram/node_modules/canvas/src/unicode.h @@ -0,0 +1,184 @@ +// This is a manually written file for the C API of unicode.zig +// TODO: delete this when Zig resurrects -femit-h +// https://github.com/ziglang/zig/issues/9698 +#pragma once + +#include + +typedef enum { + SCRIPT_NONE, + SCRIPT_ADLAM, + SCRIPT_AHOM, + SCRIPT_ANATOLIAN_HIEROGLYPHS, + SCRIPT_ARABIC, + SCRIPT_ARMENIAN, + SCRIPT_AVESTAN, + SCRIPT_BALINESE, + SCRIPT_BAMUM, + SCRIPT_BASSA_VAH, + SCRIPT_BATAK, + SCRIPT_BENGALI, + SCRIPT_BHAIKSUKI, + SCRIPT_BOPOMOFO, + SCRIPT_BRAHMI, + SCRIPT_BRAILLE, + SCRIPT_BUGINESE, + SCRIPT_BUHID, + SCRIPT_CANADIAN_ABORIGINAL, + SCRIPT_CARIAN, + SCRIPT_CAUCASIAN_ALBANIAN, + SCRIPT_CHAKMA, + SCRIPT_CHAM, + SCRIPT_CHEROKEE, + SCRIPT_CHORASMIAN, + SCRIPT_COMMON, + SCRIPT_COPTIC, + SCRIPT_CUNEIFORM, + SCRIPT_CYPRIOT, + SCRIPT_CYPRO_MINOAN, + SCRIPT_CYRILLIC, + SCRIPT_DESERET, + SCRIPT_DEVANAGARI, + SCRIPT_DIVES_AKURU, + SCRIPT_DOGRA, + SCRIPT_DUPLOYAN, + SCRIPT_EGYPTIAN_HIEROGLYPHS, + SCRIPT_ELBASAN, + SCRIPT_ELYMAIC, + SCRIPT_ETHIOPIC, + SCRIPT_GARAY, + SCRIPT_GEORGIAN, + SCRIPT_GLAGOLITIC, + SCRIPT_GOTHIC, + SCRIPT_GRANTHA, + SCRIPT_GREEK, + SCRIPT_GUJARATI, + SCRIPT_GUNJALA_GONDI, + SCRIPT_GURMUKHI, + SCRIPT_GURUNG_KHEMA, + SCRIPT_HAN, + SCRIPT_HANGUL, + SCRIPT_HANIFI_ROHINGYA, + SCRIPT_HANUNOO, + SCRIPT_HATRAN, + SCRIPT_HEBREW, + SCRIPT_HIRAGANA, + SCRIPT_IMPERIAL_ARAMAIC, + SCRIPT_INHERITED, + SCRIPT_INSCRIPTIONAL_PAHLAVI, + SCRIPT_INSCRIPTIONAL_PARTHIAN, + SCRIPT_JAVANESE, + SCRIPT_KAITHI, + SCRIPT_KANNADA, + SCRIPT_KATAKANA, + SCRIPT_KAWI, + SCRIPT_KAYAH_LI, + SCRIPT_KHAROSHTHI, + SCRIPT_KHITAN_SMALL_SCRIPT, + SCRIPT_KHMER, + SCRIPT_KHOJKI, + SCRIPT_KHUDAWADI, + SCRIPT_KIRAT_RAI, + SCRIPT_LAO, + SCRIPT_LATIN, + SCRIPT_LEPCHA, + SCRIPT_LIMBU, + SCRIPT_LINEAR_A, + SCRIPT_LINEAR_B, + SCRIPT_LISU, + SCRIPT_LYCIAN, + SCRIPT_LYDIAN, + SCRIPT_MAHAJANI, + SCRIPT_MAKASAR, + SCRIPT_MALAYALAM, + SCRIPT_MANDAIC, + SCRIPT_MANICHAEAN, + SCRIPT_MARCHEN, + SCRIPT_MASARAM_GONDI, + SCRIPT_MEDEFAIDRIN, + SCRIPT_MEETEI_MAYEK, + SCRIPT_MENDE_KIKAKUI, + SCRIPT_MEROITIC_CURSIVE, + SCRIPT_MEROITIC_HIEROGLYPHS, + SCRIPT_MIAO, + SCRIPT_MODI, + SCRIPT_MONGOLIAN, + SCRIPT_MRO, + SCRIPT_MULTANI, + SCRIPT_MYANMAR, + SCRIPT_NABATAEAN, + SCRIPT_NAG_MUNDARI, + SCRIPT_NANDINAGARI, + SCRIPT_NEW_TAI_LUE, + SCRIPT_NEWA, + SCRIPT_NKO, + SCRIPT_NUSHU, + SCRIPT_NYIAKENG_PUACHUE_HMONG, + SCRIPT_OGHAM, + SCRIPT_OL_CHIKI, + SCRIPT_OL_ONAL, + SCRIPT_OLD_HUNGARIAN, + SCRIPT_OLD_ITALIC, + SCRIPT_OLD_NORTH_ARABIAN, + SCRIPT_OLD_PERMIC, + SCRIPT_OLD_PERSIAN, + SCRIPT_OLD_SOGDIAN, + SCRIPT_OLD_SOUTH_ARABIAN, + SCRIPT_OLD_TURKIC, + SCRIPT_OLD_UYGHUR, + SCRIPT_ORIYA, + SCRIPT_OSAGE, + SCRIPT_OSMANYA, + SCRIPT_PAHAWH_HMONG, + SCRIPT_PALMYRENE, + SCRIPT_PAU_CIN_HAU, + SCRIPT_PHAGS_PA, + SCRIPT_PHOENICIAN, + SCRIPT_PSALTER_PAHLAVI, + SCRIPT_REJANG, + SCRIPT_RUNIC, + 
SCRIPT_SAMARITAN, + SCRIPT_SAURASHTRA, + SCRIPT_SHARADA, + SCRIPT_SHAVIAN, + SCRIPT_SIDDHAM, + SCRIPT_SIGNWRITING, + SCRIPT_SINHALA, + SCRIPT_SOGDIAN, + SCRIPT_SORA_SOMPENG, + SCRIPT_SOYOMBO, + SCRIPT_SUNDANESE, + SCRIPT_SUNUWAR, + SCRIPT_SYLOTI_NAGRI, + SCRIPT_SYRIAC, + SCRIPT_TAGALOG, + SCRIPT_TAGBANWA, + SCRIPT_TAI_LE, + SCRIPT_TAI_THAM, + SCRIPT_TAI_VIET, + SCRIPT_TAKRI, + SCRIPT_TAMIL, + SCRIPT_TANGSA, + SCRIPT_TANGUT, + SCRIPT_TELUGU, + SCRIPT_THAANA, + SCRIPT_THAI, + SCRIPT_TIBETAN, + SCRIPT_TIFINAGH, + SCRIPT_TIRHUTA, + SCRIPT_TODHRI, + SCRIPT_TOTO, + SCRIPT_TULU_TIGALARI, + SCRIPT_UGARITIC, + SCRIPT_VAI, + SCRIPT_VITHKUQI, + SCRIPT_WANCHO, + SCRIPT_WARANG_CITI, + SCRIPT_YEZIDI, + SCRIPT_YI, + SCRIPT_ZANABAZAR_SQUARE +} script_t; + +extern "C" { + script_t get_script(uint32_t codepoint); +} \ No newline at end of file diff --git a/miniprogram/node_modules/canvas/src/unicode.zig b/miniprogram/node_modules/canvas/src/unicode.zig new file mode 100644 index 00000000..93a59f9c --- /dev/null +++ b/miniprogram/node_modules/canvas/src/unicode.zig @@ -0,0 +1,368 @@ +// This file binds the zg library to C. We use zg to: +// 1. Segment text into scripts for shaping boundaries +// 2. Find Unicode grapheme boundaries for fallback shaping +// 3. Eventually: segment text into emoji and non-emojis for font selection +// (this will rewuire a PR to zg though) +const Scripts = @import("Scripts"); +const std = @import("std"); + +pub const Script = enum(c_int) { + none, + Adlam, + Ahom, + Anatolian_Hieroglyphs, + Arabic, + Armenian, + Avestan, + Balinese, + Bamum, + Bassa_Vah, + Batak, + Bengali, + Bhaiksuki, + Bopomofo, + Brahmi, + Braille, + Buginese, + Buhid, + Canadian_Aboriginal, + Carian, + Caucasian_Albanian, + Chakma, + Cham, + Cherokee, + Chorasmian, + Common, + Coptic, + Cuneiform, + Cypriot, + Cypro_Minoan, + Cyrillic, + Deseret, + Devanagari, + Dives_Akuru, + Dogra, + Duployan, + Egyptian_Hieroglyphs, + Elbasan, + Elymaic, + Ethiopic, + Garay, + Georgian, + Glagolitic, + Gothic, + Grantha, + Greek, + Gujarati, + Gunjala_Gondi, + Gurmukhi, + Gurung_Khema, + Han, + Hangul, + Hanifi_Rohingya, + Hanunoo, + Hatran, + Hebrew, + Hiragana, + Imperial_Aramaic, + Inherited, + Inscriptional_Pahlavi, + Inscriptional_Parthian, + Javanese, + Kaithi, + Kannada, + Katakana, + Kawi, + Kayah_Li, + Kharoshthi, + Khitan_Small_Script, + Khmer, + Khojki, + Khudawadi, + Kirat_Rai, + Lao, + Latin, + Lepcha, + Limbu, + Linear_A, + Linear_B, + Lisu, + Lycian, + Lydian, + Mahajani, + Makasar, + Malayalam, + Mandaic, + Manichaean, + Marchen, + Masaram_Gondi, + Medefaidrin, + Meetei_Mayek, + Mende_Kikakui, + Meroitic_Cursive, + Meroitic_Hieroglyphs, + Miao, + Modi, + Mongolian, + Mro, + Multani, + Myanmar, + Nabataean, + Nag_Mundari, + Nandinagari, + New_Tai_Lue, + Newa, + Nko, + Nushu, + Nyiakeng_Puachue_Hmong, + Ogham, + Ol_Chiki, + Ol_Onal, + Old_Hungarian, + Old_Italic, + Old_North_Arabian, + Old_Permic, + Old_Persian, + Old_Sogdian, + Old_South_Arabian, + Old_Turkic, + Old_Uyghur, + Oriya, + Osage, + Osmanya, + Pahawh_Hmong, + Palmyrene, + Pau_Cin_Hau, + Phags_Pa, + Phoenician, + Psalter_Pahlavi, + Rejang, + Runic, + Samaritan, + Saurashtra, + Sharada, + Shavian, + Siddham, + SignWriting, + Sinhala, + Sogdian, + Sora_Sompeng, + Soyombo, + Sundanese, + Sunuwar, + Syloti_Nagri, + Syriac, + Tagalog, + Tagbanwa, + Tai_Le, + Tai_Tham, + Tai_Viet, + Takri, + Tamil, + Tangsa, + Tangut, + Telugu, + Thaana, + Thai, + Tibetan, + Tifinagh, + Tirhuta, + Todhri, + Toto, + Tulu_Tigalari, + Ugaritic, + Vai, + Vithkuqi, + Wancho, 
+ Warang_Citi, + Yezidi, + Yi, + Zanabazar_Square, +}; + +var scripts: Scripts = undefined; +var initialized: bool = false; + +export fn get_script(codepoint: u32) Script { + // TODO: initialize once, not every get_script call + if (!initialized) { + scripts = Scripts.init(std.heap.c_allocator) catch @panic("Failed to initialize scripts"); + initialized = true; + } + + const script_result = scripts.script(@as(u21, @intCast(codepoint))) orelse return .none; + + return switch (script_result) { + .none => .none, + .Adlam => .Adlam, + .Ahom => .Ahom, + .Anatolian_Hieroglyphs => .Anatolian_Hieroglyphs, + .Arabic => .Arabic, + .Armenian => .Armenian, + .Avestan => .Avestan, + .Balinese => .Balinese, + .Bamum => .Bamum, + .Bassa_Vah => .Bassa_Vah, + .Batak => .Batak, + .Bengali => .Bengali, + .Bhaiksuki => .Bhaiksuki, + .Bopomofo => .Bopomofo, + .Brahmi => .Brahmi, + .Braille => .Braille, + .Buginese => .Buginese, + .Buhid => .Buhid, + .Canadian_Aboriginal => .Canadian_Aboriginal, + .Carian => .Carian, + .Caucasian_Albanian => .Caucasian_Albanian, + .Chakma => .Chakma, + .Cham => .Cham, + .Cherokee => .Cherokee, + .Chorasmian => .Chorasmian, + .Common => .Common, + .Coptic => .Coptic, + .Cuneiform => .Cuneiform, + .Cypriot => .Cypriot, + .Cypro_Minoan => .Cypro_Minoan, + .Cyrillic => .Cyrillic, + .Deseret => .Deseret, + .Devanagari => .Devanagari, + .Dives_Akuru => .Dives_Akuru, + .Dogra => .Dogra, + .Duployan => .Duployan, + .Egyptian_Hieroglyphs => .Egyptian_Hieroglyphs, + .Elbasan => .Elbasan, + .Elymaic => .Elymaic, + .Ethiopic => .Ethiopic, + .Garay => .Garay, + .Georgian => .Georgian, + .Glagolitic => .Glagolitic, + .Gothic => .Gothic, + .Grantha => .Grantha, + .Greek => .Greek, + .Gujarati => .Gujarati, + .Gunjala_Gondi => .Gunjala_Gondi, + .Gurmukhi => .Gurmukhi, + .Gurung_Khema => .Gurung_Khema, + .Han => .Han, + .Hangul => .Hangul, + .Hanifi_Rohingya => .Hanifi_Rohingya, + .Hanunoo => .Hanunoo, + .Hatran => .Hatran, + .Hebrew => .Hebrew, + .Hiragana => .Hiragana, + .Imperial_Aramaic => .Imperial_Aramaic, + .Inherited => .Inherited, + .Inscriptional_Pahlavi => .Inscriptional_Pahlavi, + .Inscriptional_Parthian => .Inscriptional_Parthian, + .Javanese => .Javanese, + .Kaithi => .Kaithi, + .Kannada => .Kannada, + .Katakana => .Katakana, + .Kawi => .Kawi, + .Kayah_Li => .Kayah_Li, + .Kharoshthi => .Kharoshthi, + .Khitan_Small_Script => .Khitan_Small_Script, + .Khmer => .Khmer, + .Khojki => .Khojki, + .Khudawadi => .Khudawadi, + .Kirat_Rai => .Kirat_Rai, + .Lao => .Lao, + .Latin => .Latin, + .Lepcha => .Lepcha, + .Limbu => .Limbu, + .Linear_A => .Linear_A, + .Linear_B => .Linear_B, + .Lisu => .Lisu, + .Lycian => .Lycian, + .Lydian => .Lydian, + .Mahajani => .Mahajani, + .Makasar => .Makasar, + .Malayalam => .Malayalam, + .Mandaic => .Mandaic, + .Manichaean => .Manichaean, + .Marchen => .Marchen, + .Masaram_Gondi => .Masaram_Gondi, + .Medefaidrin => .Medefaidrin, + .Meetei_Mayek => .Meetei_Mayek, + .Mende_Kikakui => .Mende_Kikakui, + .Meroitic_Cursive => .Meroitic_Cursive, + .Meroitic_Hieroglyphs => .Meroitic_Hieroglyphs, + .Miao => .Miao, + .Modi => .Modi, + .Mongolian => .Mongolian, + .Mro => .Mro, + .Multani => .Multani, + .Myanmar => .Myanmar, + .Nabataean => .Nabataean, + .Nag_Mundari => .Nag_Mundari, + .Nandinagari => .Nandinagari, + .New_Tai_Lue => .New_Tai_Lue, + .Newa => .Newa, + .Nko => .Nko, + .Nushu => .Nushu, + .Nyiakeng_Puachue_Hmong => .Nyiakeng_Puachue_Hmong, + .Ogham => .Ogham, + .Ol_Chiki => .Ol_Chiki, + .Ol_Onal => .Ol_Onal, + .Old_Hungarian => .Old_Hungarian, + .Old_Italic 
=> .Old_Italic, + .Old_North_Arabian => .Old_North_Arabian, + .Old_Permic => .Old_Permic, + .Old_Persian => .Old_Persian, + .Old_Sogdian => .Old_Sogdian, + .Old_South_Arabian => .Old_South_Arabian, + .Old_Turkic => .Old_Turkic, + .Old_Uyghur => .Old_Uyghur, + .Oriya => .Oriya, + .Osage => .Osage, + .Osmanya => .Osmanya, + .Pahawh_Hmong => .Pahawh_Hmong, + .Palmyrene => .Palmyrene, + .Pau_Cin_Hau => .Pau_Cin_Hau, + .Phags_Pa => .Phags_Pa, + .Phoenician => .Phoenician, + .Psalter_Pahlavi => .Psalter_Pahlavi, + .Rejang => .Rejang, + .Runic => .Runic, + .Samaritan => .Samaritan, + .Saurashtra => .Saurashtra, + .Sharada => .Sharada, + .Shavian => .Shavian, + .Siddham => .Siddham, + .SignWriting => .SignWriting, + .Sinhala => .Sinhala, + .Sogdian => .Sogdian, + .Sora_Sompeng => .Sora_Sompeng, + .Soyombo => .Soyombo, + .Sundanese => .Sundanese, + .Sunuwar => .Sunuwar, + .Syloti_Nagri => .Syloti_Nagri, + .Syriac => .Syriac, + .Tagalog => .Tagalog, + .Tagbanwa => .Tagbanwa, + .Tai_Le => .Tai_Le, + .Tai_Tham => .Tai_Tham, + .Tai_Viet => .Tai_Viet, + .Takri => .Takri, + .Tamil => .Tamil, + .Tangsa => .Tangsa, + .Tangut => .Tangut, + .Telugu => .Telugu, + .Thaana => .Thaana, + .Thai => .Thai, + .Tibetan => .Tibetan, + .Tifinagh => .Tifinagh, + .Tirhuta => .Tirhuta, + .Todhri => .Todhri, + .Toto => .Toto, + .Tulu_Tigalari => .Tulu_Tigalari, + .Ugaritic => .Ugaritic, + .Vai => .Vai, + .Vithkuqi => .Vithkuqi, + .Wancho => .Wancho, + .Warang_Citi => .Warang_Citi, + .Yezidi => .Yezidi, + .Yi => .Yi, + .Zanabazar_Square => .Zanabazar_Square, + }; +} \ No newline at end of file diff --git a/miniprogram/node_modules/canvas/util/has_lib.js b/miniprogram/node_modules/canvas/util/has_lib.js new file mode 100644 index 00000000..02d70906 --- /dev/null +++ b/miniprogram/node_modules/canvas/util/has_lib.js @@ -0,0 +1,119 @@ +const query = process.argv[2] +const fs = require('fs') +const childProcess = require('child_process') + +const SYSTEM_PATHS = [ + '/lib', + '/usr/lib', + '/usr/lib64', + '/usr/local/lib', + '/opt/local/lib', + '/opt/homebrew/lib', + '/usr/lib/x86_64-linux-gnu', + '/usr/lib/i386-linux-gnu', + '/usr/lib/arm-linux-gnueabihf', + '/usr/lib/arm-linux-gnueabi', + '/usr/lib/aarch64-linux-gnu' +] + +/** + * Checks for lib using ldconfig if present, or searching SYSTEM_PATHS + * otherwise. + * @param {string} lib - library name, e.g. 'jpeg' in 'libjpeg64.so' (see first line) + * @return {boolean} exists + */ +function hasSystemLib (lib) { + const libName = 'lib' + lib + '.+(so|dylib)' + const libNameRegex = new RegExp(libName) + + // Try using ldconfig on linux systems + if (hasLdconfig()) { + try { + if (childProcess.execSync('ldconfig -p 2>/dev/null | grep -E "' + libName + '"').length) { + return true + } + } catch (err) { + // noop -- proceed to other search methods + } + } + + // Try checking common library locations + return SYSTEM_PATHS.some(function (systemPath) { + try { + const dirListing = fs.readdirSync(systemPath) + return dirListing.some(function (file) { + return libNameRegex.test(file) + }) + } catch (err) { + return false + } + }) +} + +/** + * Checks for ldconfig on the path and /sbin + * @return {boolean} exists + */ +function hasLdconfig () { + try { + // Add /sbin to path as ldconfig is located there on some systems -- e.g. + // Debian (and it can still be used by unprivileged users): + childProcess.execSync('export PATH="$PATH:/sbin"') + process.env.PATH = '...' 
+ // execSync throws on nonzero exit + childProcess.execSync('hash ldconfig 2>/dev/null') + return true + } catch (err) { + return false + } +} + +/** + * Checks for freetype2 with --cflags-only-I + * @return Boolean exists + */ +function hasFreetype () { + try { + if (childProcess.execSync('pkg-config cairo --cflags-only-I 2>/dev/null | grep freetype2').length) { + return true + } + } catch (err) { + // noop + } + return false +} + +/** + * Checks for lib using pkg-config. + * @param {string} lib - library name + * @return {boolean} exists + */ +function hasPkgconfigLib (lib) { + try { + // execSync throws on nonzero exit + childProcess.execSync('pkg-config --exists "' + lib + '" 2>/dev/null') + return true + } catch (err) { + return false + } +} + +function main (query) { + switch (query) { + case 'gif': + case 'cairo': + return hasSystemLib(query) + case 'pango': + return hasPkgconfigLib(query) + case 'freetype': + return hasFreetype() + case 'jpeg': + return hasPkgconfigLib('libjpeg') + case 'rsvg': + return hasPkgconfigLib('librsvg-2.0') + default: + throw new Error('Unknown library: ' + query) + } +} + +process.stdout.write(main(query).toString()) diff --git a/miniprogram/node_modules/canvas/util/win_jpeg_lookup.js b/miniprogram/node_modules/canvas/util/win_jpeg_lookup.js new file mode 100644 index 00000000..79815f65 --- /dev/null +++ b/miniprogram/node_modules/canvas/util/win_jpeg_lookup.js @@ -0,0 +1,21 @@ +const fs = require('fs') +const paths = ['C:/libjpeg-turbo'] + +if (process.arch === 'x64') { + paths.unshift('C:/libjpeg-turbo64') +} + +paths.forEach(function (path) { + if (exists(path)) { + process.stdout.write(path) + process.exit() + } +}) + +function exists (path) { + try { + return fs.lstatSync(path).isDirectory() + } catch (e) { + return false + } +} diff --git a/miniprogram/node_modules/chownr/LICENSE b/miniprogram/node_modules/chownr/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/miniprogram/node_modules/chownr/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/miniprogram/node_modules/chownr/README.md b/miniprogram/node_modules/chownr/README.md new file mode 100644 index 00000000..70e9a54a --- /dev/null +++ b/miniprogram/node_modules/chownr/README.md @@ -0,0 +1,3 @@ +Like `chown -R`. + +Takes the same arguments as `fs.chown()` diff --git a/miniprogram/node_modules/chownr/chownr.js b/miniprogram/node_modules/chownr/chownr.js new file mode 100644 index 00000000..0d409321 --- /dev/null +++ b/miniprogram/node_modules/chownr/chownr.js @@ -0,0 +1,167 @@ +'use strict' +const fs = require('fs') +const path = require('path') + +/* istanbul ignore next */ +const LCHOWN = fs.lchown ? 'lchown' : 'chown' +/* istanbul ignore next */ +const LCHOWNSYNC = fs.lchownSync ? 
'lchownSync' : 'chownSync' + +/* istanbul ignore next */ +const needEISDIRHandled = fs.lchown && + !process.version.match(/v1[1-9]+\./) && + !process.version.match(/v10\.[6-9]/) + +const lchownSync = (path, uid, gid) => { + try { + return fs[LCHOWNSYNC](path, uid, gid) + } catch (er) { + if (er.code !== 'ENOENT') + throw er + } +} + +/* istanbul ignore next */ +const chownSync = (path, uid, gid) => { + try { + return fs.chownSync(path, uid, gid) + } catch (er) { + if (er.code !== 'ENOENT') + throw er + } +} + +/* istanbul ignore next */ +const handleEISDIR = + needEISDIRHandled ? (path, uid, gid, cb) => er => { + // Node prior to v10 had a very questionable implementation of + // fs.lchown, which would always try to call fs.open on a directory + // Fall back to fs.chown in those cases. + if (!er || er.code !== 'EISDIR') + cb(er) + else + fs.chown(path, uid, gid, cb) + } + : (_, __, ___, cb) => cb + +/* istanbul ignore next */ +const handleEISDirSync = + needEISDIRHandled ? (path, uid, gid) => { + try { + return lchownSync(path, uid, gid) + } catch (er) { + if (er.code !== 'EISDIR') + throw er + chownSync(path, uid, gid) + } + } + : (path, uid, gid) => lchownSync(path, uid, gid) + +// fs.readdir could only accept an options object as of node v6 +const nodeVersion = process.version +let readdir = (path, options, cb) => fs.readdir(path, options, cb) +let readdirSync = (path, options) => fs.readdirSync(path, options) +/* istanbul ignore next */ +if (/^v4\./.test(nodeVersion)) + readdir = (path, options, cb) => fs.readdir(path, cb) + +const chown = (cpath, uid, gid, cb) => { + fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er.code !== 'ENOENT' ? er : null) + })) +} + +const chownrKid = (p, child, uid, gid, cb) => { + if (typeof child === 'string') + return fs.lstat(path.resolve(p, child), (er, stats) => { + // Skip ENOENT error + if (er) + return cb(er.code !== 'ENOENT' ? er : null) + stats.name = child + chownrKid(p, stats, uid, gid, cb) + }) + + if (child.isDirectory()) { + chownr(path.resolve(p, child.name), uid, gid, er => { + if (er) + return cb(er) + const cpath = path.resolve(p, child.name) + chown(cpath, uid, gid, cb) + }) + } else { + const cpath = path.resolve(p, child.name) + chown(cpath, uid, gid, cb) + } +} + + +const chownr = (p, uid, gid, cb) => { + readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. 
+ if (er) { + if (er.code === 'ENOENT') + return cb() + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er) + } + if (er || !children.length) + return chown(p, uid, gid, cb) + + let len = children.length + let errState = null + const then = er => { + if (errState) + return + if (er) + return cb(errState = er) + if (-- len === 0) + return chown(p, uid, gid, cb) + } + + children.forEach(child => chownrKid(p, child, uid, gid, then)) + }) +} + +const chownrKidSync = (p, child, uid, gid) => { + if (typeof child === 'string') { + try { + const stats = fs.lstatSync(path.resolve(p, child)) + stats.name = child + child = stats + } catch (er) { + if (er.code === 'ENOENT') + return + else + throw er + } + } + + if (child.isDirectory()) + chownrSync(path.resolve(p, child.name), uid, gid) + + handleEISDirSync(path.resolve(p, child.name), uid, gid) +} + +const chownrSync = (p, uid, gid) => { + let children + try { + children = readdirSync(p, { withFileTypes: true }) + } catch (er) { + if (er.code === 'ENOENT') + return + else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP') + return handleEISDirSync(p, uid, gid) + else + throw er + } + + if (children && children.length) + children.forEach(child => chownrKidSync(p, child, uid, gid)) + + return handleEISDirSync(p, uid, gid) +} + +module.exports = chownr +chownr.sync = chownrSync diff --git a/miniprogram/node_modules/chownr/package.json b/miniprogram/node_modules/chownr/package.json new file mode 100644 index 00000000..c273a7d1 --- /dev/null +++ b/miniprogram/node_modules/chownr/package.json @@ -0,0 +1,29 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "chownr", + "description": "like `chown -R`", + "version": "1.1.4", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/chownr.git" + }, + "main": "chownr.js", + "files": [ + "chownr.js" + ], + "devDependencies": { + "mkdirp": "0.3", + "rimraf": "^2.7.1", + "tap": "^14.10.6" + }, + "tap": { + "check-coverage": true + }, + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "license": "ISC" +} diff --git a/miniprogram/node_modules/decompress-response/index.d.ts b/miniprogram/node_modules/decompress-response/index.d.ts new file mode 100644 index 00000000..c0be175f --- /dev/null +++ b/miniprogram/node_modules/decompress-response/index.d.ts @@ -0,0 +1,22 @@ +/// +import {IncomingMessage} from 'http'; + +/** +Decompress a HTTP response if needed. + +@param response - The HTTP incoming stream with compressed data. +@returns The decompressed HTTP response stream. 
+ +@example +``` +import {http} from 'http'; +import decompressResponse = require('decompress-response'); + +http.get('https://sindresorhus.com', response => { + response = decompressResponse(response); +}); +``` +*/ +declare function decompressResponse(response: IncomingMessage): IncomingMessage; + +export = decompressResponse; diff --git a/miniprogram/node_modules/decompress-response/index.js b/miniprogram/node_modules/decompress-response/index.js new file mode 100644 index 00000000..c8610362 --- /dev/null +++ b/miniprogram/node_modules/decompress-response/index.js @@ -0,0 +1,58 @@ +'use strict'; +const {Transform, PassThrough} = require('stream'); +const zlib = require('zlib'); +const mimicResponse = require('mimic-response'); + +module.exports = response => { + const contentEncoding = (response.headers['content-encoding'] || '').toLowerCase(); + + if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) { + return response; + } + + // TODO: Remove this when targeting Node.js 12. + const isBrotli = contentEncoding === 'br'; + if (isBrotli && typeof zlib.createBrotliDecompress !== 'function') { + response.destroy(new Error('Brotli is not supported on Node.js < 12')); + return response; + } + + let isEmpty = true; + + const checker = new Transform({ + transform(data, _encoding, callback) { + isEmpty = false; + + callback(null, data); + }, + + flush(callback) { + callback(); + } + }); + + const finalStream = new PassThrough({ + autoDestroy: false, + destroy(error, callback) { + response.destroy(); + + callback(error); + } + }); + + const decompressStream = isBrotli ? zlib.createBrotliDecompress() : zlib.createUnzip(); + + decompressStream.once('error', error => { + if (isEmpty && !response.readable) { + finalStream.end(); + return; + } + + finalStream.destroy(error); + }); + + mimicResponse(response, finalStream); + response.pipe(checker).pipe(decompressStream).pipe(finalStream); + + return finalStream; +}; diff --git a/miniprogram/node_modules/decompress-response/license b/miniprogram/node_modules/decompress-response/license new file mode 100644 index 00000000..fa7ceba3 --- /dev/null +++ b/miniprogram/node_modules/decompress-response/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/miniprogram/node_modules/decompress-response/package.json b/miniprogram/node_modules/decompress-response/package.json new file mode 100644 index 00000000..57df8896 --- /dev/null +++ b/miniprogram/node_modules/decompress-response/package.json @@ -0,0 +1,56 @@ +{ + "name": "decompress-response", + "version": "6.0.0", + "description": "Decompress a HTTP response if needed", + "license": "MIT", + "repository": "sindresorhus/decompress-response", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "decompress", + "response", + "http", + "https", + "zlib", + "gzip", + "zip", + "deflate", + "unzip", + "ungzip", + "incoming", + "message", + "stream", + "compressed", + "brotli" + ], + "dependencies": { + "mimic-response": "^3.1.0" + }, + "devDependencies": { + "@types/node": "^14.0.1", + "ava": "^2.2.0", + "get-stream": "^5.0.0", + "pify": "^5.0.0", + "tsd": "^0.11.0", + "xo": "^0.30.0" + }, + "xo": { + "rules": { + "@typescript-eslint/prefer-readonly-parameter-types": "off" + } + } +} diff --git a/miniprogram/node_modules/decompress-response/readme.md b/miniprogram/node_modules/decompress-response/readme.md new file mode 100644 index 00000000..58523ef3 --- /dev/null +++ b/miniprogram/node_modules/decompress-response/readme.md @@ -0,0 +1,48 @@ +# decompress-response [![Build Status](https://travis-ci.com/sindresorhus/decompress-response.svg?branch=master)](https://travis-ci.com/sindresorhus/decompress-response) + +> Decompress a HTTP response if needed + +Decompresses the [response](https://nodejs.org/api/http.html#http_class_http_incomingmessage) from [`http.request`](https://nodejs.org/api/http.html#http_http_request_options_callback) if it's gzipped, deflated or compressed with Brotli, otherwise just passes it through. + +Used by [`got`](https://github.com/sindresorhus/got). + +## Install + +``` +$ npm install decompress-response +``` + +## Usage + +```js +const http = require('http'); +const decompressResponse = require('decompress-response'); + +http.get('https://sindresorhus.com', response => { + response = decompressResponse(response); +}); +``` + +## API + +### decompressResponse(response) + +Returns the decompressed HTTP response stream. + +#### response + +Type: [`http.IncomingMessage`](https://nodejs.org/api/http.html#http_class_http_incomingmessage) + +The HTTP incoming stream with compressed data. + +--- + +
+Get professional support for this package with a Tidelift subscription
+
+Tidelift helps make open source sustainable for maintainers while giving companies
+assurances about security, maintenance, and licensing for their dependencies.
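The readme above only reassigns `response`; as a minimal sketch of actually consuming the decompressed body — assuming the package is installed locally and that the example URL (purely illustrative) answers with a gzip- or brotli-encoded response — the stream can be read like any other `IncomingMessage`:

```js
// Sketch only: decompress-response installed locally; URL is illustrative.
const https = require('https');
const decompressResponse = require('decompress-response');

https.get('https://example.com', (response) => {
  // Returns the original stream untouched when there is no
  // gzip/deflate/br content-encoding header.
  const decompressed = decompressResponse(response);

  let body = '';
  decompressed.setEncoding('utf8');
  decompressed.on('data', (chunk) => {
    body += chunk; // chunks arrive already decompressed
  });
  decompressed.on('end', () => {
    console.log(`received ${body.length} characters`);
  });
});
```

Because a plain (uncompressed) response is passed through unchanged, the same consumer code works whether or not the server compresses its output.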
diff --git a/miniprogram/node_modules/deep-extend/CHANGELOG.md b/miniprogram/node_modules/deep-extend/CHANGELOG.md new file mode 100644 index 00000000..dd13ec13 --- /dev/null +++ b/miniprogram/node_modules/deep-extend/CHANGELOG.md @@ -0,0 +1,46 @@ +Changelog +========= + +v0.6.0 +------ + +- Updated "devDependencies" versions to fix vulnerability alerts +- Dropped support of io.js and node.js v0.12.x and lower since new versions of + "devDependencies" couldn't work with those old node.js versions + (minimal supported version of node.js now is v4.0.0) + +v0.5.1 +------ + +- Fix prototype pollution vulnerability (thanks to @mwakerman for the PR) +- Avoid using deprecated Buffer API (thanks to @ChALkeR for the PR) + +v0.5.0 +------ + +- Auto-testing provided by Travis CI; +- Support older Node.JS versions (`v0.11.x` and `v0.10.x`); +- Removed tests files from npm package. + +v0.4.2 +------ + +- Fix for `null` as an argument. + +v0.4.1 +------ + +- Removed test code from npm package + ([see pull request #21](https://github.com/unclechu/node-deep-extend/pull/21)); +- Increased minimal version of Node from `0.4.0` to `0.12.0` + (because can't run tests on lesser version anyway). + +v0.4.0 +------ + +- **WARNING!** Broken backward compatibility with `v0.3.x`; +- Fixed bug with extending arrays instead of cloning; +- Deep cloning for arrays; +- Check for own property; +- Fixed some documentation issues; +- Strict JS mode. diff --git a/miniprogram/node_modules/deep-extend/LICENSE b/miniprogram/node_modules/deep-extend/LICENSE new file mode 100644 index 00000000..5c58916f --- /dev/null +++ b/miniprogram/node_modules/deep-extend/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013-2018, Viacheslav Lotsmanov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/miniprogram/node_modules/deep-extend/README.md b/miniprogram/node_modules/deep-extend/README.md new file mode 100644 index 00000000..67c7fc08 --- /dev/null +++ b/miniprogram/node_modules/deep-extend/README.md @@ -0,0 +1,91 @@ +Deep Extend +=========== + +Recursive object extending. 
+ +[![Build Status](https://api.travis-ci.org/unclechu/node-deep-extend.svg?branch=master)](https://travis-ci.org/unclechu/node-deep-extend) + +[![NPM](https://nodei.co/npm/deep-extend.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/deep-extend/) + +Install +------- + +```bash +$ npm install deep-extend +``` + +Usage +----- + +```javascript +var deepExtend = require('deep-extend'); +var obj1 = { + a: 1, + b: 2, + d: { + a: 1, + b: [], + c: { test1: 123, test2: 321 } + }, + f: 5, + g: 123, + i: 321, + j: [1, 2] +}; +var obj2 = { + b: 3, + c: 5, + d: { + b: { first: 'one', second: 'two' }, + c: { test2: 222 } + }, + e: { one: 1, two: 2 }, + f: [], + g: (void 0), + h: /abc/g, + i: null, + j: [3, 4] +}; + +deepExtend(obj1, obj2); + +console.log(obj1); +/* +{ a: 1, + b: 3, + d: + { a: 1, + b: { first: 'one', second: 'two' }, + c: { test1: 123, test2: 222 } }, + f: [], + g: undefined, + c: 5, + e: { one: 1, two: 2 }, + h: /abc/g, + i: null, + j: [3, 4] } +*/ +``` + +Unit testing +------------ + +```bash +$ npm test +``` + +Changelog +--------- + +[CHANGELOG.md](./CHANGELOG.md) + +Any issues? +----------- + +Please, report about issues +[here](https://github.com/unclechu/node-deep-extend/issues). + +License +------- + +[MIT](./LICENSE) diff --git a/miniprogram/node_modules/deep-extend/index.js b/miniprogram/node_modules/deep-extend/index.js new file mode 100644 index 00000000..762d81e9 --- /dev/null +++ b/miniprogram/node_modules/deep-extend/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/deep-extend'); diff --git a/miniprogram/node_modules/deep-extend/lib/deep-extend.js b/miniprogram/node_modules/deep-extend/lib/deep-extend.js new file mode 100644 index 00000000..651fd8d3 --- /dev/null +++ b/miniprogram/node_modules/deep-extend/lib/deep-extend.js @@ -0,0 +1,150 @@ +/*! + * @description Recursive object extending + * @author Viacheslav Lotsmanov + * @license MIT + * + * The MIT License (MIT) + * + * Copyright (c) 2013-2018 Viacheslav Lotsmanov + * + * Permission is hereby granted, free of charge, to any person obtaining a copy of + * this software and associated documentation files (the "Software"), to deal in + * the Software without restriction, including without limitation the rights to + * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + * the Software, and to permit persons to whom the Software is furnished to do so, + * subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS + * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR + * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +'use strict'; + +function isSpecificValue(val) { + return ( + val instanceof Buffer + || val instanceof Date + || val instanceof RegExp + ) ? true : false; +} + +function cloneSpecificValue(val) { + if (val instanceof Buffer) { + var x = Buffer.alloc + ? 
Buffer.alloc(val.length) + : new Buffer(val.length); + val.copy(x); + return x; + } else if (val instanceof Date) { + return new Date(val.getTime()); + } else if (val instanceof RegExp) { + return new RegExp(val); + } else { + throw new Error('Unexpected situation'); + } +} + +/** + * Recursive cloning array. + */ +function deepCloneArray(arr) { + var clone = []; + arr.forEach(function (item, index) { + if (typeof item === 'object' && item !== null) { + if (Array.isArray(item)) { + clone[index] = deepCloneArray(item); + } else if (isSpecificValue(item)) { + clone[index] = cloneSpecificValue(item); + } else { + clone[index] = deepExtend({}, item); + } + } else { + clone[index] = item; + } + }); + return clone; +} + +function safeGetProperty(object, property) { + return property === '__proto__' ? undefined : object[property]; +} + +/** + * Extening object that entered in first argument. + * + * Returns extended object or false if have no target object or incorrect type. + * + * If you wish to clone source object (without modify it), just use empty new + * object as first argument, like this: + * deepExtend({}, yourObj_1, [yourObj_N]); + */ +var deepExtend = module.exports = function (/*obj_1, [obj_2], [obj_N]*/) { + if (arguments.length < 1 || typeof arguments[0] !== 'object') { + return false; + } + + if (arguments.length < 2) { + return arguments[0]; + } + + var target = arguments[0]; + + // convert arguments to array and cut off target object + var args = Array.prototype.slice.call(arguments, 1); + + var val, src, clone; + + args.forEach(function (obj) { + // skip argument if isn't an object, is null, or is an array + if (typeof obj !== 'object' || obj === null || Array.isArray(obj)) { + return; + } + + Object.keys(obj).forEach(function (key) { + src = safeGetProperty(target, key); // source value + val = safeGetProperty(obj, key); // new value + + // recursion prevention + if (val === target) { + return; + + /** + * if new value isn't object then just overwrite by new value + * instead of extending. + */ + } else if (typeof val !== 'object' || val === null) { + target[key] = val; + return; + + // just clone arrays (and recursive clone objects inside) + } else if (Array.isArray(val)) { + target[key] = deepCloneArray(val); + return; + + // custom cloning and overwrite for specific objects + } else if (isSpecificValue(val)) { + target[key] = cloneSpecificValue(val); + return; + + // overwrite by new value if source isn't object or array + } else if (typeof src !== 'object' || src === null || Array.isArray(src)) { + target[key] = deepExtend({}, val); + return; + + // source value and new value is objects both, extending... 
+ } else { + target[key] = deepExtend(src, val); + return; + } + }); + }); + + return target; +}; diff --git a/miniprogram/node_modules/deep-extend/package.json b/miniprogram/node_modules/deep-extend/package.json new file mode 100644 index 00000000..5f2195ff --- /dev/null +++ b/miniprogram/node_modules/deep-extend/package.json @@ -0,0 +1,62 @@ +{ + "name": "deep-extend", + "description": "Recursive object extending", + "license": "MIT", + "version": "0.6.0", + "homepage": "https://github.com/unclechu/node-deep-extend", + "keywords": [ + "deep-extend", + "extend", + "deep", + "recursive", + "xtend", + "clone", + "merge", + "json" + ], + "licenses": [ + { + "type": "MIT", + "url": "https://raw.githubusercontent.com/unclechu/node-deep-extend/master/LICENSE" + } + ], + "repository": { + "type": "git", + "url": "git://github.com/unclechu/node-deep-extend.git" + }, + "author": "Viacheslav Lotsmanov ", + "bugs": "https://github.com/unclechu/node-deep-extend/issues", + "contributors": [ + { + "name": "Romain Prieto", + "url": "https://github.com/rprieto" + }, + { + "name": "Max Maximov", + "url": "https://github.com/maxmaximov" + }, + { + "name": "Marshall Bowers", + "url": "https://github.com/maxdeviant" + }, + { + "name": "Misha Wakerman", + "url": "https://github.com/mwakerman" + } + ], + "main": "lib/deep-extend.js", + "engines": { + "node": ">=4.0.0" + }, + "scripts": { + "test": "./node_modules/.bin/mocha" + }, + "devDependencies": { + "mocha": "5.2.0", + "should": "13.2.1" + }, + "files": [ + "index.js", + "lib/" + ] +} diff --git a/miniprogram/node_modules/detect-libc/LICENSE b/miniprogram/node_modules/detect-libc/LICENSE new file mode 100644 index 00000000..8dada3ed --- /dev/null +++ b/miniprogram/node_modules/detect-libc/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/miniprogram/node_modules/detect-libc/README.md b/miniprogram/node_modules/detect-libc/README.md new file mode 100644 index 00000000..23212fdd --- /dev/null +++ b/miniprogram/node_modules/detect-libc/README.md @@ -0,0 +1,163 @@ +# detect-libc + +Node.js module to detect details of the C standard library (libc) +implementation provided by a given Linux system. + +Currently supports detection of GNU glibc and MUSL libc. + +Provides asychronous and synchronous functions for the +family (e.g. `glibc`, `musl`) and version (e.g. `1.23`, `1.2.3`). + +The version numbers of libc implementations +are not guaranteed to be semver-compliant. + +For previous v1.x releases, please see the +[v1](https://github.com/lovell/detect-libc/tree/v1) branch. + +## Install + +```sh +npm install detect-libc +``` + +## API + +### GLIBC + +```ts +const GLIBC: string = 'glibc'; +``` + +A String constant containing the value `glibc`. 
+ +### MUSL + +```ts +const MUSL: string = 'musl'; +``` + +A String constant containing the value `musl`. + +### family + +```ts +function family(): Promise; +``` + +Resolves asychronously with: + +* `glibc` or `musl` when the libc family can be determined +* `null` when the libc family cannot be determined +* `null` when run on a non-Linux platform + +```js +const { family, GLIBC, MUSL } = require('detect-libc'); + +switch (await family()) { + case GLIBC: ... + case MUSL: ... + case null: ... +} +``` + +### familySync + +```ts +function familySync(): string | null; +``` + +Synchronous version of `family()`. + +```js +const { familySync, GLIBC, MUSL } = require('detect-libc'); + +switch (familySync()) { + case GLIBC: ... + case MUSL: ... + case null: ... +} +``` + +### version + +```ts +function version(): Promise; +``` + +Resolves asychronously with: + +* The version when it can be determined +* `null` when the libc family cannot be determined +* `null` when run on a non-Linux platform + +```js +const { version } = require('detect-libc'); + +const v = await version(); +if (v) { + const [major, minor, patch] = v.split('.'); +} +``` + +### versionSync + +```ts +function versionSync(): string | null; +``` + +Synchronous version of `version()`. + +```js +const { versionSync } = require('detect-libc'); + +const v = versionSync(); +if (v) { + const [major, minor, patch] = v.split('.'); +} +``` + +### isNonGlibcLinux + +```ts +function isNonGlibcLinux(): Promise; +``` + +Resolves asychronously with: + +* `false` when the libc family is `glibc` +* `true` when the libc family is not `glibc` +* `false` when run on a non-Linux platform + +```js +const { isNonGlibcLinux } = require('detect-libc'); + +if (await isNonGlibcLinux()) { ... } +``` + +### isNonGlibcLinuxSync + +```ts +function isNonGlibcLinuxSync(): boolean; +``` + +Synchronous version of `isNonGlibcLinux()`. + +```js +const { isNonGlibcLinuxSync } = require('detect-libc'); + +if (isNonGlibcLinuxSync()) { ... } +``` + +## Licensing + +Copyright 2017 Lovell Fuller and others. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html) + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/miniprogram/node_modules/detect-libc/index.d.ts b/miniprogram/node_modules/detect-libc/index.d.ts new file mode 100644 index 00000000..4c0fb2b0 --- /dev/null +++ b/miniprogram/node_modules/detect-libc/index.d.ts @@ -0,0 +1,14 @@ +// Copyright 2017 Lovell Fuller and others. 
+// SPDX-License-Identifier: Apache-2.0 + +export const GLIBC: 'glibc'; +export const MUSL: 'musl'; + +export function family(): Promise; +export function familySync(): string | null; + +export function isNonGlibcLinux(): Promise; +export function isNonGlibcLinuxSync(): boolean; + +export function version(): Promise; +export function versionSync(): string | null; diff --git a/miniprogram/node_modules/detect-libc/lib/detect-libc.js b/miniprogram/node_modules/detect-libc/lib/detect-libc.js new file mode 100644 index 00000000..01299b4d --- /dev/null +++ b/miniprogram/node_modules/detect-libc/lib/detect-libc.js @@ -0,0 +1,313 @@ +// Copyright 2017 Lovell Fuller and others. +// SPDX-License-Identifier: Apache-2.0 + +'use strict'; + +const childProcess = require('child_process'); +const { isLinux, getReport } = require('./process'); +const { LDD_PATH, SELF_PATH, readFile, readFileSync } = require('./filesystem'); +const { interpreterPath } = require('./elf'); + +let cachedFamilyInterpreter; +let cachedFamilyFilesystem; +let cachedVersionFilesystem; + +const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true'; +let commandOut = ''; + +const safeCommand = () => { + if (!commandOut) { + return new Promise((resolve) => { + childProcess.exec(command, (err, out) => { + commandOut = err ? ' ' : out; + resolve(commandOut); + }); + }); + } + return commandOut; +}; + +const safeCommandSync = () => { + if (!commandOut) { + try { + commandOut = childProcess.execSync(command, { encoding: 'utf8' }); + } catch (_err) { + commandOut = ' '; + } + } + return commandOut; +}; + +/** + * A String constant containing the value `glibc`. + * @type {string} + * @public + */ +const GLIBC = 'glibc'; + +/** + * A Regexp constant to get the GLIBC Version. + * @type {string} + */ +const RE_GLIBC_VERSION = /LIBC[a-z0-9 \-).]*?(\d+\.\d+)/i; + +/** + * A String constant containing the value `musl`. 
+ * @type {string} + * @public + */ +const MUSL = 'musl'; + +const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-'); + +const familyFromReport = () => { + const report = getReport(); + if (report.header && report.header.glibcVersionRuntime) { + return GLIBC; + } + if (Array.isArray(report.sharedObjects)) { + if (report.sharedObjects.some(isFileMusl)) { + return MUSL; + } + } + return null; +}; + +const familyFromCommand = (out) => { + const [getconf, ldd1] = out.split(/[\r\n]+/); + if (getconf && getconf.includes(GLIBC)) { + return GLIBC; + } + if (ldd1 && ldd1.includes(MUSL)) { + return MUSL; + } + return null; +}; + +const familyFromInterpreterPath = (path) => { + if (path) { + if (path.includes('/ld-musl-')) { + return MUSL; + } else if (path.includes('/ld-linux-')) { + return GLIBC; + } + } + return null; +}; + +const getFamilyFromLddContent = (content) => { + content = content.toString(); + if (content.includes('musl')) { + return MUSL; + } + if (content.includes('GNU C Library')) { + return GLIBC; + } + return null; +}; + +const familyFromFilesystem = async () => { + if (cachedFamilyFilesystem !== undefined) { + return cachedFamilyFilesystem; + } + cachedFamilyFilesystem = null; + try { + const lddContent = await readFile(LDD_PATH); + cachedFamilyFilesystem = getFamilyFromLddContent(lddContent); + } catch (e) {} + return cachedFamilyFilesystem; +}; + +const familyFromFilesystemSync = () => { + if (cachedFamilyFilesystem !== undefined) { + return cachedFamilyFilesystem; + } + cachedFamilyFilesystem = null; + try { + const lddContent = readFileSync(LDD_PATH); + cachedFamilyFilesystem = getFamilyFromLddContent(lddContent); + } catch (e) {} + return cachedFamilyFilesystem; +}; + +const familyFromInterpreter = async () => { + if (cachedFamilyInterpreter !== undefined) { + return cachedFamilyInterpreter; + } + cachedFamilyInterpreter = null; + try { + const selfContent = await readFile(SELF_PATH); + const path = interpreterPath(selfContent); + cachedFamilyInterpreter = familyFromInterpreterPath(path); + } catch (e) {} + return cachedFamilyInterpreter; +}; + +const familyFromInterpreterSync = () => { + if (cachedFamilyInterpreter !== undefined) { + return cachedFamilyInterpreter; + } + cachedFamilyInterpreter = null; + try { + const selfContent = readFileSync(SELF_PATH); + const path = interpreterPath(selfContent); + cachedFamilyInterpreter = familyFromInterpreterPath(path); + } catch (e) {} + return cachedFamilyInterpreter; +}; + +/** + * Resolves with the libc family when it can be determined, `null` otherwise. + * @returns {Promise} + */ +const family = async () => { + let family = null; + if (isLinux()) { + family = await familyFromInterpreter(); + if (!family) { + family = await familyFromFilesystem(); + if (!family) { + family = familyFromReport(); + } + if (!family) { + const out = await safeCommand(); + family = familyFromCommand(out); + } + } + } + return family; +}; + +/** + * Returns the libc family when it can be determined, `null` otherwise. + * @returns {?string} + */ +const familySync = () => { + let family = null; + if (isLinux()) { + family = familyFromInterpreterSync(); + if (!family) { + family = familyFromFilesystemSync(); + if (!family) { + family = familyFromReport(); + } + if (!family) { + const out = safeCommandSync(); + family = familyFromCommand(out); + } + } + } + return family; +}; + +/** + * Resolves `true` only when the platform is Linux and the libc family is not `glibc`. 
+ * @returns {Promise} + */ +const isNonGlibcLinux = async () => isLinux() && await family() !== GLIBC; + +/** + * Returns `true` only when the platform is Linux and the libc family is not `glibc`. + * @returns {boolean} + */ +const isNonGlibcLinuxSync = () => isLinux() && familySync() !== GLIBC; + +const versionFromFilesystem = async () => { + if (cachedVersionFilesystem !== undefined) { + return cachedVersionFilesystem; + } + cachedVersionFilesystem = null; + try { + const lddContent = await readFile(LDD_PATH); + const versionMatch = lddContent.match(RE_GLIBC_VERSION); + if (versionMatch) { + cachedVersionFilesystem = versionMatch[1]; + } + } catch (e) {} + return cachedVersionFilesystem; +}; + +const versionFromFilesystemSync = () => { + if (cachedVersionFilesystem !== undefined) { + return cachedVersionFilesystem; + } + cachedVersionFilesystem = null; + try { + const lddContent = readFileSync(LDD_PATH); + const versionMatch = lddContent.match(RE_GLIBC_VERSION); + if (versionMatch) { + cachedVersionFilesystem = versionMatch[1]; + } + } catch (e) {} + return cachedVersionFilesystem; +}; + +const versionFromReport = () => { + const report = getReport(); + if (report.header && report.header.glibcVersionRuntime) { + return report.header.glibcVersionRuntime; + } + return null; +}; + +const versionSuffix = (s) => s.trim().split(/\s+/)[1]; + +const versionFromCommand = (out) => { + const [getconf, ldd1, ldd2] = out.split(/[\r\n]+/); + if (getconf && getconf.includes(GLIBC)) { + return versionSuffix(getconf); + } + if (ldd1 && ldd2 && ldd1.includes(MUSL)) { + return versionSuffix(ldd2); + } + return null; +}; + +/** + * Resolves with the libc version when it can be determined, `null` otherwise. + * @returns {Promise} + */ +const version = async () => { + let version = null; + if (isLinux()) { + version = await versionFromFilesystem(); + if (!version) { + version = versionFromReport(); + } + if (!version) { + const out = await safeCommand(); + version = versionFromCommand(out); + } + } + return version; +}; + +/** + * Returns the libc version when it can be determined, `null` otherwise. + * @returns {?string} + */ +const versionSync = () => { + let version = null; + if (isLinux()) { + version = versionFromFilesystemSync(); + if (!version) { + version = versionFromReport(); + } + if (!version) { + const out = safeCommandSync(); + version = versionFromCommand(out); + } + } + return version; +}; + +module.exports = { + GLIBC, + MUSL, + family, + familySync, + isNonGlibcLinux, + isNonGlibcLinuxSync, + version, + versionSync +}; diff --git a/miniprogram/node_modules/detect-libc/lib/elf.js b/miniprogram/node_modules/detect-libc/lib/elf.js new file mode 100644 index 00000000..aa166aa2 --- /dev/null +++ b/miniprogram/node_modules/detect-libc/lib/elf.js @@ -0,0 +1,39 @@ +// Copyright 2017 Lovell Fuller and others. 
+// SPDX-License-Identifier: Apache-2.0 + +'use strict'; + +const interpreterPath = (elf) => { + if (elf.length < 64) { + return null; + } + if (elf.readUInt32BE(0) !== 0x7F454C46) { + // Unexpected magic bytes + return null; + } + if (elf.readUInt8(4) !== 2) { + // Not a 64-bit ELF + return null; + } + if (elf.readUInt8(5) !== 1) { + // Not little-endian + return null; + } + const offset = elf.readUInt32LE(32); + const size = elf.readUInt16LE(54); + const count = elf.readUInt16LE(56); + for (let i = 0; i < count; i++) { + const headerOffset = offset + (i * size); + const type = elf.readUInt32LE(headerOffset); + if (type === 3) { + const fileOffset = elf.readUInt32LE(headerOffset + 8); + const fileSize = elf.readUInt32LE(headerOffset + 32); + return elf.subarray(fileOffset, fileOffset + fileSize).toString().replace(/\0.*$/g, ''); + } + } + return null; +}; + +module.exports = { + interpreterPath +}; diff --git a/miniprogram/node_modules/detect-libc/lib/filesystem.js b/miniprogram/node_modules/detect-libc/lib/filesystem.js new file mode 100644 index 00000000..4c2443cc --- /dev/null +++ b/miniprogram/node_modules/detect-libc/lib/filesystem.js @@ -0,0 +1,51 @@ +// Copyright 2017 Lovell Fuller and others. +// SPDX-License-Identifier: Apache-2.0 + +'use strict'; + +const fs = require('fs'); + +const LDD_PATH = '/usr/bin/ldd'; +const SELF_PATH = '/proc/self/exe'; +const MAX_LENGTH = 2048; + +/** + * Read the content of a file synchronous + * + * @param {string} path + * @returns {Buffer} + */ +const readFileSync = (path) => { + const fd = fs.openSync(path, 'r'); + const buffer = Buffer.alloc(MAX_LENGTH); + const bytesRead = fs.readSync(fd, buffer, 0, MAX_LENGTH, 0); + fs.close(fd, () => {}); + return buffer.subarray(0, bytesRead); +}; + +/** + * Read the content of a file + * + * @param {string} path + * @returns {Promise} + */ +const readFile = (path) => new Promise((resolve, reject) => { + fs.open(path, 'r', (err, fd) => { + if (err) { + reject(err); + } else { + const buffer = Buffer.alloc(MAX_LENGTH); + fs.read(fd, buffer, 0, MAX_LENGTH, 0, (_, bytesRead) => { + resolve(buffer.subarray(0, bytesRead)); + fs.close(fd, () => {}); + }); + } + }); +}); + +module.exports = { + LDD_PATH, + SELF_PATH, + readFileSync, + readFile +}; diff --git a/miniprogram/node_modules/detect-libc/lib/process.js b/miniprogram/node_modules/detect-libc/lib/process.js new file mode 100644 index 00000000..ee78ad26 --- /dev/null +++ b/miniprogram/node_modules/detect-libc/lib/process.js @@ -0,0 +1,24 @@ +// Copyright 2017 Lovell Fuller and others. 
+// SPDX-License-Identifier: Apache-2.0 + +'use strict'; + +const isLinux = () => process.platform === 'linux'; + +let report = null; +const getReport = () => { + if (!report) { + /* istanbul ignore next */ + if (isLinux() && process.report) { + const orig = process.report.excludeNetwork; + process.report.excludeNetwork = true; + report = process.report.getReport(); + process.report.excludeNetwork = orig; + } else { + report = {}; + } + } + return report; +}; + +module.exports = { isLinux, getReport }; diff --git a/miniprogram/node_modules/detect-libc/package.json b/miniprogram/node_modules/detect-libc/package.json new file mode 100644 index 00000000..36d0f2be --- /dev/null +++ b/miniprogram/node_modules/detect-libc/package.json @@ -0,0 +1,44 @@ +{ + "name": "detect-libc", + "version": "2.1.2", + "description": "Node.js module to detect the C standard library (libc) implementation family and version", + "main": "lib/detect-libc.js", + "files": [ + "lib/", + "index.d.ts" + ], + "scripts": { + "test": "semistandard && nyc --reporter=text --check-coverage --branches=100 ava test/unit.js", + "changelog": "conventional-changelog -i CHANGELOG.md -s", + "bench": "node benchmark/detect-libc", + "bench:calls": "node benchmark/call-familySync.js && sleep 1 && node benchmark/call-isNonGlibcLinuxSync.js && sleep 1 && node benchmark/call-versionSync.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/lovell/detect-libc.git" + }, + "keywords": [ + "libc", + "glibc", + "musl" + ], + "author": "Lovell Fuller ", + "contributors": [ + "Niklas Salmoukas ", + "Vinícius Lourenço " + ], + "license": "Apache-2.0", + "devDependencies": { + "ava": "^2.4.0", + "benchmark": "^2.1.4", + "conventional-changelog-cli": "^5.0.0", + "eslint-config-standard": "^13.0.1", + "nyc": "^15.1.0", + "proxyquire": "^2.1.3", + "semistandard": "^14.2.3" + }, + "engines": { + "node": ">=8" + }, + "types": "index.d.ts" +} diff --git a/miniprogram/node_modules/end-of-stream/LICENSE b/miniprogram/node_modules/end-of-stream/LICENSE new file mode 100644 index 00000000..757562ec --- /dev/null +++ b/miniprogram/node_modules/end-of-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/miniprogram/node_modules/end-of-stream/README.md b/miniprogram/node_modules/end-of-stream/README.md new file mode 100644 index 00000000..857b14bd --- /dev/null +++ b/miniprogram/node_modules/end-of-stream/README.md @@ -0,0 +1,54 @@ +# end-of-stream + +A node module that calls a callback when a readable/writable/duplex stream has completed or failed. + + npm install end-of-stream + +[![Build status](https://travis-ci.org/mafintosh/end-of-stream.svg?branch=master)](https://travis-ci.org/mafintosh/end-of-stream) + +## Usage + +Simply pass a stream and a callback to the `eos`. +Both legacy streams, streams2 and stream3 are supported. + +``` js +var eos = require('end-of-stream'); + +eos(readableStream, function(err) { + // this will be set to the stream instance + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended', this === readableStream); +}); + +eos(writableStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished', this === writableStream); +}); + +eos(duplexStream, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended and finished', this === duplexStream); +}); + +eos(duplexStream, {readable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has finished but might still be readable'); +}); + +eos(duplexStream, {writable:false}, function(err) { + if (err) return console.log('stream had an error or closed early'); + console.log('stream has ended but might still be writable'); +}); + +eos(readableStream, {error:false}, function(err) { + // do not treat emit('error', err) as a end-of-stream +}); +``` + +## License + +MIT + +## Related + +`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. diff --git a/miniprogram/node_modules/end-of-stream/index.js b/miniprogram/node_modules/end-of-stream/index.js new file mode 100644 index 00000000..7ce47e95 --- /dev/null +++ b/miniprogram/node_modules/end-of-stream/index.js @@ -0,0 +1,96 @@ +var once = require('once'); + +var noop = function() {}; + +var qnt = global.Bare ? queueMicrotask : process.nextTick.bind(process); + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + qnt(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; diff --git a/miniprogram/node_modules/end-of-stream/package.json b/miniprogram/node_modules/end-of-stream/package.json new file mode 100644 index 00000000..0b530cdf --- /dev/null +++ b/miniprogram/node_modules/end-of-stream/package.json @@ -0,0 +1,37 @@ +{ + "name": "end-of-stream", + "version": "1.4.5", + "description": "Call a callback when a readable/writable/duplex stream has completed or failed.", + "repository": { + "type": "git", + "url": "git://github.com/mafintosh/end-of-stream.git" + }, + "dependencies": { + "once": "^1.4.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "stream", + "streams", + "callback", + "finish", + "close", + "end", + "wait" + ], + "bugs": { + "url": "https://github.com/mafintosh/end-of-stream/issues" + }, + "homepage": "https://github.com/mafintosh/end-of-stream", + "main": "index.js", + "author": "Mathias Buus ", + "license": "MIT", + "devDependencies": { + "tape": "^4.11.0" + } +} diff --git a/miniprogram/node_modules/expand-template/.travis.yml b/miniprogram/node_modules/expand-template/.travis.yml new file mode 100644 index 00000000..1335a770 --- /dev/null +++ b/miniprogram/node_modules/expand-template/.travis.yml @@ -0,0 +1,6 @@ +language: node_js + +node_js: + - 6 + - 8 + - 10 diff --git a/miniprogram/node_modules/expand-template/LICENSE b/miniprogram/node_modules/expand-template/LICENSE new file mode 100644 index 00000000..814aef41 --- /dev/null +++ b/miniprogram/node_modules/expand-template/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Lars-Magnus Skog + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/miniprogram/node_modules/expand-template/README.md b/miniprogram/node_modules/expand-template/README.md new file mode 100644 index 00000000..b98aa480 --- /dev/null +++ b/miniprogram/node_modules/expand-template/README.md @@ -0,0 +1,43 @@ +# expand-template + +> Expand placeholders in a template string. + +[![npm](https://img.shields.io/npm/v/expand-template.svg)](https://www.npmjs.com/package/expand-template) +![Node version](https://img.shields.io/node/v/expand-template.svg) +[![Build Status](https://travis-ci.org/ralphtheninja/expand-template.svg?branch=master)](https://travis-ci.org/ralphtheninja/expand-template) +[![JavaScript Style Guide](https://img.shields.io/badge/code_style-standard-brightgreen.svg)](https://standardjs.com) + +## Install + +``` +$ npm i expand-template -S +``` + +## Usage + +Default functionality expands templates using `{}` as separators for string placeholders. + +```js +var expand = require('expand-template')() +var template = '{foo}/{foo}/{bar}/{bar}' +console.log(expand(template, { + foo: 'BAR', + bar: 'FOO' +})) +// -> BAR/BAR/FOO/FOO +``` + +Custom separators: + +```js +var expand = require('expand-template')({ sep: '[]' }) +var template = '[foo]/[foo]/[bar]/[bar]' +console.log(expand(template, { + foo: 'BAR', + bar: 'FOO' +})) +// -> BAR/BAR/FOO/FOO +``` + +## License +All code, unless stated otherwise, is dual-licensed under [`WTFPL`](http://www.wtfpl.net/txt/copying/) and [`MIT`](https://opensource.org/licenses/MIT). diff --git a/miniprogram/node_modules/expand-template/index.js b/miniprogram/node_modules/expand-template/index.js new file mode 100644 index 00000000..e182837c --- /dev/null +++ b/miniprogram/node_modules/expand-template/index.js @@ -0,0 +1,26 @@ +module.exports = function (opts) { + var sep = opts ? 
opts.sep : '{}' + var len = sep.length + + var whitespace = '\\s*' + var left = escape(sep.substring(0, len / 2)) + whitespace + var right = whitespace + escape(sep.substring(len / 2, len)) + + return function (template, values) { + Object.keys(values).forEach(function (key) { + var value = String(values[key]).replace(/\$/g, '$$$$') + template = template.replace(regExp(key), value) + }) + return template + } + + function escape (s) { + return [].map.call(s, function (char) { + return '\\' + char + }).join('') + } + + function regExp (key) { + return new RegExp(left + key + right, 'g') + } +} diff --git a/miniprogram/node_modules/expand-template/package.json b/miniprogram/node_modules/expand-template/package.json new file mode 100644 index 00000000..9a09656c --- /dev/null +++ b/miniprogram/node_modules/expand-template/package.json @@ -0,0 +1,29 @@ +{ + "name": "expand-template", + "version": "2.0.3", + "description": "Expand placeholders in a template string", + "main": "index.js", + "repository": { + "type": "git", + "url": "https://github.com/ralphtheninja/expand-template.git" + }, + "homepage": "https://github.com/ralphtheninja/expand-template", + "scripts": { + "test": "tape test.js && standard" + }, + "keywords": [ + "template", + "expand", + "replace" + ], + "author": "LM ", + "license": "(MIT OR WTFPL)", + "dependencies": {}, + "devDependencies": { + "standard": "^12.0.0", + "tape": "^4.2.2" + }, + "engines": { + "node": ">=6" + } +} diff --git a/miniprogram/node_modules/expand-template/test.js b/miniprogram/node_modules/expand-template/test.js new file mode 100644 index 00000000..ba6ed871 --- /dev/null +++ b/miniprogram/node_modules/expand-template/test.js @@ -0,0 +1,67 @@ +var test = require('tape') +var Expand = require('./') + +test('default expands {} placeholders', function (t) { + var expand = Expand() + t.equal(typeof expand, 'function', 'is a function') + t.equal(expand('{foo}/{bar}', { + foo: 'BAR', bar: 'FOO' + }), 'BAR/FOO') + t.equal(expand('{foo}{foo}{foo}', { + foo: 'FOO' + }), 'FOOFOOFOO', 'expands one placeholder many times') + t.end() +}) + +test('support for custom separators', function (t) { + var expand = Expand({ sep: '[]' }) + t.equal(expand('[foo]/[bar]', { + foo: 'BAR', bar: 'FOO' + }), 'BAR/FOO') + t.equal(expand('[foo][foo][foo]', { + foo: 'FOO' + }), 'FOOFOOFOO', 'expands one placeholder many times') + t.end() +}) + +test('support for longer custom separators', function (t) { + var expand = Expand({ sep: '[[]]' }) + t.equal(expand('[[foo]]/[[bar]]', { + foo: 'BAR', bar: 'FOO' + }), 'BAR/FOO') + t.equal(expand('[[foo]][[foo]][[foo]]', { + foo: 'FOO' + }), 'FOOFOOFOO', 'expands one placeholder many times') + t.end() +}) + +test('whitespace-insensitive', function (t) { + var expand = Expand({ sep: '[]' }) + t.equal(expand('[ foo ]/[ bar ]', { + foo: 'BAR', bar: 'FOO' + }), 'BAR/FOO') + t.equal(expand('[ foo ][ foo ][ foo]', { + foo: 'FOO' + }), 'FOOFOOFOO', 'expands one placeholder many times') + t.end() +}) + +test('dollar escape', function (t) { + var expand = Expand() + t.equal(expand('before {foo} after', { + foo: '$' + }), 'before $ after') + t.equal(expand('before {foo} after', { + foo: '$&' + }), 'before $& after') + t.equal(expand('before {foo} after', { + foo: '$`' + }), 'before $` after') + t.equal(expand('before {foo} after', { + foo: '$\'' + }), 'before $\' after') + t.equal(expand('before {foo} after', { + foo: '$0' + }), 'before $0 after') + t.end() +}) diff --git a/miniprogram/node_modules/fs-constants/LICENSE 
b/miniprogram/node_modules/fs-constants/LICENSE new file mode 100644 index 00000000..cb757e5d --- /dev/null +++ b/miniprogram/node_modules/fs-constants/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2018 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/miniprogram/node_modules/fs-constants/README.md b/miniprogram/node_modules/fs-constants/README.md new file mode 100644 index 00000000..62b33742 --- /dev/null +++ b/miniprogram/node_modules/fs-constants/README.md @@ -0,0 +1,26 @@ +# fs-constants + +Small module that allows you to get the fs constants across +Node and the browser. + +``` +npm install fs-constants +``` + +Previously you would use `require('constants')` for this in node but that has been +deprecated and changed to `require('fs').constants` which does not browserify. 
+ +This module uses `require('constants')` in the browser and `require('fs').constants` in node to work around this + + +## Usage + +``` js +var constants = require('fs-constants') + +console.log('constants:', constants) +``` + +## License + +MIT diff --git a/miniprogram/node_modules/fs-constants/browser.js b/miniprogram/node_modules/fs-constants/browser.js new file mode 100644 index 00000000..3c87638d --- /dev/null +++ b/miniprogram/node_modules/fs-constants/browser.js @@ -0,0 +1 @@ +module.exports = require('constants') diff --git a/miniprogram/node_modules/fs-constants/index.js b/miniprogram/node_modules/fs-constants/index.js new file mode 100644 index 00000000..2a3aadf3 --- /dev/null +++ b/miniprogram/node_modules/fs-constants/index.js @@ -0,0 +1 @@ +module.exports = require('fs').constants || require('constants') diff --git a/miniprogram/node_modules/fs-constants/package.json b/miniprogram/node_modules/fs-constants/package.json new file mode 100644 index 00000000..6f2b8f24 --- /dev/null +++ b/miniprogram/node_modules/fs-constants/package.json @@ -0,0 +1,19 @@ +{ + "name": "fs-constants", + "version": "1.0.0", + "description": "Require constants across node and the browser", + "main": "index.js", + "browser": "browser.js", + "dependencies": {}, + "devDependencies": {}, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/fs-constants.git" + }, + "author": "Mathias Buus (@mafintosh)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/fs-constants/issues" + }, + "homepage": "https://github.com/mafintosh/fs-constants" +} diff --git a/miniprogram/node_modules/github-from-package/.travis.yml b/miniprogram/node_modules/github-from-package/.travis.yml new file mode 100644 index 00000000..895dbd36 --- /dev/null +++ b/miniprogram/node_modules/github-from-package/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.6 + - 0.8 diff --git a/miniprogram/node_modules/github-from-package/LICENSE b/miniprogram/node_modules/github-from-package/LICENSE new file mode 100644 index 00000000..ee27ba4b --- /dev/null +++ b/miniprogram/node_modules/github-from-package/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/miniprogram/node_modules/github-from-package/example/package.json b/miniprogram/node_modules/github-from-package/example/package.json new file mode 100644 index 00000000..03494f48 --- /dev/null +++ b/miniprogram/node_modules/github-from-package/example/package.json @@ -0,0 +1,8 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : { + "type" : "git", + "url": "git@github.com:substack/beep-boop.git" + } +} diff --git a/miniprogram/node_modules/github-from-package/example/url.js b/miniprogram/node_modules/github-from-package/example/url.js new file mode 100644 index 00000000..138fb8a6 --- /dev/null +++ b/miniprogram/node_modules/github-from-package/example/url.js @@ -0,0 +1,3 @@ +var github = require('../'); +var url = github(require('./package.json')); +console.log(url); diff --git a/miniprogram/node_modules/github-from-package/index.js b/miniprogram/node_modules/github-from-package/index.js new file mode 100644 index 00000000..3d1d657b --- /dev/null +++ b/miniprogram/node_modules/github-from-package/index.js @@ -0,0 +1,17 @@ +module.exports = function (pkg) { + var m; + if (m = match(JSON.stringify(pkg.repository))) { + return m; + } + else if (m = match(JSON.stringify(pkg))) { + return m; + } + return undefined; +}; + +function match (str) { + var m = /\bgithub.com[:\/]([^\/"]+)\/([^\/"]+)/.exec(str); + if (m) { + return 'https://github.com/' + m[1] + '/' + m[2].replace(/\.git$/, ''); + } +} diff --git a/miniprogram/node_modules/github-from-package/package.json b/miniprogram/node_modules/github-from-package/package.json new file mode 100644 index 00000000..a3e240fe --- /dev/null +++ b/miniprogram/node_modules/github-from-package/package.json @@ -0,0 +1,30 @@ +{ + "name" : "github-from-package", + "version" : "0.0.0", + "description" : "return the github url from a package.json file", + "main" : "index.js", + "devDependencies" : { + "tap" : "~0.3.0", + "tape" : "~0.1.5" + }, + "scripts" : { + "test" : "tap test/*.js" + }, + "repository" : { + "type" : "git", + "url" : "git://github.com/substack/github-from-package.git" + }, + "homepage" : "https://github.com/substack/github-from-package", + "keywords" : [ + "github", + "package.json", + "npm", + "repository" + ], + "author" : { + "name" : "James Halliday", + "email" : "mail@substack.net", + "url" : "http://substack.net" + }, + "license" : "MIT" +} diff --git a/miniprogram/node_modules/github-from-package/readme.markdown b/miniprogram/node_modules/github-from-package/readme.markdown new file mode 100644 index 00000000..5ba397da --- /dev/null +++ b/miniprogram/node_modules/github-from-package/readme.markdown @@ -0,0 +1,53 @@ +# github-from-package + +return the github url from a package.json file + +[![build status](https://secure.travis-ci.org/substack/github-from-package.png)](http://travis-ci.org/substack/github-from-package) + +# example + +For the `./package.json` file: + +``` json +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : { + "type" : "git", + "url": "git@github.com:substack/beep-boop.git" + } +} +``` + +``` js +var github = require('github-from-package'); +var url = github(require('./package.json')); +console.log(url); +``` + +``` +https://github.com/substack/beep-boop +``` + +# methods + +``` js +var github = require('github-from-package') +``` + +## var url = github(pkg) + +Return the most likely github url from the package.json contents `pkg`. If no +github url can be determined, return `undefined`. 
+ +# install + +With [npm](https://npmjs.org) do: + +``` +npm install github-from-package +``` + +# license + +MIT diff --git a/miniprogram/node_modules/github-from-package/test/a.json b/miniprogram/node_modules/github-from-package/test/a.json new file mode 100644 index 00000000..03494f48 --- /dev/null +++ b/miniprogram/node_modules/github-from-package/test/a.json @@ -0,0 +1,8 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : { + "type" : "git", + "url": "git@github.com:substack/beep-boop.git" + } +} diff --git a/miniprogram/node_modules/github-from-package/test/b.json b/miniprogram/node_modules/github-from-package/test/b.json new file mode 100644 index 00000000..02093257 --- /dev/null +++ b/miniprogram/node_modules/github-from-package/test/b.json @@ -0,0 +1,5 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : "git@github.com:substack/beep-boop.git" +} diff --git a/miniprogram/node_modules/github-from-package/test/c.json b/miniprogram/node_modules/github-from-package/test/c.json new file mode 100644 index 00000000..65f6ddad --- /dev/null +++ b/miniprogram/node_modules/github-from-package/test/c.json @@ -0,0 +1,5 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : "https://github.com/substack/beep-boop.git" +} diff --git a/miniprogram/node_modules/github-from-package/test/d.json b/miniprogram/node_modules/github-from-package/test/d.json new file mode 100644 index 00000000..c61f3cd3 --- /dev/null +++ b/miniprogram/node_modules/github-from-package/test/d.json @@ -0,0 +1,7 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "repository" : { + "url": "https://github.com/substack/beep-boop" + } +} diff --git a/miniprogram/node_modules/github-from-package/test/e.json b/miniprogram/node_modules/github-from-package/test/e.json new file mode 100644 index 00000000..770b4384 --- /dev/null +++ b/miniprogram/node_modules/github-from-package/test/e.json @@ -0,0 +1,5 @@ +{ + "name": "beep-boop", + "version": "1.2.3", + "homepage": "https://github.com/substack/beep-boop/issues" +} diff --git a/miniprogram/node_modules/github-from-package/test/url.js b/miniprogram/node_modules/github-from-package/test/url.js new file mode 100644 index 00000000..d5a0a667 --- /dev/null +++ b/miniprogram/node_modules/github-from-package/test/url.js @@ -0,0 +1,19 @@ +var test = require('tape'); +var github = require('../'); +var packages = { + a : require('./a.json'), + b : require('./b.json'), + c : require('./c.json'), + d : require('./d.json'), + e : require('./e.json') +}; + +test(function (t) { + t.plan(5); + var url = 'https://github.com/substack/beep-boop'; + t.equal(url, github(packages.a), 'a.json comparison'); + t.equal(url, github(packages.b), 'b.json comparison'); + t.equal(url, github(packages.c), 'c.json comparison'); + t.equal(url, github(packages.d), 'd.json comparison'); + t.equal(url, github(packages.e), 'e.json comparison'); +}); diff --git a/miniprogram/node_modules/ieee754/LICENSE b/miniprogram/node_modules/ieee754/LICENSE new file mode 100644 index 00000000..5aac82c7 --- /dev/null +++ b/miniprogram/node_modules/ieee754/LICENSE @@ -0,0 +1,11 @@ +Copyright 2008 Fair Oaks Labs, Inc. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/miniprogram/node_modules/ieee754/README.md b/miniprogram/node_modules/ieee754/README.md new file mode 100644 index 00000000..cb7527b3 --- /dev/null +++ b/miniprogram/node_modules/ieee754/README.md @@ -0,0 +1,51 @@ +# ieee754 [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/ieee754/master.svg +[travis-url]: https://travis-ci.org/feross/ieee754 +[npm-image]: https://img.shields.io/npm/v/ieee754.svg +[npm-url]: https://npmjs.org/package/ieee754 +[downloads-image]: https://img.shields.io/npm/dm/ieee754.svg +[downloads-url]: https://npmjs.org/package/ieee754 +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +[![saucelabs][saucelabs-image]][saucelabs-url] + +[saucelabs-image]: https://saucelabs.com/browser-matrix/ieee754.svg +[saucelabs-url]: https://saucelabs.com/u/ieee754 + +### Read/write IEEE754 floating point numbers from/to a Buffer or array-like object. + +## install + +``` +npm install ieee754 +``` + +## methods + +`var ieee754 = require('ieee754')` + +The `ieee754` object has the following functions: + +``` +ieee754.read = function (buffer, offset, isLE, mLen, nBytes) +ieee754.write = function (buffer, value, offset, isLE, mLen, nBytes) +``` + +The arguments mean the following: + +- buffer = the buffer +- offset = offset into the buffer +- value = value to set (only for `write`) +- isLe = is little endian? +- mLen = mantissa length +- nBytes = number of bytes + +## what is ieee754? + +The IEEE Standard for Floating-Point Arithmetic (IEEE 754) is a technical standard for floating-point computation. [Read more](http://en.wikipedia.org/wiki/IEEE_floating_point). + +## license + +BSD 3 Clause. Copyright (c) 2008, Fair Oaks Labs, Inc. 
diff --git a/miniprogram/node_modules/ieee754/index.d.ts b/miniprogram/node_modules/ieee754/index.d.ts new file mode 100644 index 00000000..f1e43548 --- /dev/null +++ b/miniprogram/node_modules/ieee754/index.d.ts @@ -0,0 +1,10 @@ +declare namespace ieee754 { + export function read( + buffer: Uint8Array, offset: number, isLE: boolean, mLen: number, + nBytes: number): number; + export function write( + buffer: Uint8Array, value: number, offset: number, isLE: boolean, + mLen: number, nBytes: number): void; + } + + export = ieee754; \ No newline at end of file diff --git a/miniprogram/node_modules/ieee754/index.js b/miniprogram/node_modules/ieee754/index.js new file mode 100644 index 00000000..81d26c34 --- /dev/null +++ b/miniprogram/node_modules/ieee754/index.js @@ -0,0 +1,85 @@ +/*! ieee754. BSD-3-Clause License. Feross Aboukhadijeh */ +exports.read = function (buffer, offset, isLE, mLen, nBytes) { + var e, m + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var nBits = -7 + var i = isLE ? (nBytes - 1) : 0 + var d = isLE ? -1 : 1 + var s = buffer[offset + i] + + i += d + + e = s & ((1 << (-nBits)) - 1) + s >>= (-nBits) + nBits += eLen + for (; nBits > 0; e = (e * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + m = e & ((1 << (-nBits)) - 1) + e >>= (-nBits) + nBits += mLen + for (; nBits > 0; m = (m * 256) + buffer[offset + i], i += d, nBits -= 8) {} + + if (e === 0) { + e = 1 - eBias + } else if (e === eMax) { + return m ? NaN : ((s ? -1 : 1) * Infinity) + } else { + m = m + Math.pow(2, mLen) + e = e - eBias + } + return (s ? -1 : 1) * m * Math.pow(2, e - mLen) +} + +exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { + var e, m, c + var eLen = (nBytes * 8) - mLen - 1 + var eMax = (1 << eLen) - 1 + var eBias = eMax >> 1 + var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) + var i = isLE ? 0 : (nBytes - 1) + var d = isLE ? 1 : -1 + var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 + + value = Math.abs(value) + + if (isNaN(value) || value === Infinity) { + m = isNaN(value) ? 
1 : 0 + e = eMax + } else { + e = Math.floor(Math.log(value) / Math.LN2) + if (value * (c = Math.pow(2, -e)) < 1) { + e-- + c *= 2 + } + if (e + eBias >= 1) { + value += rt / c + } else { + value += rt * Math.pow(2, 1 - eBias) + } + if (value * c >= 2) { + e++ + c /= 2 + } + + if (e + eBias >= eMax) { + m = 0 + e = eMax + } else if (e + eBias >= 1) { + m = ((value * c) - 1) * Math.pow(2, mLen) + e = e + eBias + } else { + m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) + e = 0 + } + } + + for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} + + e = (e << mLen) | m + eLen += mLen + for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} + + buffer[offset + i - d] |= s * 128 +} diff --git a/miniprogram/node_modules/ieee754/package.json b/miniprogram/node_modules/ieee754/package.json new file mode 100644 index 00000000..7b238513 --- /dev/null +++ b/miniprogram/node_modules/ieee754/package.json @@ -0,0 +1,52 @@ +{ + "name": "ieee754", + "description": "Read/write IEEE754 floating point numbers from/to a Buffer or array-like object", + "version": "1.2.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "contributors": [ + "Romain Beauxis " + ], + "devDependencies": { + "airtap": "^3.0.0", + "standard": "*", + "tape": "^5.0.1" + }, + "keywords": [ + "IEEE 754", + "buffer", + "convert", + "floating point", + "ieee754" + ], + "license": "BSD-3-Clause", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/ieee754.git" + }, + "scripts": { + "test": "standard && npm run test-node && npm run test-browser", + "test-browser": "airtap -- test/*.js", + "test-browser-local": "airtap --local -- test/*.js", + "test-node": "tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/miniprogram/node_modules/inherits/LICENSE b/miniprogram/node_modules/inherits/LICENSE new file mode 100644 index 00000000..dea3013d --- /dev/null +++ b/miniprogram/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/miniprogram/node_modules/inherits/README.md b/miniprogram/node_modules/inherits/README.md new file mode 100644 index 00000000..b1c56658 --- /dev/null +++ b/miniprogram/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). 
+ +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. + +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. + +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. + +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. + +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/miniprogram/node_modules/inherits/inherits.js b/miniprogram/node_modules/inherits/inherits.js new file mode 100644 index 00000000..f71f2d93 --- /dev/null +++ b/miniprogram/node_modules/inherits/inherits.js @@ -0,0 +1,9 @@ +try { + var util = require('util'); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = require('./inherits_browser.js'); +} diff --git a/miniprogram/node_modules/inherits/inherits_browser.js b/miniprogram/node_modules/inherits/inherits_browser.js new file mode 100644 index 00000000..86bbb3dc --- /dev/null +++ b/miniprogram/node_modules/inherits/inherits_browser.js @@ -0,0 +1,27 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} diff --git a/miniprogram/node_modules/inherits/package.json b/miniprogram/node_modules/inherits/package.json new file mode 100644 index 00000000..37b4366b --- /dev/null +++ b/miniprogram/node_modules/inherits/package.json @@ -0,0 +1,29 @@ +{ + "name": "inherits", + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "version": "2.0.4", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "main": "./inherits.js", + "browser": "./inherits_browser.js", + "repository": 
"git://github.com/isaacs/inherits", + "license": "ISC", + "scripts": { + "test": "tap" + }, + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ] +} diff --git a/miniprogram/node_modules/ini/LICENSE b/miniprogram/node_modules/ini/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/miniprogram/node_modules/ini/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/miniprogram/node_modules/ini/README.md b/miniprogram/node_modules/ini/README.md new file mode 100644 index 00000000..33df2582 --- /dev/null +++ b/miniprogram/node_modules/ini/README.md @@ -0,0 +1,102 @@ +An ini format parser and serializer for node. + +Sections are treated as nested objects. Items before the first +heading are saved on the object directly. + +## Usage + +Consider an ini-file `config.ini` that looks like this: + + ; this comment is being ignored + scope = global + + [database] + user = dbuser + password = dbpassword + database = use_this_database + + [paths.default] + datadir = /var/lib/data + array[] = first value + array[] = second value + array[] = third value + +You can read, manipulate and write the ini-file like so: + + var fs = require('fs') + , ini = require('ini') + + var config = ini.parse(fs.readFileSync('./config.ini', 'utf-8')) + + config.scope = 'local' + config.database.database = 'use_another_database' + config.paths.default.tmpdir = '/tmp' + delete config.paths.default.datadir + config.paths.default.array.push('fourth value') + + fs.writeFileSync('./config_modified.ini', ini.stringify(config, { section: 'section' })) + +This will result in a file called `config_modified.ini` being written +to the filesystem with the following content: + + [section] + scope=local + [section.database] + user=dbuser + password=dbpassword + database=use_another_database + [section.paths.default] + tmpdir=/tmp + array[]=first value + array[]=second value + array[]=third value + array[]=fourth value + + +## API + +### decode(inistring) + +Decode the ini-style formatted `inistring` into a nested object. + +### parse(inistring) + +Alias for `decode(inistring)` + +### encode(object, [options]) + +Encode the object `object` into an ini-style formatted string. If the +optional parameter `section` is given, then all top-level properties +of the object are put into this section and the `section`-string is +prepended to all sub-sections, see the usage example above. + +The `options` object may contain the following: + +* `section` A string which will be the first `section` in the encoded + ini data. Defaults to none. +* `whitespace` Boolean to specify whether to put whitespace around the + `=` character. 
By default, whitespace is omitted, to be friendly to + some persnickety old parsers that don't tolerate it well. But some + find that it's more human-readable and pretty with the whitespace. + +For backwards compatibility reasons, if a `string` options is passed +in, then it is assumed to be the `section` value. + +### stringify(object, [options]) + +Alias for `encode(object, [options])` + +### safe(val) + +Escapes the string `val` such that it is safe to be used as a key or +value in an ini-file. Basically escapes quotes. For example + + ini.safe('"unsafe string"') + +would result in + + "\"unsafe string\"" + +### unsafe(val) + +Unescapes the string `val` diff --git a/miniprogram/node_modules/ini/ini.js b/miniprogram/node_modules/ini/ini.js new file mode 100644 index 00000000..b576f08d --- /dev/null +++ b/miniprogram/node_modules/ini/ini.js @@ -0,0 +1,206 @@ +exports.parse = exports.decode = decode + +exports.stringify = exports.encode = encode + +exports.safe = safe +exports.unsafe = unsafe + +var eol = typeof process !== 'undefined' && + process.platform === 'win32' ? '\r\n' : '\n' + +function encode (obj, opt) { + var children = [] + var out = '' + + if (typeof opt === 'string') { + opt = { + section: opt, + whitespace: false, + } + } else { + opt = opt || {} + opt.whitespace = opt.whitespace === true + } + + var separator = opt.whitespace ? ' = ' : '=' + + Object.keys(obj).forEach(function (k, _, __) { + var val = obj[k] + if (val && Array.isArray(val)) { + val.forEach(function (item) { + out += safe(k + '[]') + separator + safe(item) + '\n' + }) + } else if (val && typeof val === 'object') + children.push(k) + else + out += safe(k) + separator + safe(val) + eol + }) + + if (opt.section && out.length) + out = '[' + safe(opt.section) + ']' + eol + out + + children.forEach(function (k, _, __) { + var nk = dotSplit(k).join('\\.') + var section = (opt.section ? opt.section + '.' : '') + nk + var child = encode(obj[k], { + section: section, + whitespace: opt.whitespace, + }) + if (out.length && child.length) + out += eol + + out += child + }) + + return out +} + +function dotSplit (str) { + return str.replace(/\1/g, '\u0002LITERAL\\1LITERAL\u0002') + .replace(/\\\./g, '\u0001') + .split(/\./).map(function (part) { + return part.replace(/\1/g, '\\.') + .replace(/\2LITERAL\\1LITERAL\2/g, '\u0001') + }) +} + +function decode (str) { + var out = {} + var p = out + var section = null + // section |key = value + var re = /^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i + var lines = str.split(/[\r\n]+/g) + + lines.forEach(function (line, _, __) { + if (!line || line.match(/^\s*[;#]/)) + return + var match = line.match(re) + if (!match) + return + if (match[1] !== undefined) { + section = unsafe(match[1]) + if (section === '__proto__') { + // not allowed + // keep parsing the section, but don't attach it. + p = {} + return + } + p = out[section] = out[section] || {} + return + } + var key = unsafe(match[2]) + if (key === '__proto__') + return + var value = match[3] ? 
unsafe(match[4]) : true + switch (value) { + case 'true': + case 'false': + case 'null': value = JSON.parse(value) + } + + // Convert keys with '[]' suffix to an array + if (key.length > 2 && key.slice(-2) === '[]') { + key = key.substring(0, key.length - 2) + if (key === '__proto__') + return + if (!p[key]) + p[key] = [] + else if (!Array.isArray(p[key])) + p[key] = [p[key]] + } + + // safeguard against resetting a previously defined + // array by accidentally forgetting the brackets + if (Array.isArray(p[key])) + p[key].push(value) + else + p[key] = value + }) + + // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}} + // use a filter to return the keys that have to be deleted. + Object.keys(out).filter(function (k, _, __) { + if (!out[k] || + typeof out[k] !== 'object' || + Array.isArray(out[k])) + return false + + // see if the parent section is also an object. + // if so, add it to that, and mark this one for deletion + var parts = dotSplit(k) + var p = out + var l = parts.pop() + var nl = l.replace(/\\\./g, '.') + parts.forEach(function (part, _, __) { + if (part === '__proto__') + return + if (!p[part] || typeof p[part] !== 'object') + p[part] = {} + p = p[part] + }) + if (p === out && nl === l) + return false + + p[nl] = out[k] + return true + }).forEach(function (del, _, __) { + delete out[del] + }) + + return out +} + +function isQuoted (val) { + return (val.charAt(0) === '"' && val.slice(-1) === '"') || + (val.charAt(0) === "'" && val.slice(-1) === "'") +} + +function safe (val) { + return (typeof val !== 'string' || + val.match(/[=\r\n]/) || + val.match(/^\[/) || + (val.length > 1 && + isQuoted(val)) || + val !== val.trim()) + ? JSON.stringify(val) + : val.replace(/;/g, '\\;').replace(/#/g, '\\#') +} + +function unsafe (val, doUnesc) { + val = (val || '').trim() + if (isQuoted(val)) { + // remove the single quotes before calling JSON.parse + if (val.charAt(0) === "'") + val = val.substr(1, val.length - 2) + + try { + val = JSON.parse(val) + } catch (_) {} + } else { + // walk the val to find the first not-escaped ; character + var esc = false + var unesc = '' + for (var i = 0, l = val.length; i < l; i++) { + var c = val.charAt(i) + if (esc) { + if ('\\;#'.indexOf(c) !== -1) + unesc += c + else + unesc += '\\' + c + + esc = false + } else if (';#'.indexOf(c) !== -1) + break + else if (c === '\\') + esc = true + else + unesc += c + } + if (esc) + unesc += '\\' + + return unesc.trim() + } + return val +} diff --git a/miniprogram/node_modules/ini/package.json b/miniprogram/node_modules/ini/package.json new file mode 100644 index 00000000..c830a355 --- /dev/null +++ b/miniprogram/node_modules/ini/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "ini", + "description": "An ini encoder/decoder for node", + "version": "1.3.8", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/ini.git" + }, + "main": "ini.js", + "scripts": { + "eslint": "eslint", + "lint": "npm run eslint -- ini.js test/*.js", + "lintfix": "npm run lint -- --fix", + "test": "tap", + "posttest": "npm run lint", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "devDependencies": { + "eslint": "^7.9.0", + "eslint-plugin-import": "^2.22.0", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^4.2.1", + "eslint-plugin-standard": "^4.0.1", + "tap": "14" + }, + "license": "ISC", + "files": [ + "ini.js" + ] +} diff --git a/miniprogram/node_modules/mimic-response/index.d.ts b/miniprogram/node_modules/mimic-response/index.d.ts new file mode 100644 index 00000000..65a51e98 --- /dev/null +++ b/miniprogram/node_modules/mimic-response/index.d.ts @@ -0,0 +1,17 @@ +import {IncomingMessage} from 'http'; + +/** +Mimic a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage) + +Makes `toStream` include the properties from `fromStream`. + +@param fromStream - The stream to copy the properties from. +@param toStream - The stream to copy the properties to. +@return The same object as `toStream`. +*/ +declare function mimicResponse( + fromStream: IncomingMessage, // eslint-disable-line @typescript-eslint/prefer-readonly-parameter-types + toStream: T, +): T & IncomingMessage; + +export = mimicResponse; diff --git a/miniprogram/node_modules/mimic-response/index.js b/miniprogram/node_modules/mimic-response/index.js new file mode 100644 index 00000000..404e1111 --- /dev/null +++ b/miniprogram/node_modules/mimic-response/index.js @@ -0,0 +1,77 @@ +'use strict'; + +// We define these manually to ensure they're always copied +// even if they would move up the prototype chain +// https://nodejs.org/api/http.html#http_class_http_incomingmessage +const knownProperties = [ + 'aborted', + 'complete', + 'headers', + 'httpVersion', + 'httpVersionMinor', + 'httpVersionMajor', + 'method', + 'rawHeaders', + 'rawTrailers', + 'setTimeout', + 'socket', + 'statusCode', + 'statusMessage', + 'trailers', + 'url' +]; + +module.exports = (fromStream, toStream) => { + if (toStream._readableState.autoDestroy) { + throw new Error('The second stream must have the `autoDestroy` option set to `false`'); + } + + const fromProperties = new Set(Object.keys(fromStream).concat(knownProperties)); + + const properties = {}; + + for (const property of fromProperties) { + // Don't overwrite existing properties. + if (property in toStream) { + continue; + } + + properties[property] = { + get() { + const value = fromStream[property]; + const isFunction = typeof value === 'function'; + + return isFunction ? 
value.bind(fromStream) : value; + }, + set(value) { + fromStream[property] = value; + }, + enumerable: true, + configurable: false + }; + } + + Object.defineProperties(toStream, properties); + + fromStream.once('aborted', () => { + toStream.destroy(); + + toStream.emit('aborted'); + }); + + fromStream.once('close', () => { + if (fromStream.complete) { + if (toStream.readable) { + toStream.once('end', () => { + toStream.emit('close'); + }); + } else { + toStream.emit('close'); + } + } else { + toStream.emit('close'); + } + }); + + return toStream; +}; diff --git a/miniprogram/node_modules/mimic-response/license b/miniprogram/node_modules/mimic-response/license new file mode 100644 index 00000000..fa7ceba3 --- /dev/null +++ b/miniprogram/node_modules/mimic-response/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/miniprogram/node_modules/mimic-response/package.json b/miniprogram/node_modules/mimic-response/package.json new file mode 100644 index 00000000..d478b0f3 --- /dev/null +++ b/miniprogram/node_modules/mimic-response/package.json @@ -0,0 +1,42 @@ +{ + "name": "mimic-response", + "version": "3.1.0", + "description": "Mimic a Node.js HTTP response stream", + "license": "MIT", + "repository": "sindresorhus/mimic-response", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.d.ts", + "index.js" + ], + "keywords": [ + "mimic", + "response", + "stream", + "http", + "https", + "request", + "get", + "core" + ], + "devDependencies": { + "@types/node": "^14.0.1", + "ava": "^2.4.0", + "create-test-server": "^2.4.0", + "p-event": "^4.1.0", + "pify": "^5.0.0", + "tsd": "^0.11.0", + "xo": "^0.30.0" + } +} diff --git a/miniprogram/node_modules/mimic-response/readme.md b/miniprogram/node_modules/mimic-response/readme.md new file mode 100644 index 00000000..e968620a --- /dev/null +++ b/miniprogram/node_modules/mimic-response/readme.md @@ -0,0 +1,78 @@ +# mimic-response [![Build Status](https://travis-ci.com/sindresorhus/mimic-response.svg?branch=master)](https://travis-ci.com/sindresorhus/mimic-response) + +> Mimic a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage) + +## Install + +``` +$ npm install mimic-response +``` + +## Usage + +```js +const stream = require('stream'); +const mimicResponse = require('mimic-response'); + +const responseStream = getHttpResponseStream(); +const myStream = new stream.PassThrough(); + +mimicResponse(responseStream, myStream); + +console.log(myStream.statusCode); +//=> 200 +``` + +## API + +### mimicResponse(from, to) + +**Note #1:** The `from.destroy(error)` function is not proxied. You have to call it manually: + +```js +const stream = require('stream'); +const mimicResponse = require('mimic-response'); + +const responseStream = getHttpResponseStream(); + +const myStream = new stream.PassThrough({ + destroy(error, callback) { + responseStream.destroy(); + + callback(error); + } +}); + +myStream.destroy(); +``` + +Please note that `myStream` and `responseStream` never throws. The error is passed to the request instead. + +#### from + +Type: `Stream` + +[Node.js HTTP response stream.](https://nodejs.org/api/http.html#http_class_http_incomingmessage) + +#### to + +Type: `Stream` + +Any stream. + +## Related + +- [mimic-fn](https://github.com/sindresorhus/mimic-fn) - Make a function mimic another one +- [clone-response](https://github.com/lukechilds/clone-response) - Clone a Node.js response stream + +--- + +
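
Since the readme's two snippets assume a `getHttpResponseStream` helper, here is a self-contained variant under the same API: a throwaway local HTTP server stands in for the real response, `autoDestroy: false` satisfies the check in `index.js` above, and `destroy` is forwarded manually as Note #1 recommends. The server setup is illustrative, not part of the package:

```js
'use strict';
const http = require('http');
const {PassThrough} = require('stream');
const mimicResponse = require('mimic-response');

// Throwaway server so the example runs without a network.
const server = http.createServer((request, response) => {
	response.end('hello');
});

server.listen(0, () => {
	http.get({port: server.address().port}, responseStream => {
		const myStream = new PassThrough({
			autoDestroy: false, // required by the check in index.js above
			destroy(error, callback) {
				responseStream.destroy(); // destroy() is not proxied, forward it manually
				callback(error);
			}
		});

		mimicResponse(responseStream, myStream);
		console.log(myStream.statusCode);
		//=> 200

		responseStream.pipe(myStream).resume();
		myStream.on('end', () => server.close());
	});
});
```
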
diff --git a/miniprogram/node_modules/minimist/.eslintrc b/miniprogram/node_modules/minimist/.eslintrc new file mode 100644 index 00000000..bd1a5e04 --- /dev/null +++ b/miniprogram/node_modules/minimist/.eslintrc @@ -0,0 +1,29 @@ +{ + "root": true, + + "extends": "@ljharb/eslint-config/node/0.4", + + "rules": { + "array-element-newline": 0, + "complexity": 0, + "func-style": [2, "declaration"], + "max-lines-per-function": 0, + "max-nested-callbacks": 1, + "max-statements-per-line": 1, + "max-statements": 0, + "multiline-comment-style": 0, + "no-continue": 1, + "no-param-reassign": 1, + "no-restricted-syntax": 1, + "object-curly-newline": 0, + }, + + "overrides": [ + { + "files": "test/**", + "rules": { + "camelcase": 0, + }, + }, + ] +} diff --git a/miniprogram/node_modules/minimist/.github/FUNDING.yml b/miniprogram/node_modules/minimist/.github/FUNDING.yml new file mode 100644 index 00000000..a9366222 --- /dev/null +++ b/miniprogram/node_modules/minimist/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/minimist +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/miniprogram/node_modules/minimist/.nycrc b/miniprogram/node_modules/minimist/.nycrc new file mode 100644 index 00000000..55c3d293 --- /dev/null +++ b/miniprogram/node_modules/minimist/.nycrc @@ -0,0 +1,14 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "lines": 86, + "statements": 85.93, + "functions": 82.43, + "branches": 76.06, + "exclude": [ + "coverage", + "example", + "test" + ] +} diff --git a/miniprogram/node_modules/minimist/CHANGELOG.md b/miniprogram/node_modules/minimist/CHANGELOG.md new file mode 100644 index 00000000..c9a1e15e --- /dev/null +++ b/miniprogram/node_modules/minimist/CHANGELOG.md @@ -0,0 +1,298 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [v1.2.8](https://github.com/minimistjs/minimist/compare/v1.2.7...v1.2.8) - 2023-02-09 + +### Merged + +- [Fix] Fix long option followed by single dash [`#17`](https://github.com/minimistjs/minimist/pull/17) +- [Tests] Remove duplicate test [`#12`](https://github.com/minimistjs/minimist/pull/12) +- [Fix] opt.string works with multiple aliases [`#10`](https://github.com/minimistjs/minimist/pull/10) + +### Fixed + +- [Fix] Fix long option followed by single dash (#17) [`#15`](https://github.com/minimistjs/minimist/issues/15) +- [Tests] Remove duplicate test (#12) [`#8`](https://github.com/minimistjs/minimist/issues/8) +- [Fix] Fix long option followed by single dash [`#15`](https://github.com/minimistjs/minimist/issues/15) +- [Fix] opt.string works with multiple aliases (#10) [`#9`](https://github.com/minimistjs/minimist/issues/9) +- [Fix] Fix handling of short option with non-trivial equals [`#5`](https://github.com/minimistjs/minimist/issues/5) +- [Tests] Remove duplicate test [`#8`](https://github.com/minimistjs/minimist/issues/8) +- [Fix] opt.string works with multiple aliases [`#9`](https://github.com/minimistjs/minimist/issues/9) + +### Commits + +- Merge tag 'v0.2.3' [`a026794`](https://github.com/minimistjs/minimist/commit/a0267947c7870fc5847cf2d437fbe33f392767da) +- [eslint] fix indentation and whitespace [`5368ca4`](https://github.com/minimistjs/minimist/commit/5368ca4147e974138a54cc0dc4cea8f756546b70) +- [eslint] fix indentation and whitespace [`e5f5067`](https://github.com/minimistjs/minimist/commit/e5f5067259ceeaf0b098d14bec910f87e58708c7) +- [eslint] more cleanup [`62fde7d`](https://github.com/minimistjs/minimist/commit/62fde7d935f83417fb046741531a9e2346a36976) +- [eslint] more cleanup [`36ac5d0`](https://github.com/minimistjs/minimist/commit/36ac5d0d95e4947d074e5737d94814034ca335d1) +- [meta] add `auto-changelog` [`73923d2`](https://github.com/minimistjs/minimist/commit/73923d223553fca08b1ba77e3fbc2a492862ae4c) +- [actions] add reusable workflows [`d80727d`](https://github.com/minimistjs/minimist/commit/d80727df77bfa9e631044d7f16368d8f09242c91) +- [eslint] add eslint; rules to enable later are warnings [`48bc06a`](https://github.com/minimistjs/minimist/commit/48bc06a1b41f00e9cdf183db34f7a51ba70e98d4) +- [eslint] fix indentation [`34b0f1c`](https://github.com/minimistjs/minimist/commit/34b0f1ccaa45183c3c4f06a91f9b405180a6f982) +- [readme] rename and add badges [`5df0fe4`](https://github.com/minimistjs/minimist/commit/5df0fe49211bd09a3636f8686a7cb3012c3e98f0) +- [Dev Deps] switch from `covert` to `nyc` [`a48b128`](https://github.com/minimistjs/minimist/commit/a48b128fdb8d427dfb20a15273f83e38d97bef07) +- [Dev Deps] update `covert`, `tape`; remove unnecessary `tap` [`f0fb958`](https://github.com/minimistjs/minimist/commit/f0fb958e9a1fe980cdffc436a211b0bda58f621b) +- [meta] create FUNDING.yml; add `funding` in package.json [`3639e0c`](https://github.com/minimistjs/minimist/commit/3639e0c819359a366387e425ab6eabf4c78d3caa) +- [meta] use `npmignore` to autogenerate an npmignore file [`be2e038`](https://github.com/minimistjs/minimist/commit/be2e038c342d8333b32f0fde67a0026b79c8150e) +- Only apps should have lockfiles [`282b570`](https://github.com/minimistjs/minimist/commit/282b570e7489d01b03f2d6d3dabf79cd3e5f84cf) +- isConstructorOrProto adapted from PR [`ef9153f`](https://github.com/minimistjs/minimist/commit/ef9153fc52b6cea0744b2239921c5dcae4697f11) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` 
[`098873c`](https://github.com/minimistjs/minimist/commit/098873c213cdb7c92e55ae1ef5aa1af3a8192a79) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` [`3124ed3`](https://github.com/minimistjs/minimist/commit/3124ed3e46306301ebb3c834874ce0241555c2c4) +- [meta] add `safe-publish-latest` [`4b927de`](https://github.com/minimistjs/minimist/commit/4b927de696d561c636b4f43bf49d4597cb36d6d6) +- [Tests] add `aud` in `posttest` [`b32d9bd`](https://github.com/minimistjs/minimist/commit/b32d9bd0ab340f4e9f8c3a97ff2a4424f25fab8c) +- [meta] update repo URLs [`f9fdfc0`](https://github.com/minimistjs/minimist/commit/f9fdfc032c54884d9a9996a390c63cd0719bbe1a) +- [actions] Avoid 0.6 tests due to build failures [`ba92fe6`](https://github.com/minimistjs/minimist/commit/ba92fe6ebbdc0431cca9a2ea8f27beb492f5e4ec) +- [Dev Deps] update `tape` [`950eaa7`](https://github.com/minimistjs/minimist/commit/950eaa74f112e04d23e9c606c67472c46739b473) +- [Dev Deps] add missing `npmignore` dev dep [`3226afa`](https://github.com/minimistjs/minimist/commit/3226afaf09e9d127ca369742437fe6e88f752d6b) +- Merge tag 'v0.2.2' [`980d7ac`](https://github.com/minimistjs/minimist/commit/980d7ac61a0b4bd552711251ac107d506b23e41f) + +## [v1.2.7](https://github.com/minimistjs/minimist/compare/v1.2.6...v1.2.7) - 2022-10-10 + +### Commits + +- [meta] add `auto-changelog` [`0ebf4eb`](https://github.com/minimistjs/minimist/commit/0ebf4ebcd5f7787a5524d31a849ef41316b83c3c) +- [actions] add reusable workflows [`e115b63`](https://github.com/minimistjs/minimist/commit/e115b63fa9d3909f33b00a2db647ff79068388de) +- [eslint] add eslint; rules to enable later are warnings [`f58745b`](https://github.com/minimistjs/minimist/commit/f58745b9bb84348e1be72af7dbba5840c7c13013) +- [Dev Deps] switch from `covert` to `nyc` [`ab03356`](https://github.com/minimistjs/minimist/commit/ab033567b9c8b31117cb026dc7f1e592ce455c65) +- [readme] rename and add badges [`236f4a0`](https://github.com/minimistjs/minimist/commit/236f4a07e4ebe5ee44f1496ec6974991ab293ffd) +- [meta] create FUNDING.yml; add `funding` in package.json [`783a49b`](https://github.com/minimistjs/minimist/commit/783a49bfd47e8335d3098a8cac75662cf71eb32a) +- [meta] use `npmignore` to autogenerate an npmignore file [`f81ece6`](https://github.com/minimistjs/minimist/commit/f81ece6aaec2fa14e69ff4f1e0407a8c4e2635a2) +- Only apps should have lockfiles [`56cad44`](https://github.com/minimistjs/minimist/commit/56cad44c7f879b9bb5ec18fcc349308024a89bfc) +- [Dev Deps] update `covert`, `tape`; remove unnecessary `tap` [`49c5f9f`](https://github.com/minimistjs/minimist/commit/49c5f9fb7e6a92db9eb340cc679de92fb3aacded) +- [Tests] add `aud` in `posttest` [`228ae93`](https://github.com/minimistjs/minimist/commit/228ae938f3cd9db9dfd8bd7458b076a7b2aef280) +- [meta] add `safe-publish-latest` [`01fc23f`](https://github.com/minimistjs/minimist/commit/01fc23f5104f85c75059972e01dd33796ab529ff) +- [meta] update repo URLs [`6b164c7`](https://github.com/minimistjs/minimist/commit/6b164c7d68e0b6bf32f894699effdfb7c63041dd) + +## [v1.2.6](https://github.com/minimistjs/minimist/compare/v1.2.5...v1.2.6) - 2022-03-21 + +### Commits + +- test from prototype pollution PR [`bc8ecee`](https://github.com/minimistjs/minimist/commit/bc8ecee43875261f4f17eb20b1243d3ed15e70eb) +- isConstructorOrProto adapted from PR [`c2b9819`](https://github.com/minimistjs/minimist/commit/c2b981977fa834b223b408cfb860f933c9811e4d) +- security notice for additional prototype pollution issue 
[`ef88b93`](https://github.com/minimistjs/minimist/commit/ef88b9325f77b5ee643ccfc97e2ebda577e4c4e2) + +## [v1.2.5](https://github.com/minimistjs/minimist/compare/v1.2.4...v1.2.5) - 2020-03-12 + +## [v1.2.4](https://github.com/minimistjs/minimist/compare/v1.2.3...v1.2.4) - 2020-03-11 + +### Commits + +- security notice [`4cf1354`](https://github.com/minimistjs/minimist/commit/4cf1354839cb972e38496d35e12f806eea92c11f) +- additional test for constructor prototype pollution [`1043d21`](https://github.com/minimistjs/minimist/commit/1043d212c3caaf871966e710f52cfdf02f9eea4b) + +## [v1.2.3](https://github.com/minimistjs/minimist/compare/v1.2.2...v1.2.3) - 2020-03-10 + +### Commits + +- more failing proto pollution tests [`13c01a5`](https://github.com/minimistjs/minimist/commit/13c01a5327736903704984b7f65616b8476850cc) +- even more aggressive checks for protocol pollution [`38a4d1c`](https://github.com/minimistjs/minimist/commit/38a4d1caead72ef99e824bb420a2528eec03d9ab) + +## [v1.2.2](https://github.com/minimistjs/minimist/compare/v1.2.1...v1.2.2) - 2020-03-10 + +### Commits + +- failing test for protocol pollution [`0efed03`](https://github.com/minimistjs/minimist/commit/0efed0340ec8433638758f7ca0c77cb20a0bfbab) +- cleanup [`67d3722`](https://github.com/minimistjs/minimist/commit/67d3722413448d00a62963d2d30c34656a92d7e2) +- console.dir -> console.log [`47acf72`](https://github.com/minimistjs/minimist/commit/47acf72c715a630bf9ea013867f47f1dd69dfc54) +- don't assign onto __proto__ [`63e7ed0`](https://github.com/minimistjs/minimist/commit/63e7ed05aa4b1889ec2f3b196426db4500cbda94) + +## [v1.2.1](https://github.com/minimistjs/minimist/compare/v1.2.0...v1.2.1) - 2020-03-10 + +### Merged + +- move the `opts['--']` example back where it belongs [`#63`](https://github.com/minimistjs/minimist/pull/63) + +### Commits + +- add test [`6be5dae`](https://github.com/minimistjs/minimist/commit/6be5dae35a32a987bcf4137fcd6c19c5200ee909) +- fix bad boolean regexp [`ac3fc79`](https://github.com/minimistjs/minimist/commit/ac3fc796e63b95128fdbdf67ea7fad71bd59aa76) + +## [v1.2.0](https://github.com/minimistjs/minimist/compare/v1.1.3...v1.2.0) - 2015-08-24 + +### Commits + +- failing -k=v short test [`63416b8`](https://github.com/minimistjs/minimist/commit/63416b8cd1d0d70e4714564cce465a36e4dd26d7) +- kv short fix [`6bbe145`](https://github.com/minimistjs/minimist/commit/6bbe14529166245e86424f220a2321442fe88dc3) +- failing kv short test [`f72ab7f`](https://github.com/minimistjs/minimist/commit/f72ab7f4572adc52902c9b6873cc969192f01b10) +- fixed kv test [`f5a48c3`](https://github.com/minimistjs/minimist/commit/f5a48c3e50e40ca54f00c8e84de4b4d6e9897fa8) +- enforce space between arg key and value [`86b321a`](https://github.com/minimistjs/minimist/commit/86b321affe648a8e016c095a4f0efa9d9074f502) + +## [v1.1.3](https://github.com/minimistjs/minimist/compare/v1.1.2...v1.1.3) - 2015-08-06 + +### Commits + +- add failing test - boolean alias array [`0fa3c5b`](https://github.com/minimistjs/minimist/commit/0fa3c5b3dd98551ddecf5392831b4c21211743fc) +- fix boolean values with multiple aliases [`9c0a6e7`](https://github.com/minimistjs/minimist/commit/9c0a6e7de25a273b11bbf9a7464f0bd833779795) + +## [v1.1.2](https://github.com/minimistjs/minimist/compare/v1.1.1...v1.1.2) - 2015-07-22 + +### Commits + +- Convert boolean arguments to boolean values [`8f3dc27`](https://github.com/minimistjs/minimist/commit/8f3dc27cf833f1d54671b6d0bcb55c2fe19672a9) +- use non-ancient npm, node 0.12 and iojs 
[`61ed1d0`](https://github.com/minimistjs/minimist/commit/61ed1d034b9ec7282764ce76f3992b1a0b4906ae) +- an older npm for 0.8 [`25cf778`](https://github.com/minimistjs/minimist/commit/25cf778b1220e7838a526832ad6972f75244054f) + +## [v1.1.1](https://github.com/minimistjs/minimist/compare/v1.1.0...v1.1.1) - 2015-03-10 + +### Commits + +- check that they type of a value is a boolean, not just that it is currently set to a boolean [`6863198`](https://github.com/minimistjs/minimist/commit/6863198e36139830ff1f20ffdceaddd93f2c1db9) +- upgrade tape, fix type issues from old tape version [`806712d`](https://github.com/minimistjs/minimist/commit/806712df91604ed02b8e39aa372b84aea659ee34) +- test for setting a boolean to a null default [`8c444fe`](https://github.com/minimistjs/minimist/commit/8c444fe89384ded7d441c120915ea60620b01dd3) +- if the previous value was a boolean, without an default (or with an alias) don't make an array either [`e5f419a`](https://github.com/minimistjs/minimist/commit/e5f419a3b5b3bc3f9e5ac71b7040621af70ed2dd) + +## [v1.1.0](https://github.com/minimistjs/minimist/compare/v1.0.0...v1.1.0) - 2014-08-10 + +### Commits + +- add support for handling "unknown" options not registered with the parser. [`6f3cc5d`](https://github.com/minimistjs/minimist/commit/6f3cc5d4e84524932a6ef2ce3592acc67cdd4383) +- reformat package.json [`02ed371`](https://github.com/minimistjs/minimist/commit/02ed37115194d3697ff358e8e25e5e66bab1d9f8) +- coverage script [`e5531ba`](https://github.com/minimistjs/minimist/commit/e5531ba0479da3b8138d3d8cac545d84ccb1c8df) +- extra fn to get 100% coverage again [`a6972da`](https://github.com/minimistjs/minimist/commit/a6972da89e56bf77642f8ec05a13b6558db93498) + +## [v1.0.0](https://github.com/minimistjs/minimist/compare/v0.2.3...v1.0.0) - 2014-08-10 + +### Commits + +- added stopEarly option [`471c7e4`](https://github.com/minimistjs/minimist/commit/471c7e4a7e910fc7ad8f9df850a186daf32c64e9) +- fix list [`fef6ae7`](https://github.com/minimistjs/minimist/commit/fef6ae79c38b9dc1c49569abb7cd04eb965eac5e) + +## [v0.2.3](https://github.com/minimistjs/minimist/compare/v0.2.2...v0.2.3) - 2023-02-09 + +### Merged + +- [Fix] Fix long option followed by single dash [`#17`](https://github.com/minimistjs/minimist/pull/17) +- [Tests] Remove duplicate test [`#12`](https://github.com/minimistjs/minimist/pull/12) +- [Fix] opt.string works with multiple aliases [`#10`](https://github.com/minimistjs/minimist/pull/10) + +### Fixed + +- [Fix] Fix long option followed by single dash (#17) [`#15`](https://github.com/minimistjs/minimist/issues/15) +- [Tests] Remove duplicate test (#12) [`#8`](https://github.com/minimistjs/minimist/issues/8) +- [Fix] opt.string works with multiple aliases (#10) [`#9`](https://github.com/minimistjs/minimist/issues/9) + +### Commits + +- [eslint] fix indentation and whitespace [`e5f5067`](https://github.com/minimistjs/minimist/commit/e5f5067259ceeaf0b098d14bec910f87e58708c7) +- [eslint] more cleanup [`36ac5d0`](https://github.com/minimistjs/minimist/commit/36ac5d0d95e4947d074e5737d94814034ca335d1) +- [eslint] fix indentation [`34b0f1c`](https://github.com/minimistjs/minimist/commit/34b0f1ccaa45183c3c4f06a91f9b405180a6f982) +- isConstructorOrProto adapted from PR [`ef9153f`](https://github.com/minimistjs/minimist/commit/ef9153fc52b6cea0744b2239921c5dcae4697f11) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` [`098873c`](https://github.com/minimistjs/minimist/commit/098873c213cdb7c92e55ae1ef5aa1af3a8192a79) +- [Dev Deps] add missing `npmignore` dev dep 
[`3226afa`](https://github.com/minimistjs/minimist/commit/3226afaf09e9d127ca369742437fe6e88f752d6b) + +## [v0.2.2](https://github.com/minimistjs/minimist/compare/v0.2.1...v0.2.2) - 2022-10-10 + +### Commits + +- [meta] add `auto-changelog` [`73923d2`](https://github.com/minimistjs/minimist/commit/73923d223553fca08b1ba77e3fbc2a492862ae4c) +- [actions] add reusable workflows [`d80727d`](https://github.com/minimistjs/minimist/commit/d80727df77bfa9e631044d7f16368d8f09242c91) +- [eslint] add eslint; rules to enable later are warnings [`48bc06a`](https://github.com/minimistjs/minimist/commit/48bc06a1b41f00e9cdf183db34f7a51ba70e98d4) +- [readme] rename and add badges [`5df0fe4`](https://github.com/minimistjs/minimist/commit/5df0fe49211bd09a3636f8686a7cb3012c3e98f0) +- [Dev Deps] switch from `covert` to `nyc` [`a48b128`](https://github.com/minimistjs/minimist/commit/a48b128fdb8d427dfb20a15273f83e38d97bef07) +- [Dev Deps] update `covert`, `tape`; remove unnecessary `tap` [`f0fb958`](https://github.com/minimistjs/minimist/commit/f0fb958e9a1fe980cdffc436a211b0bda58f621b) +- [meta] create FUNDING.yml; add `funding` in package.json [`3639e0c`](https://github.com/minimistjs/minimist/commit/3639e0c819359a366387e425ab6eabf4c78d3caa) +- [meta] use `npmignore` to autogenerate an npmignore file [`be2e038`](https://github.com/minimistjs/minimist/commit/be2e038c342d8333b32f0fde67a0026b79c8150e) +- Only apps should have lockfiles [`282b570`](https://github.com/minimistjs/minimist/commit/282b570e7489d01b03f2d6d3dabf79cd3e5f84cf) +- [meta] add `safe-publish-latest` [`4b927de`](https://github.com/minimistjs/minimist/commit/4b927de696d561c636b4f43bf49d4597cb36d6d6) +- [Tests] add `aud` in `posttest` [`b32d9bd`](https://github.com/minimistjs/minimist/commit/b32d9bd0ab340f4e9f8c3a97ff2a4424f25fab8c) +- [meta] update repo URLs [`f9fdfc0`](https://github.com/minimistjs/minimist/commit/f9fdfc032c54884d9a9996a390c63cd0719bbe1a) + +## [v0.2.1](https://github.com/minimistjs/minimist/compare/v0.2.0...v0.2.1) - 2020-03-12 + +## [v0.2.0](https://github.com/minimistjs/minimist/compare/v0.1.0...v0.2.0) - 2014-06-19 + +### Commits + +- support all-boolean mode [`450a97f`](https://github.com/minimistjs/minimist/commit/450a97f6e2bc85c7a4a13185c19a818d9a5ebe69) + +## [v0.1.0](https://github.com/minimistjs/minimist/compare/v0.0.10...v0.1.0) - 2014-05-12 + +### Commits + +- Provide a mechanism to segregate -- arguments [`ce4a1e6`](https://github.com/minimistjs/minimist/commit/ce4a1e63a7e8d5ab88d2a3768adefa6af98a445a) +- documented argv['--'] [`14db0e6`](https://github.com/minimistjs/minimist/commit/14db0e6dbc6d2b9e472adaa54dad7004b364634f) +- Adding a test-case for notFlags segregation [`715c1e3`](https://github.com/minimistjs/minimist/commit/715c1e3714be223f998f6c537af6b505f0236c16) + +## [v0.0.10](https://github.com/minimistjs/minimist/compare/v0.0.9...v0.0.10) - 2014-05-11 + +### Commits + +- dedicated boolean test [`46e448f`](https://github.com/minimistjs/minimist/commit/46e448f9f513cfeb2bcc8b688b9b47ba1e515c2b) +- dedicated num test [`9bf2d36`](https://github.com/minimistjs/minimist/commit/9bf2d36f1d3b8795be90b8f7de0a937f098aa394) +- aliased values treated as strings [`1ab743b`](https://github.com/minimistjs/minimist/commit/1ab743bad4484d69f1259bed42f9531de01119de) +- cover the case of already numbers, at 100% coverage [`b2bb044`](https://github.com/minimistjs/minimist/commit/b2bb04436599d77a2ce029e8e555e25b3aa55d13) +- another test for higher coverage 
[`3662624`](https://github.com/minimistjs/minimist/commit/3662624be976d5489d486a856849c048d13be903) + +## [v0.0.9](https://github.com/minimistjs/minimist/compare/v0.0.8...v0.0.9) - 2014-05-08 + +### Commits + +- Eliminate `longest` fn. [`824f642`](https://github.com/minimistjs/minimist/commit/824f642038d1b02ede68b6261d1d65163390929a) + +## [v0.0.8](https://github.com/minimistjs/minimist/compare/v0.0.7...v0.0.8) - 2014-02-20 + +### Commits + +- return '' if flag is string and empty [`fa63ed4`](https://github.com/minimistjs/minimist/commit/fa63ed4651a4ef4eefddce34188e0d98d745a263) +- handle joined single letters [`66c248f`](https://github.com/minimistjs/minimist/commit/66c248f0241d4d421d193b022e9e365f11178534) + +## [v0.0.7](https://github.com/minimistjs/minimist/compare/v0.0.6...v0.0.7) - 2014-02-08 + +### Commits + +- another swap of .test for .match [`d1da408`](https://github.com/minimistjs/minimist/commit/d1da40819acbe846d89a5c02721211e3c1260dde) + +## [v0.0.6](https://github.com/minimistjs/minimist/compare/v0.0.5...v0.0.6) - 2014-02-08 + +### Commits + +- use .test() instead of .match() to not crash on non-string values in the arguments array [`7e0d1ad`](https://github.com/minimistjs/minimist/commit/7e0d1add8c9e5b9b20a4d3d0f9a94d824c578da1) + +## [v0.0.5](https://github.com/minimistjs/minimist/compare/v0.0.4...v0.0.5) - 2013-09-18 + +### Commits + +- Improve '--' handling. [`b11822c`](https://github.com/minimistjs/minimist/commit/b11822c09cc9d2460f30384d12afc0b953c037a4) + +## [v0.0.4](https://github.com/minimistjs/minimist/compare/v0.0.3...v0.0.4) - 2013-09-17 + +## [v0.0.3](https://github.com/minimistjs/minimist/compare/v0.0.2...v0.0.3) - 2013-09-12 + +### Commits + +- failing test for single dash preceeding a double dash [`b465514`](https://github.com/minimistjs/minimist/commit/b465514b82c9ae28972d714facd951deb2ad762b) +- fix for the dot test [`6a095f1`](https://github.com/minimistjs/minimist/commit/6a095f1d364c8fab2d6753d2291a0649315d297a) + +## [v0.0.2](https://github.com/minimistjs/minimist/compare/v0.0.1...v0.0.2) - 2013-08-28 + +### Commits + +- allow dotted aliases & defaults [`321c33e`](https://github.com/minimistjs/minimist/commit/321c33e755485faaeb44eeb1c05d33b2e0a5a7c4) +- use a better version of ff [`e40f611`](https://github.com/minimistjs/minimist/commit/e40f61114cf7be6f7947f7b3eed345853a67dbbb) + +## [v0.0.1](https://github.com/minimistjs/minimist/compare/v0.0.0...v0.0.1) - 2013-06-25 + +### Commits + +- remove trailing commas [`6ff0fa0`](https://github.com/minimistjs/minimist/commit/6ff0fa055064f15dbe06d50b89d5173a6796e1db) + +## v0.0.0 - 2013-06-25 + +### Commits + +- half of the parse test ported [`3079326`](https://github.com/minimistjs/minimist/commit/307932601325087de6cf94188eb798ffc4f3088a) +- stripped down code and a passing test from optimist [`7cced88`](https://github.com/minimistjs/minimist/commit/7cced88d82e399d1a03ed23eb667f04d3f320d10) +- ported parse tests completely over [`9448754`](https://github.com/minimistjs/minimist/commit/944875452e0820df6830b1408c26a0f7d3e1db04) +- docs, package.json [`a5bf46a`](https://github.com/minimistjs/minimist/commit/a5bf46ac9bb3bd114a9c340276c62c1091e538d5) +- move more short tests into short.js [`503edb5`](https://github.com/minimistjs/minimist/commit/503edb5c41d89c0d40831ee517154fc13b0f18b9) +- default bool test was wrong, not the code [`1b9f5db`](https://github.com/minimistjs/minimist/commit/1b9f5db4741b49962846081b68518de824992097) +- passing long tests ripped out of parse.js 
[`7972c4a`](https://github.com/minimistjs/minimist/commit/7972c4aff1f4803079e1668006658e2a761a0428) +- badges [`84c0370`](https://github.com/minimistjs/minimist/commit/84c037063664d42878aace715fe6572ce01b6f3b) +- all the tests now ported, some failures [`64239ed`](https://github.com/minimistjs/minimist/commit/64239edfe92c711c4eb0da254fcdfad2a5fdb605) +- failing short test [`f8a5341`](https://github.com/minimistjs/minimist/commit/f8a534112dd1138d2fad722def56a848480c446f) +- fixed the numeric test [`6b034f3`](https://github.com/minimistjs/minimist/commit/6b034f37c79342c60083ed97fd222e16928aac51) diff --git a/miniprogram/node_modules/minimist/LICENSE b/miniprogram/node_modules/minimist/LICENSE new file mode 100644 index 00000000..ee27ba4b --- /dev/null +++ b/miniprogram/node_modules/minimist/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/miniprogram/node_modules/minimist/README.md b/miniprogram/node_modules/minimist/README.md new file mode 100644 index 00000000..74da3234 --- /dev/null +++ b/miniprogram/node_modules/minimist/README.md @@ -0,0 +1,121 @@ +# minimist [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +parse argument options + +This module is the guts of optimist's argument parser without all the +fanciful decoration. + +# example + +``` js +var argv = require('minimist')(process.argv.slice(2)); +console.log(argv); +``` + +``` +$ node example/parse.js -a beep -b boop +{ _: [], a: 'beep', b: 'boop' } +``` + +``` +$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz +{ + _: ['foo', 'bar', 'baz'], + x: 3, + y: 4, + n: 5, + a: true, + b: true, + c: true, + beep: 'boop' +} +``` + +# security + +Previous versions had a prototype pollution bug that could cause privilege +escalation in some circumstances when handling untrusted user input. + +Please use version 1.2.6 or later: + +* https://security.snyk.io/vuln/SNYK-JS-MINIMIST-2429795 (version <=1.2.5) +* https://snyk.io/vuln/SNYK-JS-MINIMIST-559764 (version <=1.2.3) + +# methods + +``` js +var parseArgs = require('minimist') +``` + +## var argv = parseArgs(args, opts={}) + +Return an argument object `argv` populated with the array arguments from `args`. 
+ +`argv._` contains all the arguments that didn't have an option associated with +them. + +Numeric-looking arguments will be returned as numbers unless `opts.string` or +`opts.boolean` is set for that argument name. + +Any arguments after `'--'` will not be parsed and will end up in `argv._`. + +options can be: + +* `opts.string` - a string or array of strings argument names to always treat as +strings +* `opts.boolean` - a boolean, string or array of strings to always treat as +booleans. if `true` will treat all double hyphenated arguments without equal signs +as boolean (e.g. affects `--foo`, not `-f` or `--foo=bar`) +* `opts.alias` - an object mapping string names to strings or arrays of string +argument names to use as aliases +* `opts.default` - an object mapping string argument names to default values +* `opts.stopEarly` - when true, populate `argv._` with everything after the +first non-option +* `opts['--']` - when true, populate `argv._` with everything before the `--` +and `argv['--']` with everything after the `--`. Here's an example: + + ``` + > require('./')('one two three -- four five --six'.split(' '), { '--': true }) + { + _: ['one', 'two', 'three'], + '--': ['four', 'five', '--six'] + } + ``` + + Note that with `opts['--']` set, parsing for arguments still stops after the + `--`. + +* `opts.unknown` - a function which is invoked with a command line parameter not +defined in the `opts` configuration object. If the function returns `false`, the +unknown option is not added to `argv`. + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install minimist +``` + +# license + +MIT + +[package-url]: https://npmjs.org/package/minimist +[npm-version-svg]: https://versionbadg.es/minimistjs/minimist.svg +[npm-badge-png]: https://nodei.co/npm/minimist.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/minimist.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/minimist.svg +[downloads-url]: https://npm-stat.com/charts.html?package=minimist +[codecov-image]: https://codecov.io/gh/minimistjs/minimist/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/minimistjs/minimist/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/minimistjs/minimist +[actions-url]: https://github.com/minimistjs/minimist/actions diff --git a/miniprogram/node_modules/minimist/example/parse.js b/miniprogram/node_modules/minimist/example/parse.js new file mode 100644 index 00000000..9d90ffb2 --- /dev/null +++ b/miniprogram/node_modules/minimist/example/parse.js @@ -0,0 +1,4 @@ +'use strict'; + +var argv = require('../')(process.argv.slice(2)); +console.log(argv); diff --git a/miniprogram/node_modules/minimist/index.js b/miniprogram/node_modules/minimist/index.js new file mode 100644 index 00000000..f020f394 --- /dev/null +++ b/miniprogram/node_modules/minimist/index.js @@ -0,0 +1,263 @@ +'use strict'; + +function hasKey(obj, keys) { + var o = obj; + keys.slice(0, -1).forEach(function (key) { + o = o[key] || {}; + }); + + var key = keys[keys.length - 1]; + return key in o; +} + +function isNumber(x) { + if (typeof x === 'number') { return true; } + if ((/^0x[0-9a-f]+$/i).test(x)) { return true; } + return (/^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/).test(x); +} + +function isConstructorOrProto(obj, key) { + return (key === 'constructor' && typeof obj[key] === 'function') || key === '__proto__'; +} + +module.exports = function (args, opts) { + if (!opts) { opts = {}; } + + var 
flags = { + bools: {}, + strings: {}, + unknownFn: null, + }; + + if (typeof opts.unknown === 'function') { + flags.unknownFn = opts.unknown; + } + + if (typeof opts.boolean === 'boolean' && opts.boolean) { + flags.allBools = true; + } else { + [].concat(opts.boolean).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + }); + } + + var aliases = {}; + + function aliasIsBoolean(key) { + return aliases[key].some(function (x) { + return flags.bools[x]; + }); + } + + Object.keys(opts.alias || {}).forEach(function (key) { + aliases[key] = [].concat(opts.alias[key]); + aliases[key].forEach(function (x) { + aliases[x] = [key].concat(aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + + [].concat(opts.string).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + if (aliases[key]) { + [].concat(aliases[key]).forEach(function (k) { + flags.strings[k] = true; + }); + } + }); + + var defaults = opts.default || {}; + + var argv = { _: [] }; + + function argDefined(key, arg) { + return (flags.allBools && (/^--[^=]+$/).test(arg)) + || flags.strings[key] + || flags.bools[key] + || aliases[key]; + } + + function setKey(obj, keys, value) { + var o = obj; + for (var i = 0; i < keys.length - 1; i++) { + var key = keys[i]; + if (isConstructorOrProto(o, key)) { return; } + if (o[key] === undefined) { o[key] = {}; } + if ( + o[key] === Object.prototype + || o[key] === Number.prototype + || o[key] === String.prototype + ) { + o[key] = {}; + } + if (o[key] === Array.prototype) { o[key] = []; } + o = o[key]; + } + + var lastKey = keys[keys.length - 1]; + if (isConstructorOrProto(o, lastKey)) { return; } + if ( + o === Object.prototype + || o === Number.prototype + || o === String.prototype + ) { + o = {}; + } + if (o === Array.prototype) { o = []; } + if (o[lastKey] === undefined || flags.bools[lastKey] || typeof o[lastKey] === 'boolean') { + o[lastKey] = value; + } else if (Array.isArray(o[lastKey])) { + o[lastKey].push(value); + } else { + o[lastKey] = [o[lastKey], value]; + } + } + + function setArg(key, val, arg) { + if (arg && flags.unknownFn && !argDefined(key, arg)) { + if (flags.unknownFn(arg) === false) { return; } + } + + var value = !flags.strings[key] && isNumber(val) + ? Number(val) + : val; + setKey(argv, key.split('.'), value); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), value); + }); + } + + Object.keys(flags.bools).forEach(function (key) { + setArg(key, defaults[key] === undefined ? false : defaults[key]); + }); + + var notFlags = []; + + if (args.indexOf('--') !== -1) { + notFlags = args.slice(args.indexOf('--') + 1); + args = args.slice(0, args.indexOf('--')); + } + + for (var i = 0; i < args.length; i++) { + var arg = args[i]; + var key; + var next; + + if ((/^--.+=/).test(arg)) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); + key = m[1]; + var value = m[2]; + if (flags.bools[key]) { + value = value !== 'false'; + } + setArg(key, value, arg); + } else if ((/^--no-.+/).test(arg)) { + key = arg.match(/^--no-(.+)/)[1]; + setArg(key, false, arg); + } else if ((/^--.+/).test(arg)) { + key = arg.match(/^--(.+)/)[1]; + next = args[i + 1]; + if ( + next !== undefined + && !(/^(-|--)[^-]/).test(next) + && !flags.bools[key] + && !flags.allBools + && (aliases[key] ? 
!aliasIsBoolean(key) : true) + ) { + setArg(key, next, arg); + i += 1; + } else if ((/^(true|false)$/).test(next)) { + setArg(key, next === 'true', arg); + i += 1; + } else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } else if ((/^-[^-]+/).test(arg)) { + var letters = arg.slice(1, -1).split(''); + + var broken = false; + for (var j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + + if (next === '-') { + setArg(letters[j], next, arg); + continue; + } + + if ((/[A-Za-z]/).test(letters[j]) && next[0] === '=') { + setArg(letters[j], next.slice(1), arg); + broken = true; + break; + } + + if ( + (/[A-Za-z]/).test(letters[j]) + && (/-?\d+(\.\d*)?(e-?\d+)?$/).test(next) + ) { + setArg(letters[j], next, arg); + broken = true; + break; + } + + if (letters[j + 1] && letters[j + 1].match(/\W/)) { + setArg(letters[j], arg.slice(j + 2), arg); + broken = true; + break; + } else { + setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg); + } + } + + key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if ( + args[i + 1] + && !(/^(-|--)[^-]/).test(args[i + 1]) + && !flags.bools[key] + && (aliases[key] ? !aliasIsBoolean(key) : true) + ) { + setArg(key, args[i + 1], arg); + i += 1; + } else if (args[i + 1] && (/^(true|false)$/).test(args[i + 1])) { + setArg(key, args[i + 1] === 'true', arg); + i += 1; + } else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + } else { + if (!flags.unknownFn || flags.unknownFn(arg) !== false) { + argv._.push(flags.strings._ || !isNumber(arg) ? arg : Number(arg)); + } + if (opts.stopEarly) { + argv._.push.apply(argv._, args.slice(i + 1)); + break; + } + } + } + + Object.keys(defaults).forEach(function (k) { + if (!hasKey(argv, k.split('.'))) { + setKey(argv, k.split('.'), defaults[k]); + + (aliases[k] || []).forEach(function (x) { + setKey(argv, x.split('.'), defaults[k]); + }); + } + }); + + if (opts['--']) { + argv['--'] = notFlags.slice(); + } else { + notFlags.forEach(function (k) { + argv._.push(k); + }); + } + + return argv; +}; diff --git a/miniprogram/node_modules/minimist/package.json b/miniprogram/node_modules/minimist/package.json new file mode 100644 index 00000000..c10a3344 --- /dev/null +++ b/miniprogram/node_modules/minimist/package.json @@ -0,0 +1,75 @@ +{ + "name": "minimist", + "version": "1.2.8", + "description": "parse argument options", + "main": "index.js", + "devDependencies": { + "@ljharb/eslint-config": "^21.0.1", + "aud": "^2.0.2", + "auto-changelog": "^2.4.0", + "eslint": "=8.8.0", + "in-publish": "^2.0.1", + "npmignore": "^0.3.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^2.0.0", + "tape": "^5.6.3" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=auto", + "prepublishOnly": "safe-publish-latest", + "prepublish": "not-in-publish || npm run prepublishOnly", + "lint": "eslint --ext=js,mjs .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/6..latest", + "ff/5", + "firefox/latest", + "chrome/10", + "chrome/latest", + "safari/5.1", + "safari/latest", + "opera/12" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/minimistjs/minimist.git" + }, + "homepage": 
"https://github.com/minimistjs/minimist", + "keywords": [ + "argv", + "getopt", + "parser", + "optimist" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github/workflows" + ] + } +} diff --git a/miniprogram/node_modules/minimist/test/all_bool.js b/miniprogram/node_modules/minimist/test/all_bool.js new file mode 100644 index 00000000..befa0c99 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/all_bool.js @@ -0,0 +1,34 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('flag boolean true (default all --args to boolean)', function (t) { + var argv = parse(['moo', '--honk', 'cow'], { + boolean: true, + }); + + t.deepEqual(argv, { + honk: true, + _: ['moo', 'cow'], + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); + +test('flag boolean true only affects double hyphen arguments without equals signs', function (t) { + var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], { + boolean: true, + }); + + t.deepEqual(argv, { + honk: true, + tacos: 'good', + p: 55, + _: ['moo', 'cow'], + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/bool.js b/miniprogram/node_modules/minimist/test/bool.js new file mode 100644 index 00000000..e58d47e4 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/bool.js @@ -0,0 +1,177 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('flag boolean default false', function (t) { + var argv = parse(['moo'], { + boolean: ['t', 'verbose'], + default: { verbose: false, t: false }, + }); + + t.deepEqual(argv, { + verbose: false, + t: false, + _: ['moo'], + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var argv = parse(['-x', '-z', 'one', 'two', 'three'], { + boolean: ['x', 'y', 'z'], + }); + + t.deepEqual(argv, { + x: true, + y: false, + z: true, + _: ['one', 'two', 'three'], + }); + + t.deepEqual(typeof argv.x, 'boolean'); + t.deepEqual(typeof argv.y, 'boolean'); + t.deepEqual(typeof argv.z, 'boolean'); + t.end(); +}); +test('boolean and alias with chainable api', function (t) { + var aliased = ['-h', 'derp']; + var regular = ['--herp', 'derp']; + var aliasedArgv = parse(aliased, { + boolean: 'herp', + alias: { h: 'herp' }, + }); + var propertyArgv = parse(regular, { + boolean: 'herp', + alias: { h: 'herp' }, + }); + var expected = { + herp: true, + h: true, + _: ['derp'], + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = ['-h', 'derp']; + var regular = ['--herp', 'derp']; + var opts = { + alias: { h: 'herp' }, + boolean: 'herp', + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + _: ['derp'], + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias array with options hash', function (t) { + var aliased = ['-h', 'derp']; + var regular = ['--herp', 'derp']; + var alt = 
['--harp', 'derp']; + var opts = { + alias: { h: ['herp', 'harp'] }, + boolean: 'h', + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var altPropertyArgv = parse(alt, opts); + var expected = { + harp: true, + herp: true, + h: true, + _: ['derp'], + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.same(altPropertyArgv, expected); + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = ['-h', 'true']; + var regular = ['--herp', 'true']; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + _: [], + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function (t) { + var parsed = parse(['--boool', '--other=true'], { + boolean: 'boool', + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = parse(['--boool', '--other=false'], { + boolean: 'boool', + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'false'); + t.end(); +}); + +test('boolean --boool=true', function (t) { + var parsed = parse(['--boool=true'], { + default: { + boool: false, + }, + boolean: ['boool'], + }); + + t.same(parsed.boool, true); + t.end(); +}); + +test('boolean --boool=false', function (t) { + var parsed = parse(['--boool=false'], { + default: { + boool: true, + }, + boolean: ['boool'], + }); + + t.same(parsed.boool, false); + t.end(); +}); + +test('boolean using something similar to true', function (t) { + var opts = { boolean: 'h' }; + var result = parse(['-h', 'true.txt'], opts); + var expected = { + h: true, + _: ['true.txt'], + }; + + t.same(result, expected); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/dash.js b/miniprogram/node_modules/minimist/test/dash.js new file mode 100644 index 00000000..70788177 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/dash.js @@ -0,0 +1,43 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('-', function (t) { + t.plan(6); + t.deepEqual(parse(['-n', '-']), { n: '-', _: [] }); + t.deepEqual(parse(['--nnn', '-']), { nnn: '-', _: [] }); + t.deepEqual(parse(['-']), { _: ['-'] }); + t.deepEqual(parse(['-f-']), { f: '-', _: [] }); + t.deepEqual( + parse(['-b', '-'], { boolean: 'b' }), + { b: true, _: ['-'] } + ); + t.deepEqual( + parse(['-s', '-'], { string: 's' }), + { s: '-', _: [] } + ); +}); + +test('-a -- b', function (t) { + t.plan(2); + t.deepEqual(parse(['-a', '--', 'b']), { a: true, _: ['b'] }); + t.deepEqual(parse(['--a', '--', 'b']), { a: true, _: ['b'] }); +}); + +test('move arguments after the -- into their own `--` array', function (t) { + t.plan(1); + t.deepEqual( + parse(['--name', 'John', 'before', '--', 'after'], { '--': true }), + { name: 'John', _: ['before'], '--': ['after'] } + ); +}); + +test('--- option value', function (t) { + // A multi-dash value is largely an edge case, but check the behaviour is as expected, + // and in particular the same for short option and long option (as made consistent in Jan 2023). 
+ t.plan(2); + t.deepEqual(parse(['-n', '---']), { n: '---', _: [] }); + t.deepEqual(parse(['--nnn', '---']), { nnn: '---', _: [] }); +}); + diff --git a/miniprogram/node_modules/minimist/test/default_bool.js b/miniprogram/node_modules/minimist/test/default_bool.js new file mode 100644 index 00000000..4e9f6250 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/default_bool.js @@ -0,0 +1,37 @@ +'use strict'; + +var test = require('tape'); +var parse = require('../'); + +test('boolean default true', function (t) { + var argv = parse([], { + boolean: 'sometrue', + default: { sometrue: true }, + }); + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = parse([], { + boolean: 'somefalse', + default: { somefalse: false }, + }); + t.equal(argv.somefalse, false); + t.end(); +}); + +test('boolean default to null', function (t) { + var argv = parse([], { + boolean: 'maybe', + default: { maybe: null }, + }); + t.equal(argv.maybe, null); + + var argvLong = parse(['--maybe'], { + boolean: 'maybe', + default: { maybe: null }, + }); + t.equal(argvLong.maybe, true); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/dotted.js b/miniprogram/node_modules/minimist/test/dotted.js new file mode 100644 index 00000000..126ff033 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/dotted.js @@ -0,0 +1,24 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('dotted alias', function (t) { + var argv = parse(['--a.b', '22'], { default: { 'a.b': 11 }, alias: { 'a.b': 'aa.bb' } }); + t.equal(argv.a.b, 22); + t.equal(argv.aa.bb, 22); + t.end(); +}); + +test('dotted default', function (t) { + var argv = parse('', { default: { 'a.b': 11 }, alias: { 'a.b': 'aa.bb' } }); + t.equal(argv.a.b, 11); + t.equal(argv.aa.bb, 11); + t.end(); +}); + +test('dotted default with no alias', function (t) { + var argv = parse('', { default: { 'a.b': 11 } }); + t.equal(argv.a.b, 11); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/kv_short.js b/miniprogram/node_modules/minimist/test/kv_short.js new file mode 100644 index 00000000..6d1b53a7 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/kv_short.js @@ -0,0 +1,32 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('short -k=v', function (t) { + t.plan(1); + + var argv = parse(['-b=123']); + t.deepEqual(argv, { b: 123, _: [] }); +}); + +test('multi short -k=v', function (t) { + t.plan(1); + + var argv = parse(['-a=whatever', '-b=robots']); + t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] }); +}); + +test('short with embedded equals -k=a=b', function (t) { + t.plan(1); + + var argv = parse(['-k=a=b']); + t.deepEqual(argv, { k: 'a=b', _: [] }); +}); + +test('short with later equals like -ab=c', function (t) { + t.plan(1); + + var argv = parse(['-ab=c']); + t.deepEqual(argv, { a: true, b: 'c', _: [] }); +}); diff --git a/miniprogram/node_modules/minimist/test/long.js b/miniprogram/node_modules/minimist/test/long.js new file mode 100644 index 00000000..9fef51f1 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/long.js @@ -0,0 +1,33 @@ +'use strict'; + +var test = require('tape'); +var parse = require('../'); + +test('long opts', function (t) { + t.deepEqual( + parse(['--bool']), + { bool: true, _: [] }, + 'long boolean' + ); + t.deepEqual( + parse(['--pow', 'xixxle']), + { pow: 'xixxle', _: [] }, + 'long capture sp' + ); + t.deepEqual( + parse(['--pow=xixxle']), + { pow: 'xixxle', _: [] }, + 'long 
capture eq' + ); + t.deepEqual( + parse(['--host', 'localhost', '--port', '555']), + { host: 'localhost', port: 555, _: [] }, + 'long captures sp' + ); + t.deepEqual( + parse(['--host=localhost', '--port=555']), + { host: 'localhost', port: 555, _: [] }, + 'long captures eq' + ); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/num.js b/miniprogram/node_modules/minimist/test/num.js new file mode 100644 index 00000000..074393ec --- /dev/null +++ b/miniprogram/node_modules/minimist/test/num.js @@ -0,0 +1,38 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('nums', function (t) { + var argv = parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789', + ]); + t.deepEqual(argv, { + x: 1234, + y: 5.67, + z: 1e7, + w: '10f', + hex: 0xdeadbeef, + _: [789], + }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv.y, 'number'); + t.deepEqual(typeof argv.z, 'number'); + t.deepEqual(typeof argv.w, 'string'); + t.deepEqual(typeof argv.hex, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); + +test('already a number', function (t) { + var argv = parse(['-x', 1234, 789]); + t.deepEqual(argv, { x: 1234, _: [789] }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/parse.js b/miniprogram/node_modules/minimist/test/parse.js new file mode 100644 index 00000000..65d9d909 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/parse.js @@ -0,0 +1,209 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('parse args', function (t) { + t.deepEqual( + parse(['--no-moo']), + { moo: false, _: [] }, + 'no' + ); + t.deepEqual( + parse(['-v', 'a', '-v', 'b', '-v', 'c']), + { v: ['a', 'b', 'c'], _: [] }, + 'multi' + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.deepEqual( + parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek', + ]), + { + c: true, + a: true, + t: true, + s: 'woo', + h: 'awesome', + b: true, + bool: true, + key: 'value', + multi: ['quux', 'baz'], + meep: false, + name: 'meowmers', + _: ['bare', '--not-a-flag', 'eek'], + } + ); + t.end(); +}); + +test('flag boolean', function (t) { + var argv = parse(['-t', 'moo'], { boolean: 't' }); + t.deepEqual(argv, { t: true, _: ['moo'] }); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], { + boolean: ['t', 'verbose'], + default: { verbose: true }, + }); + + t.deepEqual(argv, { + verbose: false, + t: true, + _: ['moo'], + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('newlines in params', function (t) { + var args = parse(['-s', 'X\nX']); + t.deepEqual(args, { _: [], s: 'X\nX' }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = parse(['--s=X\nX']); + t.deepEqual(args, { _: [], s: 'X\nX' }); + t.end(); +}); + +test('strings', function (t) { + var s = parse(['-s', '0001234'], { string: 's' }).s; + t.equal(s, '0001234'); + t.equal(typeof s, 'string'); + + var x = parse(['-x', '56'], { string: 'x' }).x; + t.equal(x, '56'); + t.equal(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = parse([' ', 
' '], { string: '_' })._; + t.same(s.length, 2); + t.same(typeof s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('empty strings', function (t) { + var s = parse(['-s'], { string: 's' }).s; + t.equal(s, ''); + t.equal(typeof s, 'string'); + + var str = parse(['--str'], { string: 'str' }).str; + t.equal(str, ''); + t.equal(typeof str, 'string'); + + var letters = parse(['-art'], { + string: ['a', 't'], + }); + + t.equal(letters.a, ''); + t.equal(letters.r, true); + t.equal(letters.t, ''); + + t.end(); +}); + +test('string and alias', function (t) { + var x = parse(['--str', '000123'], { + string: 's', + alias: { s: 'str' }, + }); + + t.equal(x.str, '000123'); + t.equal(typeof x.str, 'string'); + t.equal(x.s, '000123'); + t.equal(typeof x.s, 'string'); + + var y = parse(['-s', '000123'], { + string: 'str', + alias: { str: 's' }, + }); + + t.equal(y.str, '000123'); + t.equal(typeof y.str, 'string'); + t.equal(y.s, '000123'); + t.equal(typeof y.s, 'string'); + + var z = parse(['-s123'], { + alias: { str: ['s', 'S'] }, + string: ['str'], + }); + + t.deepEqual( + z, + { _: [], s: '123', S: '123', str: '123' }, + 'opt.string works with multiple aliases' + ); + t.end(); +}); + +test('slashBreak', function (t) { + t.same( + parse(['-I/foo/bar/baz']), + { I: '/foo/bar/baz', _: [] } + ); + t.same( + parse(['-xyz/foo/bar/baz']), + { x: true, y: true, z: '/foo/bar/baz', _: [] } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = parse(['-f', '11', '--zoom', '55'], { + alias: { z: 'zoom' }, + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); +}); + +test('multiAlias', function (t) { + var argv = parse(['-f', '11', '--zoom', '55'], { + alias: { z: ['zm', 'zoom'] }, + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = parse([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop', + ]); + + t.same(argv.foo, { + bar: 3, + baz: 4, + quux: { + quibble: 5, + o_O: true, + }, + }); + t.same(argv.beep, { boop: true }); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/parse_modified.js b/miniprogram/node_modules/minimist/test/parse_modified.js new file mode 100644 index 00000000..32965d13 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/parse_modified.js @@ -0,0 +1,11 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('parse with modifier functions', function (t) { + t.plan(1); + + var argv = parse(['-b', '123'], { boolean: 'b' }); + t.deepEqual(argv, { b: true, _: [123] }); +}); diff --git a/miniprogram/node_modules/minimist/test/proto.js b/miniprogram/node_modules/minimist/test/proto.js new file mode 100644 index 00000000..6e629dd3 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/proto.js @@ -0,0 +1,64 @@ +'use strict'; + +/* eslint no-proto: 0 */ + +var parse = require('../'); +var test = require('tape'); + +test('proto pollution', function (t) { + var argv = parse(['--__proto__.x', '123']); + t.equal({}.x, undefined); + t.equal(argv.__proto__.x, undefined); + t.equal(argv.x, undefined); + t.end(); +}); + +test('proto pollution (array)', function (t) { + var argv = parse(['--x', '4', '--x', '5', '--x.__proto__.z', '789']); + t.equal({}.z, undefined); + t.deepEqual(argv.x, [4, 5]); + t.equal(argv.x.z, undefined); + 
t.equal(argv.x.__proto__.z, undefined); + t.end(); +}); + +test('proto pollution (number)', function (t) { + var argv = parse(['--x', '5', '--x.__proto__.z', '100']); + t.equal({}.z, undefined); + t.equal((4).z, undefined); + t.equal(argv.x, 5); + t.equal(argv.x.z, undefined); + t.end(); +}); + +test('proto pollution (string)', function (t) { + var argv = parse(['--x', 'abc', '--x.__proto__.z', 'def']); + t.equal({}.z, undefined); + t.equal('...'.z, undefined); + t.equal(argv.x, 'abc'); + t.equal(argv.x.z, undefined); + t.end(); +}); + +test('proto pollution (constructor)', function (t) { + var argv = parse(['--constructor.prototype.y', '123']); + t.equal({}.y, undefined); + t.equal(argv.y, undefined); + t.end(); +}); + +test('proto pollution (constructor function)', function (t) { + var argv = parse(['--_.concat.constructor.prototype.y', '123']); + function fnToBeTested() {} + t.equal(fnToBeTested.y, undefined); + t.equal(argv.y, undefined); + t.end(); +}); + +// powered by snyk - https://github.com/backstage/backstage/issues/10343 +test('proto pollution (constructor function) snyk', function (t) { + var argv = parse('--_.constructor.constructor.prototype.foo bar'.split(' ')); + t.equal(function () {}.foo, undefined); + t.equal(argv.y, undefined); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/short.js b/miniprogram/node_modules/minimist/test/short.js new file mode 100644 index 00000000..4a7b8438 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/short.js @@ -0,0 +1,69 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('numeric short args', function (t) { + t.plan(2); + t.deepEqual(parse(['-n123']), { n: 123, _: [] }); + t.deepEqual( + parse(['-123', '456']), + { 1: true, 2: true, 3: 456, _: [] } + ); +}); + +test('short', function (t) { + t.deepEqual( + parse(['-b']), + { b: true, _: [] }, + 'short boolean' + ); + t.deepEqual( + parse(['foo', 'bar', 'baz']), + { _: ['foo', 'bar', 'baz'] }, + 'bare' + ); + t.deepEqual( + parse(['-cats']), + { c: true, a: true, t: true, s: true, _: [] }, + 'group' + ); + t.deepEqual( + parse(['-cats', 'meow']), + { c: true, a: true, t: true, s: 'meow', _: [] }, + 'short group next' + ); + t.deepEqual( + parse(['-h', 'localhost']), + { h: 'localhost', _: [] }, + 'short capture' + ); + t.deepEqual( + parse(['-h', 'localhost', '-p', '555']), + { h: 'localhost', p: 555, _: [] }, + 'short captures' + ); + t.end(); +}); + +test('mixed short bool and capture', function (t) { + t.same( + parse(['-h', 'localhost', '-fp', '555', 'script.js']), + { + f: true, p: 555, h: 'localhost', + _: ['script.js'], + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.deepEqual( + parse(['-h', 'localhost', '-fp', '555', 'script.js']), + { + f: true, p: 555, h: 'localhost', + _: ['script.js'], + } + ); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/stop_early.js b/miniprogram/node_modules/minimist/test/stop_early.js new file mode 100644 index 00000000..52a6a919 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/stop_early.js @@ -0,0 +1,17 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('stops parsing on the first non-option when stopEarly is set', function (t) { + var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], { + stopEarly: true, + }); + + t.deepEqual(argv, { + aaa: 'bbb', + _: ['ccc', '--ddd'], + }); + + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/unknown.js b/miniprogram/node_modules/minimist/test/unknown.js 
new file mode 100644 index 00000000..4f2e0ca4 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/unknown.js @@ -0,0 +1,104 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('boolean and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['-h', 'true', '--derp', 'true']; + var regular = ['--herp', 'true', '-d', 'true']; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + unknown: unknownFn, + }; + parse(aliased, opts); + parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('flag boolean true any double hyphen argument is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var argv = parse(['--honk', '--tacos=good', 'cow', '-p', '55'], { + boolean: true, + unknown: unknownFn, + }); + t.same(unknown, ['--tacos=good', 'cow', '-p']); + t.same(argv, { + honk: true, + _: [], + }); + t.end(); +}); + +test('string and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['-h', 'hello', '--derp', 'goodbye']; + var regular = ['--herp', 'hello', '-d', 'moon']; + var opts = { + alias: { h: 'herp' }, + string: 'h', + unknown: unknownFn, + }; + parse(aliased, opts); + parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('default and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['-h', 'hello']; + var regular = ['--herp', 'hello']; + var opts = { + default: { h: 'bar' }, + alias: { h: 'herp' }, + unknown: unknownFn, + }; + parse(aliased, opts); + parse(regular, opts); + + t.same(unknown, []); + t.end(); + unknownFn(); // exercise fn for 100% coverage +}); + +test('value following -- is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['--bad', '--', 'good', 'arg']; + var opts = { + '--': true, + unknown: unknownFn, + }; + var argv = parse(aliased, opts); + + t.same(unknown, ['--bad']); + t.same(argv, { + '--': ['good', 'arg'], + _: [], + }); + t.end(); +}); diff --git a/miniprogram/node_modules/minimist/test/whitespace.js b/miniprogram/node_modules/minimist/test/whitespace.js new file mode 100644 index 00000000..4fdaf1d3 --- /dev/null +++ b/miniprogram/node_modules/minimist/test/whitespace.js @@ -0,0 +1,10 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('whitespace should be whitespace', function (t) { + t.plan(1); + var x = parse(['-x', '\t']).x; + t.equal(x, '\t'); +}); diff --git a/miniprogram/node_modules/mkdirp-classic/LICENSE b/miniprogram/node_modules/mkdirp-classic/LICENSE new file mode 100644 index 00000000..f6b3a0d1 --- /dev/null +++ b/miniprogram/node_modules/mkdirp-classic/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 James Halliday (mail@substack.net) and Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following 
conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/miniprogram/node_modules/mkdirp-classic/README.md b/miniprogram/node_modules/mkdirp-classic/README.md new file mode 100644 index 00000000..be5ac938 --- /dev/null +++ b/miniprogram/node_modules/mkdirp-classic/README.md @@ -0,0 +1,18 @@ +# mkdirp-classic + +Just a non-deprecated mirror of [mkdirp 0.5.2](https://github.com/substack/node-mkdirp/tree/0.5.1) +for use in modules where we depend on the non promise interface. + +``` +npm install mkdirp-classic +``` + +## Usage + +``` js +// See the above link +``` + +## License + +MIT diff --git a/miniprogram/node_modules/mkdirp-classic/index.js b/miniprogram/node_modules/mkdirp-classic/index.js new file mode 100644 index 00000000..6ce241b5 --- /dev/null +++ b/miniprogram/node_modules/mkdirp-classic/index.js @@ -0,0 +1,98 @@ +var path = require('path'); +var fs = require('fs'); +var _0777 = parseInt('0777', 8); + +module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; + +function mkdirP (p, opts, f, made) { + if (typeof opts === 'function') { + f = opts; + opts = {}; + } + else if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 & (~process.umask()); + } + if (!made) made = null; + + var cb = f || function () {}; + p = path.resolve(p); + + xfs.mkdir(p, mode, function (er) { + if (!er) { + made = made || p; + return cb(null, made); + } + switch (er.code) { + case 'ENOENT': + mkdirP(path.dirname(p), opts, function (er, made) { + if (er) cb(er, made); + else mkdirP(p, opts, cb, made); + }); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, function (er2, stat) { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. + if (er2 || !stat.isDirectory()) cb(er, made) + else cb(null, made); + }); + break; + } + }); +} + +mkdirP.sync = function sync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 & (~process.umask()); + } + if (!made) made = null; + + p = path.resolve(p); + + try { + xfs.mkdirSync(p, mode); + made = made || p; + } + catch (err0) { + switch (err0.code) { + case 'ENOENT' : + made = sync(path.dirname(p), opts, made); + sync(p, opts, made); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. 
+ default: + var stat; + try { + stat = xfs.statSync(p); + } + catch (err1) { + throw err0; + } + if (!stat.isDirectory()) throw err0; + break; + } + } + + return made; +}; diff --git a/miniprogram/node_modules/mkdirp-classic/package.json b/miniprogram/node_modules/mkdirp-classic/package.json new file mode 100644 index 00000000..c8b5407b --- /dev/null +++ b/miniprogram/node_modules/mkdirp-classic/package.json @@ -0,0 +1,18 @@ +{ + "name": "mkdirp-classic", + "version": "0.5.3", + "description": "Mirror of mkdirp 0.5.2", + "main": "index.js", + "dependencies": {}, + "devDependencies": {}, + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/mkdirp-classic.git" + }, + "author": "Mathias Buus (@mafintosh)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mafintosh/mkdirp-classic/issues" + }, + "homepage": "https://github.com/mafintosh/mkdirp-classic" +} diff --git a/miniprogram/node_modules/napi-build-utils/.github/workflows/run-npm-tests.yml b/miniprogram/node_modules/napi-build-utils/.github/workflows/run-npm-tests.yml new file mode 100644 index 00000000..3298868e --- /dev/null +++ b/miniprogram/node_modules/napi-build-utils/.github/workflows/run-npm-tests.yml @@ -0,0 +1,31 @@ +name: Run npm Tests + +on: + workflow_dispatch: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + +jobs: + test: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [22, 23] + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Set up Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + + - name: Install dependencies + run: npm install + + - name: Run tests + run: npm test \ No newline at end of file diff --git a/miniprogram/node_modules/napi-build-utils/LICENSE b/miniprogram/node_modules/napi-build-utils/LICENSE new file mode 100644 index 00000000..8e0248a4 --- /dev/null +++ b/miniprogram/node_modules/napi-build-utils/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 inspiredware + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
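The `mkdirp-classic` README added above defers its usage section to the upstream mkdirp link. For orientation only, here is a minimal sketch of the interface implemented in the vendored `index.js`; the `/tmp/...` paths are hypothetical placeholders, not anything used by this project.

```js
// Minimal usage sketch for mkdirp-classic, based on the index.js shown above.
var mkdirp = require('mkdirp-classic')

// Asynchronous form: creates any missing intermediate directories, then calls
// back with the first directory that had to be created (or null if none were).
mkdirp('/tmp/foo/bar/baz', function (err, made) {
  if (err) throw err
  console.log('first directory created:', made)
})

// Synchronous form returns the same "first created directory" value.
var made = mkdirp.sync('/tmp/a/b/c')
```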
diff --git a/miniprogram/node_modules/napi-build-utils/README.md b/miniprogram/node_modules/napi-build-utils/README.md new file mode 100644 index 00000000..7c29b680 --- /dev/null +++ b/miniprogram/node_modules/napi-build-utils/README.md @@ -0,0 +1,52 @@ +# napi-build-utils + +[![npm](https://img.shields.io/npm/v/napi-build-utils.svg)](https://www.npmjs.com/package/napi-build-utils) +![Node version](https://img.shields.io/node/v/prebuild.svg) +![Build Status](https://github.com/inspiredware/napi-build-utils/actions/workflows/run-npm-tests.yml/badge.svg) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com/) +[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) + +A set of utilities to assist developers of tools that build [Node-API](https://nodejs.org/api/n-api.html#n_api_n_api) native add-ons. + +## Background + +This module is targeted to developers creating tools that build Node-API native add-ons. + +It implements a set of functions that aid in determining the Node-API version supported by the currently running Node instance and the set of Node-API versions against which the Node-API native add-on is designed to be built. Other functions determine whether a particular Node-API version can be built and can issue console warnings for unsupported Node-API versions. + +Unlike the modules this code is designed to facilitate building, this module is written entirely in JavaScript. + +## Quick start + +```bash +npm install napi-build-utils +``` + +The module exports a set of functions documented [here](./index.md). For example: + +```javascript +var napiBuildUtils = require('napi-build-utils'); +var napiVersion = napiBuildUtils.getNapiVersion(); // Node-API version supported by Node, or undefined. +``` + +## Declaring supported Node-API versions + +Native modules that are designed to work with [Node-API](https://nodejs.org/api/n-api.html#n_api_n_api) must explicitly declare the Node-API version(s) against which they are coded to build. This is accomplished by including a `binary.napi_versions` property in the module's `package.json` file. For example: + +```json +"binary": { + "napi_versions": [2,3] +} +``` + +In the absence of a need to compile against a specific Node-API version, the value `3` is a good choice as this is the Node-API version that was supported when Node-API left experimental status. + +Modules that are built against a specific Node-API version will continue to operate indefinitely, even as later versions of Node-API are introduced. + +## History + +**v2.0.0** This version was introduced to address a limitation when the Node-API version reached `10` in NodeJS `v23.6.0`. There was no change in the API, but a SemVer bump to `2.0.0` was made out of an abundance of caution. + +## Support + +If you run into problems or limitations, please file an issue and we'll take a look. Pull requests are also welcome. 
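To make the README above concrete, the following sketch (not part of the package) shows how a build tool might combine the exported helpers. It assumes it is run from a package root whose `package.json` declares `binary.napi_versions`, e.g. `[2,3]` as in the example above, since `napi-build-utils` resolves `package.json` relative to the current working directory.

```javascript
// Illustrative only; napi-build-utils reads ./package.json from the current
// working directory, so binary.napi_versions must be declared there.
var napiBuildUtils = require('napi-build-utils');

// Highest Node-API version supported by the running Node instance (or undefined).
var napiVersion = napiBuildUtils.getNapiVersion();

// True only when the version is supported by both this Node instance and the package.
if (napiBuildUtils.isSupportedVersion('3')) {
  console.log('Node-API version 3 can be built here');
}

// Greatest package-declared version buildable on this Node instance, or undefined.
var best = napiBuildUtils.getBestNapiBuildVersion();
console.log('Best Node-API build version:', best, '| instance supports:', napiVersion);
```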
diff --git a/miniprogram/node_modules/napi-build-utils/index.js b/miniprogram/node_modules/napi-build-utils/index.js new file mode 100644 index 00000000..d143d5d0 --- /dev/null +++ b/miniprogram/node_modules/napi-build-utils/index.js @@ -0,0 +1,214 @@ +'use strict' +// Copyright (c) 2018 inspiredware + +var path = require('path') +var pkg = require(path.resolve('package.json')) + +var versionArray = process.version + .substr(1) + .replace(/-.*$/, '') + .split('.') + .map(function (item) { + return +item + }) + +/** + * + * A set of utilities to assist developers of tools that build + * [N-API](https://nodejs.org/api/n-api.html#n_api_n_api) native add-ons. + * + * The main repository can be found + * [here](https://github.com/inspiredware/napi-build-utils#napi-build-utils). + * + * @module napi-build-utils + */ + +/** + * Implements a consistent name of `napi` for N-API runtimes. + * + * @param {string} runtime The runtime string. + * @returns {boolean} + */ +exports.isNapiRuntime = function (runtime) { + return runtime === 'napi' +} + +/** + * Determines whether the specified N-API version is supported + * by both the currently running Node instance and the package. + * + * @param {string} napiVersion The N-API version to check. + * @returns {boolean} + */ +exports.isSupportedVersion = function (napiVersion) { + var version = parseInt(napiVersion, 10) + return version <= exports.getNapiVersion() && exports.packageSupportsVersion(version) +} + +/** + * Determines whether the specified N-API version is supported by the package. + * The N-API version must be present in the `package.json` + * `binary.napi_versions` array. + * + * @param {number} napiVersion The N-API version to check. + * @returns {boolean} + * @private + */ +exports.packageSupportsVersion = function (napiVersion) { + if (pkg.binary && pkg.binary.napi_versions && + pkg.binary.napi_versions instanceof Array) { // integer array + for (var i = 0; i < pkg.binary.napi_versions.length; i++) { + if (pkg.binary.napi_versions[i] === napiVersion) return true + }; + }; + return false +} + +/** + * Issues a warning to the supplied log if the N-API version is not supported + * by the current Node instance or if the N-API version is not supported + * by the package. + * + * @param {string} napiVersion The N-API version to check. + * @param {Object} log The log object to which the warnings are to be issued. + * Must implement the `warn` method. + */ +exports.logUnsupportedVersion = function (napiVersion, log) { + if (!exports.isSupportedVersion(napiVersion)) { + if (exports.packageSupportsVersion(napiVersion)) { + log.warn('This Node instance does not support N-API version ' + napiVersion) + } else { + log.warn('This package does not support N-API version ' + napiVersion) + } + } +} + +/** + * Issues warnings to the supplied log for those N-API versions not supported + * by the N-API runtime or the package. + * + * Note that this function is specific to the + * [`prebuild`](https://github.com/prebuild/prebuild#prebuild) package. + * + * `target` is the list of targets to be built and is determined in one of + * three ways from the command line arguments: + * (1) `--target` specifies a specific target to build. + * (2) `--all` specifies all N-API versions supported by the package. + * (3) Neither of these specifies to build the single "best version available." + * + * `prebuild` is an array of objects in the form `{runtime: 'napi', target: '2'}`. 
+ * The array contains the list of N-API versions that are supported by both the + * package being built and the currently running Node instance. + * + * The objective of this function is to issue a warning for those items that appear + * in the `target` argument but not in the `prebuild` argument. + * If a specific target is supported by the package (`packageSupportsVersion`) but + * not in `prebuild`, the assumption is that the target is not supported by + * Node. + * + * @param {(Array|string)} target The N-API version(s) to check, as a single + * version or an array of versions. + * @param {Array} prebuild A config object created by the `prebuild` package. + * @param {Object} log The log object to which the warnings are to be issued. + * Must implement the `warn` method. + * @private + */ +exports.logMissingNapiVersions = function (target, prebuild, log) { + if (exports.getNapiBuildVersions()) { + var targets = [].concat(target) + targets.forEach(function (napiVersion) { + if (!prebuildExists(prebuild, napiVersion)) { + if (exports.packageSupportsVersion(parseInt(napiVersion, 10))) { + log.warn('This Node instance does not support N-API version ' + napiVersion) + } else { + log.warn('This package does not support N-API version ' + napiVersion) + } + } + }) + } else { + log.error('Builds with runtime \'napi\' require a binary.napi_versions ' + + 'property on the package.json file') + } +} + +/** + * Determines whether the specified N-API version exists in the prebuild + * configuration object. + * + * Note that this function is specific to the `prebuild` and `prebuild-install` + * packages. + * + * @param {Object} prebuild A config object created by the `prebuild` package. + * @param {string} napiVersion The N-API version to be checked. + * @return {boolean} + * @private + */ +var prebuildExists = function (prebuild, napiVersion) { + if (prebuild) { + for (var i = 0; i < prebuild.length; i++) { + if (prebuild[i].target === napiVersion) return true + } + } + return false +} + +/** + * Returns the best N-API version to build given the highest N-API + * version supported by the current Node instance and the N-API versions + * supported by the package, or undefined if a suitable N-API version + * cannot be determined. + * + * The best build version is the greatest N-API version supported by + * the package that is less than or equal to the highest N-API version + * supported by the current Node instance. + * + * @returns {number|undefined} + */ +exports.getBestNapiBuildVersion = function () { + var bestNapiBuildVersion = 0 + var napiBuildVersions = exports.getNapiBuildVersions(pkg) // array of integer strings + if (napiBuildVersions) { + var ourNapiVersion = exports.getNapiVersion() + napiBuildVersions.forEach(function (napiBuildVersionStr) { + var napiBuildVersion = parseInt(napiBuildVersionStr, 10) + if (napiBuildVersion > bestNapiBuildVersion && + napiBuildVersion <= ourNapiVersion) { + bestNapiBuildVersion = napiBuildVersion + } + }) + } + return bestNapiBuildVersion === 0 ? undefined : bestNapiBuildVersion +} + +/** + * Returns an array of N-API versions supported by the package. + * + * @returns {Array|undefined} + */ +exports.getNapiBuildVersions = function () { + var napiBuildVersions = [] + // remove duplicates, convert to text + if (pkg.binary && pkg.binary.napi_versions) { + pkg.binary.napi_versions.forEach(function (napiVersion) { + var duplicated = napiBuildVersions.indexOf('' + napiVersion) !== -1 + if (!duplicated) { + napiBuildVersions.push('' + napiVersion) + } + }) + } + return napiBuildVersions.length ?
napiBuildVersions : undefined +} + +/** + * Returns the highest N-API version supported by the current node instance + * or undefined if N-API is not supported. + * + * @returns {string|undefined} + */ +exports.getNapiVersion = function () { + var version = process.versions.napi // integer string, can be undefined + if (!version) { // this code should never need to be updated + if (versionArray[0] === 9 && versionArray[1] >= 3) version = '2' // 9.3.0+ + else if (versionArray[0] === 8) version = '1' // 8.0.0+ + } + return version +} diff --git a/miniprogram/node_modules/napi-build-utils/index.md b/miniprogram/node_modules/napi-build-utils/index.md new file mode 100644 index 00000000..e69de29b diff --git a/miniprogram/node_modules/napi-build-utils/package.json b/miniprogram/node_modules/napi-build-utils/package.json new file mode 100644 index 00000000..ad15183c --- /dev/null +++ b/miniprogram/node_modules/napi-build-utils/package.json @@ -0,0 +1,42 @@ +{ + "name": "napi-build-utils", + "version": "2.0.0", + "description": "A set of utilities to assist developers of tools that build N-API native add-ons", + "main": "index.js", + "scripts": { + "doc": "jsdoc2md index.js >index.md", + "test": "mocha test/ && npm run lint", + "lint": "standard", + "prepublishOnly": "npm run test" + }, + "keywords": [ + "n-api", + "prebuild", + "prebuild-install" + ], + "author": "Jim Schlight", + "license": "MIT", + "homepage": "https://github.com/inspiredware/napi-build-utils#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/inspiredware/napi-build-utils.git" + }, + "bugs": { + "url": "https://github.com/inspiredware/napi-build-utils/issues" + }, + "devDependencies": { + "chai": "^4.1.2", + "jsdoc-to-markdown": "^4.0.1", + "mocha": "^5.2.0", + "standard": "^12.0.1" + }, + "binary": { + "note": "napi-build-tools is not an N-API module. This entry is for unit testing.", + "napi_versions": [ + 2, + 2, + 3, + 10 + ] + } +} diff --git a/miniprogram/node_modules/node-abi/LICENSE b/miniprogram/node_modules/node-abi/LICENSE new file mode 100644 index 00000000..5513de0d --- /dev/null +++ b/miniprogram/node_modules/node-abi/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Lukas Geiger + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/miniprogram/node_modules/node-abi/README.md b/miniprogram/node_modules/node-abi/README.md new file mode 100644 index 00000000..680a0d0b --- /dev/null +++ b/miniprogram/node_modules/node-abi/README.md @@ -0,0 +1,54 @@ +# Node.js ABI + +[![Build Status](https://github.com/electron/node-abi/actions/workflows/test.yml/badge.svg)](https://github.com/electron/node-abi/actions/workflows/test.yml) +[![Auto-update ABI JSON file](https://github.com/electron/node-abi/actions/workflows/update-abi.yml/badge.svg)](https://github.com/electron/node-abi/actions/workflows/update-abi.yml) +[![Snyk badge](https://snyk.io/test/github/electron/node-abi/badge.svg)](https://snyk.io/test/github/electron/node-abi) +[![npm version](http://img.shields.io/npm/v/node-abi.svg)](https://npmjs.org/package/node-abi) + +Get the Node ABI (application binary interface) for a given target and runtime, and vice versa. + +## Installation + +```shell +npm install node-abi +``` + +## Usage + +```javascript +const nodeAbi = require('node-abi') + +nodeAbi.getAbi('7.2.0', 'node') +// '51' +nodeAbi.getAbi('1.4.10', 'electron') +// '50' +nodeAbi.getTarget('51', 'node') +// '7.2.0' +nodeAbi.getTarget('50', 'electron') +// '1.4.15' + +nodeAbi.allTargets +// [ +// { runtime: 'node', target: '0.10.48', abi: '11', lts: false }, +// { runtime: 'node', target: '0.12.17', abi: '14', lts: false }, +// { runtime: 'node', target: '4.6.1', abi: '46', lts: true }, +// { runtime: 'node', target: '5.12.0', abi: '47', lts: false }, +// { runtime: 'node', target: '6.9.4', abi: '48', lts: true }, +// { runtime: 'node', target: '7.4.0', abi: '51', lts: false }, +// { runtime: 'electron', target: '1.0.2', abi: '47', lts: false }, +// { runtime: 'electron', target: '1.2.8', abi: '48', lts: false }, +// { runtime: 'electron', target: '1.3.13', abi: '49', lts: false }, +// { runtime: 'electron', target: '1.4.15', abi: '50', lts: false } +// ] +nodeAbi.deprecatedTargets +nodeAbi.supportedTargets +nodeAbi.additionalTargets +nodeAbi.futureTargets +// ... 
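+// Newer pairings are resolved from the bundled abi_registry.json, for example:
+nodeAbi.getAbi('18.0.0', 'node')
+// '108'
+nodeAbi.getTarget('108', 'node')
+// '18.0.0'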
+``` + +## References + +- https://github.com/lgeiger/electron-abi +- https://nodejs.org/en/download/releases/ +- https://github.com/nodejs/Release diff --git a/miniprogram/node_modules/node-abi/abi_registry.json b/miniprogram/node_modules/node-abi/abi_registry.json new file mode 100644 index 00000000..f2a679de --- /dev/null +++ b/miniprogram/node_modules/node-abi/abi_registry.json @@ -0,0 +1,425 @@ +[ + { + "runtime": "node", + "target": "11.0.0", + "lts": false, + "future": false, + "abi": "67" + }, + { + "runtime": "node", + "target": "12.0.0", + "lts": [ + "2019-10-21", + "2020-11-30" + ], + "future": false, + "abi": "72" + }, + { + "runtime": "node", + "target": "13.0.0", + "lts": false, + "future": false, + "abi": "79" + }, + { + "runtime": "node", + "target": "14.0.0", + "lts": [ + "2020-10-27", + "2021-10-19" + ], + "future": false, + "abi": "83" + }, + { + "runtime": "node", + "target": "15.0.0", + "lts": false, + "future": false, + "abi": "88" + }, + { + "runtime": "node", + "target": "16.0.0", + "lts": [ + "2021-10-26", + "2022-10-18" + ], + "future": false, + "abi": "93" + }, + { + "runtime": "node", + "target": "17.0.0", + "lts": false, + "future": false, + "abi": "102" + }, + { + "runtime": "node", + "target": "18.0.0", + "lts": [ + "2022-10-25", + "2023-10-18" + ], + "future": false, + "abi": "108" + }, + { + "runtime": "node", + "target": "19.0.0", + "lts": false, + "future": false, + "abi": "111" + }, + { + "runtime": "node", + "target": "20.0.0", + "lts": [ + "2023-10-24", + "2024-10-22" + ], + "future": false, + "abi": "115" + }, + { + "runtime": "node", + "target": "21.0.0", + "lts": false, + "future": false, + "abi": "120" + }, + { + "runtime": "node", + "target": "22.0.0", + "lts": [ + "2024-10-29", + "2025-10-21" + ], + "future": false, + "abi": "127" + }, + { + "runtime": "node", + "target": "23.0.0", + "lts": false, + "future": false, + "abi": "131" + }, + { + "runtime": "node", + "target": "24.0.0", + "lts": [ + "2025-10-28", + "2026-10-20" + ], + "future": false, + "abi": "137" + }, + { + "runtime": "node", + "target": "25.0.0", + "lts": false, + "future": false, + "abi": "141" + }, + { + "runtime": "node", + "target": "26.0.0", + "lts": [ + "2026-10-28", + "2027-10-20" + ], + "future": true, + "abi": "144" + }, + { + "abi": "70", + "future": false, + "lts": false, + "runtime": "electron", + "target": "5.0.0-beta.9" + }, + { + "abi": "73", + "future": false, + "lts": false, + "runtime": "electron", + "target": "6.0.0-beta.1" + }, + { + "abi": "75", + "future": false, + "lts": false, + "runtime": "electron", + "target": "7.0.0-beta.1" + }, + { + "abi": "76", + "future": false, + "lts": false, + "runtime": "electron", + "target": "9.0.0-beta.1" + }, + { + "abi": "76", + "future": false, + "lts": false, + "runtime": "electron", + "target": "8.0.0-beta.1" + }, + { + "abi": "80", + "future": false, + "lts": false, + "runtime": "electron", + "target": "9.0.0-beta.2" + }, + { + "abi": "82", + "future": false, + "lts": false, + "runtime": "electron", + "target": "11.0.0-beta.1" + }, + { + "abi": "82", + "future": false, + "lts": false, + "runtime": "electron", + "target": "10.0.0-beta.1" + }, + { + "abi": "85", + "future": false, + "lts": false, + "runtime": "electron", + "target": "11.0.0-beta.11" + }, + { + "abi": "87", + "future": false, + "lts": false, + "runtime": "electron", + "target": "12.0.0-beta.1" + }, + { + "abi": "89", + "future": false, + "lts": false, + "runtime": "electron", + "target": "15.0.0-alpha.1" + }, + { + "abi": "89", + "future": false, + "lts": 
false, + "runtime": "electron", + "target": "14.0.0-beta.1" + }, + { + "abi": "89", + "future": false, + "lts": false, + "runtime": "electron", + "target": "13.0.0-beta.2" + }, + { + "abi": "97", + "future": false, + "lts": false, + "runtime": "electron", + "target": "14.0.2" + }, + { + "abi": "98", + "future": false, + "lts": false, + "runtime": "electron", + "target": "15.0.0-beta.7" + }, + { + "abi": "99", + "future": false, + "lts": false, + "runtime": "electron", + "target": "16.0.0-alpha.1" + }, + { + "abi": "101", + "future": false, + "lts": false, + "runtime": "electron", + "target": "17.0.0-alpha.1" + }, + { + "abi": "103", + "future": false, + "lts": false, + "runtime": "electron", + "target": "18.0.0-alpha.1" + }, + { + "abi": "106", + "future": false, + "lts": false, + "runtime": "electron", + "target": "19.0.0-alpha.1" + }, + { + "abi": "107", + "future": false, + "lts": false, + "runtime": "electron", + "target": "20.0.0-alpha.1" + }, + { + "abi": "109", + "future": false, + "lts": false, + "runtime": "electron", + "target": "21.0.0-alpha.1" + }, + { + "abi": "110", + "future": false, + "lts": false, + "runtime": "electron", + "target": "22.0.0-alpha.1" + }, + { + "abi": "113", + "future": false, + "lts": false, + "runtime": "electron", + "target": "23.0.0-alpha.1" + }, + { + "abi": "114", + "future": false, + "lts": false, + "runtime": "electron", + "target": "24.0.0-alpha.1" + }, + { + "abi": "116", + "future": false, + "lts": false, + "runtime": "electron", + "target": "26.0.0-alpha.1" + }, + { + "abi": "116", + "future": false, + "lts": false, + "runtime": "electron", + "target": "25.0.0-alpha.1" + }, + { + "abi": "118", + "future": false, + "lts": false, + "runtime": "electron", + "target": "27.0.0-alpha.1" + }, + { + "abi": "119", + "future": false, + "lts": false, + "runtime": "electron", + "target": "28.0.0-alpha.1" + }, + { + "abi": "121", + "future": false, + "lts": false, + "runtime": "electron", + "target": "29.0.0-alpha.1" + }, + { + "abi": "123", + "future": false, + "lts": false, + "runtime": "electron", + "target": "31.0.0-alpha.1" + }, + { + "abi": "123", + "future": false, + "lts": false, + "runtime": "electron", + "target": "30.0.0-alpha.1" + }, + { + "abi": "125", + "future": false, + "lts": false, + "runtime": "electron", + "target": "31.0.0-beta.7" + }, + { + "abi": "128", + "future": false, + "lts": false, + "runtime": "electron", + "target": "32.0.0-alpha.1" + }, + { + "abi": "130", + "future": false, + "lts": false, + "runtime": "electron", + "target": "33.0.0-alpha.1" + }, + { + "abi": "132", + "future": false, + "lts": false, + "runtime": "electron", + "target": "34.0.0-alpha.1" + }, + { + "abi": "133", + "future": false, + "lts": false, + "runtime": "electron", + "target": "35.0.0-alpha.1" + }, + { + "abi": "135", + "future": false, + "lts": false, + "runtime": "electron", + "target": "36.0.0-alpha.1" + }, + { + "abi": "136", + "future": false, + "lts": false, + "runtime": "electron", + "target": "37.0.0-alpha.1" + }, + { + "abi": "139", + "future": false, + "lts": false, + "runtime": "electron", + "target": "38.0.0-alpha.1" + }, + { + "abi": "140", + "future": false, + "lts": false, + "runtime": "electron", + "target": "39.0.0-alpha.1" + }, + { + "abi": "143", + "future": false, + "lts": false, + "runtime": "electron", + "target": "40.0.0-alpha.2" + } +] \ No newline at end of file diff --git a/miniprogram/node_modules/node-abi/index.js b/miniprogram/node_modules/node-abi/index.js new file mode 100644 index 00000000..f6561353 --- /dev/null +++ 
b/miniprogram/node_modules/node-abi/index.js @@ -0,0 +1,179 @@ +var semver = require('semver') + +function getNextTarget (runtime, targets) { + if (targets == null) targets = allTargets + var latest = targets.filter(function (t) { return t.runtime === runtime }).slice(-1)[0] + var increment = runtime === 'electron' ? 'minor' : 'major' + var next = semver.inc(latest.target, increment) + // Electron releases appear in the registry in their beta form, sometimes there is + // no active beta line. During this time we need to double bump + if (runtime === 'electron' && semver.parse(latest.target).prerelease.length) { + next = semver.inc(next, 'major') + } + return next +} + +function getAbi (target, runtime) { + if (target === String(Number(target))) return target + if (target) target = target.replace(/^v/, '') + if (!runtime) runtime = 'node' + + if (runtime === 'node') { + if (!target) return process.versions.modules + if (target === process.versions.node) return process.versions.modules + } + + var abi + var lastTarget + + for (var i = 0; i < allTargets.length; i++) { + var t = allTargets[i] + if (t.runtime !== runtime) continue + if (semver.lte(t.target, target) && (!lastTarget || semver.gte(t.target, lastTarget))) { + abi = t.abi + lastTarget = t.target + } + } + + if (abi && semver.lt(target, getNextTarget(runtime))) return abi + throw new Error('Could not detect abi for version ' + target + ' and runtime ' + runtime + '. Updating "node-abi" might help solve this issue if it is a new release of ' + runtime) +} + +function getTarget (abi, runtime) { + if (abi && abi !== String(Number(abi))) return abi + if (!runtime) runtime = 'node' + + if (runtime === 'node' && !abi) return process.versions.node + + var match = allTargets + .filter(function (t) { + return t.abi === abi && t.runtime === runtime + }) + .map(function (t) { + return t.target + }) + if (match.length) { + var betaSeparatorIndex = match[0].indexOf("-") + return betaSeparatorIndex > -1 + ? 
match[0].substring(0, betaSeparatorIndex) + : match[0] + } + + throw new Error('Could not detect target for abi ' + abi + ' and runtime ' + runtime) +} + +function sortByTargetFn (a, b) { + var abiComp = Number(a.abi) - Number(b.abi) + if (abiComp !== 0) return abiComp + if (a.target < b.target) return -1 + if (a.target > b.target) return 1 + return 0 +} + +function loadGeneratedTargets () { + var registry = require('./abi_registry.json') + var targets = { + supported: [], + additional: [], + future: [] + } + + registry.forEach(function (item) { + var target = { + runtime: item.runtime, + target: item.target, + abi: item.abi + } + if (item.lts) { + var startDate = new Date(Date.parse(item.lts[0])) + var endDate = new Date(Date.parse(item.lts[1])) + var currentDate = new Date() + target.lts = startDate < currentDate && currentDate < endDate + } else { + target.lts = false + } + + if (target.runtime === 'node-webkit') { + targets.additional.push(target) + } else if (item.future) { + targets.future.push(target) + } else { + targets.supported.push(target) + } + }) + + targets.supported.sort(sortByTargetFn) + targets.additional.sort(sortByTargetFn) + targets.future.sort(sortByTargetFn) + + return targets +} + +var generatedTargets = loadGeneratedTargets() + +var supportedTargets = [ + {runtime: 'node', target: '5.0.0', abi: '47', lts: false}, + {runtime: 'node', target: '6.0.0', abi: '48', lts: false}, + {runtime: 'node', target: '7.0.0', abi: '51', lts: false}, + {runtime: 'node', target: '8.0.0', abi: '57', lts: false}, + {runtime: 'node', target: '9.0.0', abi: '59', lts: false}, + {runtime: 'node', target: '10.0.0', abi: '64', lts: new Date(2018, 10, 1) < new Date() && new Date() < new Date(2020, 4, 31)}, + {runtime: 'electron', target: '0.36.0', abi: '47', lts: false}, + {runtime: 'electron', target: '1.1.0', abi: '48', lts: false}, + {runtime: 'electron', target: '1.3.0', abi: '49', lts: false}, + {runtime: 'electron', target: '1.4.0', abi: '50', lts: false}, + {runtime: 'electron', target: '1.5.0', abi: '51', lts: false}, + {runtime: 'electron', target: '1.6.0', abi: '53', lts: false}, + {runtime: 'electron', target: '1.7.0', abi: '54', lts: false}, + {runtime: 'electron', target: '1.8.0', abi: '57', lts: false}, + {runtime: 'electron', target: '2.0.0', abi: '57', lts: false}, + {runtime: 'electron', target: '3.0.0', abi: '64', lts: false}, + {runtime: 'electron', target: '4.0.0', abi: '64', lts: false}, + {runtime: 'electron', target: '4.0.4', abi: '69', lts: false} +] + +supportedTargets.push.apply(supportedTargets, generatedTargets.supported) + +var additionalTargets = [ + {runtime: 'node-webkit', target: '0.13.0', abi: '47', lts: false}, + {runtime: 'node-webkit', target: '0.15.0', abi: '48', lts: false}, + {runtime: 'node-webkit', target: '0.18.3', abi: '51', lts: false}, + {runtime: 'node-webkit', target: '0.23.0', abi: '57', lts: false}, + {runtime: 'node-webkit', target: '0.26.5', abi: '59', lts: false} +] + +additionalTargets.push.apply(additionalTargets, generatedTargets.additional) + +var deprecatedTargets = [ + {runtime: 'node', target: '0.2.0', abi: '1', lts: false}, + {runtime: 'node', target: '0.9.1', abi: '0x000A', lts: false}, + {runtime: 'node', target: '0.9.9', abi: '0x000B', lts: false}, + {runtime: 'node', target: '0.10.4', abi: '11', lts: false}, + {runtime: 'node', target: '0.11.0', abi: '0x000C', lts: false}, + {runtime: 'node', target: '0.11.8', abi: '13', lts: false}, + {runtime: 'node', target: '0.11.11', abi: '14', lts: false}, + {runtime: 'node', target: 
'1.0.0', abi: '42', lts: false}, + {runtime: 'node', target: '1.1.0', abi: '43', lts: false}, + {runtime: 'node', target: '2.0.0', abi: '44', lts: false}, + {runtime: 'node', target: '3.0.0', abi: '45', lts: false}, + {runtime: 'node', target: '4.0.0', abi: '46', lts: false}, + {runtime: 'electron', target: '0.30.0', abi: '44', lts: false}, + {runtime: 'electron', target: '0.31.0', abi: '45', lts: false}, + {runtime: 'electron', target: '0.33.0', abi: '46', lts: false} +] + +var futureTargets = generatedTargets.future + +var allTargets = deprecatedTargets + .concat(supportedTargets) + .concat(additionalTargets) + .concat(futureTargets) + +exports.getAbi = getAbi +exports.getTarget = getTarget +exports.deprecatedTargets = deprecatedTargets +exports.supportedTargets = supportedTargets +exports.additionalTargets = additionalTargets +exports.futureTargets = futureTargets +exports.allTargets = allTargets +exports._getNextTarget = getNextTarget diff --git a/miniprogram/node_modules/node-abi/package.json b/miniprogram/node_modules/node-abi/package.json new file mode 100644 index 00000000..6f7962cf --- /dev/null +++ b/miniprogram/node_modules/node-abi/package.json @@ -0,0 +1,45 @@ +{ + "name": "node-abi", + "version": "3.86.0", + "description": "Get the Node ABI for a given target and runtime, and vice versa.", + "main": "index.js", + "scripts": { + "test": "tape test/index.js", + "update-abi-registry": "node --unhandled-rejections=strict scripts/update-abi-registry.js" + }, + "files": [ + "abi_registry.json" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/electron/node-abi.git" + }, + "keywords": [ + "node", + "electron", + "node_module_version", + "abi", + "v8" + ], + "author": "Lukas Geiger", + "license": "MIT", + "bugs": { + "url": "https://github.com/electron/node-abi/issues" + }, + "homepage": "https://github.com/electron/node-abi#readme", + "devDependencies": { + "@semantic-release/npm": "13.0.0-alpha.15", + "semantic-release": "^24.2.7", + "tape": "^5.3.1" + }, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": ">=10" + }, + "publishConfig": { + "provenance": true + }, + "packageManager": "yarn@4.10.3+sha512.c38cafb5c7bb273f3926d04e55e1d8c9dfa7d9c3ea1f36a4868fa028b9e5f72298f0b7f401ad5eb921749eb012eb1c3bb74bf7503df3ee43fd600d14a018266f" +} diff --git a/miniprogram/node_modules/node-addon-api/LICENSE.md b/miniprogram/node_modules/node-addon-api/LICENSE.md new file mode 100644 index 00000000..819d91a5 --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2017 [Node.js API collaborators](https://github.com/nodejs/node-addon-api#collaborators) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/miniprogram/node_modules/node-addon-api/README.md b/miniprogram/node_modules/node-addon-api/README.md new file mode 100644 index 00000000..e90eb7c9 --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/README.md @@ -0,0 +1,319 @@ +NOTE: The default branch has been renamed! +master is now named main + +If you have a local clone, you can update it by running: + +```shell +git branch -m master main +git fetch origin +git branch -u origin/main main +``` + +# **node-addon-api module** +This module contains **header-only C++ wrapper classes** which simplify +the use of the C based [Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html) +provided by Node.js when using C++. It provides a C++ object model +and exception handling semantics with low overhead. + +There are three options for implementing addons: Node-API, nan, or direct +use of internal V8, libuv, and Node.js libraries. Unless there is a need for +direct access to functionality that is not exposed by Node-API as outlined +in [C/C++ addons](https://nodejs.org/dist/latest/docs/api/addons.html) +in Node.js core, use Node-API. Refer to +[C/C++ addons with Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html) +for more information on Node-API. + +Node-API is an ABI stable C interface provided by Node.js for building native +addons. It is independent of the underlying JavaScript runtime (e.g. V8 or ChakraCore) +and is maintained as part of Node.js itself. It is intended to insulate +native addons from changes in the underlying JavaScript engine and allow +modules compiled for one version to run on later versions of Node.js without +recompilation. + +The `node-addon-api` module, which is not part of Node.js, preserves the benefits +of the Node-API as it consists only of inline code that depends only on the stable API +provided by Node-API. As such, modules built against one version of Node.js +using node-addon-api should run without having to be rebuilt with newer versions +of Node.js. + +It is important to remember that *other* Node.js interfaces such as +`libuv` (included in a project via `#include `) are not ABI-stable across +Node.js major versions. Thus, an addon must use Node-API and/or `node-addon-api` +exclusively and build against a version of Node.js that includes an +implementation of Node-API (meaning an active LTS version of Node.js) in +order to benefit from ABI stability across Node.js major versions. Node.js +provides an [ABI stability guide][] containing a detailed explanation of ABI +stability in general, and the Node-API ABI stability guarantee in particular. + +As new APIs are added to Node-API, node-addon-api must be updated to provide +wrappers for those new APIs. For this reason, node-addon-api provides +methods that allow callers to obtain the underlying Node-API handles so +direct calls to Node-API and the use of the objects/methods provided by +node-addon-api can be used together. For example, in order to be able +to use an API for which the node-addon-api does not yet provide a wrapper. + +APIs exposed by node-addon-api are generally used to create and +manipulate JavaScript values. Concepts and operations generally map +to ideas specified in the **ECMA262 Language Specification**. 
+ +The [Node-API Resource](https://nodejs.github.io/node-addon-examples/) offers an +excellent orientation and tips for developers just getting started with Node-API +and node-addon-api. + +- **[Setup](#setup)** +- **[API Documentation](#api)** +- **[Examples](#examples)** +- **[Tests](#tests)** +- **[More resource and info about native Addons](#resources)** +- **[Badges](#badges)** +- **[Code of Conduct](CODE_OF_CONDUCT.md)** +- **[Contributors](#contributors)** +- **[License](#license)** + +## **Current version: 7.1.1** + +(See [CHANGELOG.md](CHANGELOG.md) for complete Changelog) + +[![NPM](https://nodei.co/npm/node-addon-api.png?downloads=true&downloadRank=true)](https://nodei.co/npm/node-addon-api/) [![NPM](https://nodei.co/npm-dl/node-addon-api.png?months=6&height=1)](https://nodei.co/npm/node-addon-api/) + + + +node-addon-api is based on [Node-API](https://nodejs.org/api/n-api.html) and supports using different Node-API versions. +This allows addons built with it to run with Node.js versions which support the targeted Node-API version. +**However** the node-addon-api support model is to support only the active LTS Node.js versions. This means that +every year there will be a new major which drops support for the Node.js LTS version which has gone out of service. + +The oldest Node.js version supported by the current version of node-addon-api is Node.js 16.x. + +## Setup + - [Installation and usage](doc/setup.md) + - [node-gyp](doc/node-gyp.md) + - [cmake-js](doc/cmake-js.md) + - [Conversion tool](doc/conversion-tool.md) + - [Checker tool](doc/checker-tool.md) + - [Generator](doc/generator.md) + - [Prebuild tools](doc/prebuild_tools.md) + + + +### **API Documentation** + +The following is the documentation for node-addon-api. + + - [Full Class Hierarchy](doc/hierarchy.md) + - [Addon Structure](doc/addon.md) + - Data Types: + - [Env](doc/env.md) + - [CallbackInfo](doc/callbackinfo.md) + - [Reference](doc/reference.md) + - [Value](doc/value.md) + - [Name](doc/name.md) + - [Symbol](doc/symbol.md) + - [String](doc/string.md) + - [Number](doc/number.md) + - [Date](doc/date.md) + - [BigInt](doc/bigint.md) + - [Boolean](doc/boolean.md) + - [External](doc/external.md) + - [Object](doc/object.md) + - [Array](doc/array.md) + - [ObjectReference](doc/object_reference.md) + - [PropertyDescriptor](doc/property_descriptor.md) + - [Function](doc/function.md) + - [FunctionReference](doc/function_reference.md) + - [ObjectWrap](doc/object_wrap.md) + - [ClassPropertyDescriptor](doc/class_property_descriptor.md) + - [Buffer](doc/buffer.md) + - [ArrayBuffer](doc/array_buffer.md) + - [TypedArray](doc/typed_array.md) + - [TypedArrayOf](doc/typed_array_of.md) + - [DataView](doc/dataview.md) + - [Error Handling](doc/error_handling.md) + - [Error](doc/error.md) + - [TypeError](doc/type_error.md) + - [RangeError](doc/range_error.md) + - [SyntaxError](doc/syntax_error.md) + - [Object Lifetime Management](doc/object_lifetime_management.md) + - [HandleScope](doc/handle_scope.md) + - [EscapableHandleScope](doc/escapable_handle_scope.md) + - [Memory Management](doc/memory_management.md) + - [Async Operations](doc/async_operations.md) + - [AsyncWorker](doc/async_worker.md) + - [AsyncContext](doc/async_context.md) + - [AsyncWorker Variants](doc/async_worker_variants.md) + - [Thread-safe Functions](doc/threadsafe.md) + - [ThreadSafeFunction](doc/threadsafe_function.md) + - [TypedThreadSafeFunction](doc/typed_threadsafe_function.md) + - [Promises](doc/promises.md) + - [Version management](doc/version_management.md) + + + 
+### **Examples** + +Are you new to **node-addon-api**? Take a look at our **[examples](https://github.com/nodejs/node-addon-examples)** + +- **[Hello World](https://github.com/nodejs/node-addon-examples/tree/main/src/1-getting-started/1_hello_world)** +- **[Pass arguments to a function](https://github.com/nodejs/node-addon-examples/tree/main/src/1-getting-started/2_function_arguments/node-addon-api)** +- **[Callbacks](https://github.com/nodejs/node-addon-examples/tree/main/src/1-getting-started/3_callbacks/node-addon-api)** +- **[Object factory](https://github.com/nodejs/node-addon-examples/tree/main/src/1-getting-started/4_object_factory/node-addon-api)** +- **[Function factory](https://github.com/nodejs/node-addon-examples/tree/main/src/1-getting-started/5_function_factory/node-addon-api)** +- **[Wrapping C++ Object](https://github.com/nodejs/node-addon-examples/tree/main/src/1-getting-started/6_object_wrap/node-addon-api)** +- **[Factory of wrapped object](https://github.com/nodejs/node-addon-examples/tree/main/src/1-getting-started/7_factory_wrap/node-addon-api)** +- **[Passing wrapped object around](https://github.com/nodejs/node-addon-examples/tree/main/src/2-js-to-native-conversion/8_passing_wrapped/node-addon-api)** + + + +### **Tests** + +To run the **node-addon-api** tests do: + +``` +npm install +npm test +``` + +To avoid testing the deprecated portions of the API run +``` +npm install +npm test --disable-deprecated +``` + +To run the tests targeting a specific version of Node-API run +``` +npm install +export NAPI_VERSION=X +npm test --NAPI_VERSION=X +``` + +where X is the version of Node-API you want to target. + +To run a specific unit test, filter conditions are available + +**Example:** + compile and run only tests on objectwrap.cc and objectwrap.js + ``` + npm run unit --filter=objectwrap + ``` + +Multiple unit tests can be selected with wildcards + +**Example:** +compile and run all test files ending with "reference" -> function_reference.cc, object_reference.cc, reference.cc + ``` + npm run unit --filter=*reference + ``` + +Multiple filter conditions can be joined to broaden the test selection + +**Example:** + compile and run all tests under folders threadsafe_function and typed_threadsafe_function and also the objectwrap.cc file + ``` + npm run unit --filter='*function objectwrap' + ``` + +### **Debug** + +To run the **node-addon-api** tests with `--debug` option: + +``` +npm run-script dev +``` + +If you want a faster build, you might use the following option: + +``` +npm run-script dev:incremental +``` + +Take a look and get inspired by our **[test suite](https://github.com/nodejs/node-addon-api/tree/HEAD/test)** + +### **Benchmarks** + +You can run the available benchmarks using the following command: + +``` +npm run-script benchmark +``` + +See [benchmark/README.md](benchmark/README.md) for more details about running and adding benchmarks.
+ + + +### **More resource and info about native Addons** +- **[C++ Addons](https://nodejs.org/dist/latest/docs/api/addons.html)** +- **[Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html)** +- **[Node-API - Next Generation Node API for Native Modules](https://youtu.be/-Oniup60Afs)** +- **[How We Migrated Realm JavaScript From NAN to Node-API](https://developer.mongodb.com/article/realm-javascript-nan-to-n-api)** + +As node-addon-api's core mission is to expose the plain C Node-API as C++ +wrappers, tools that facilitate n-api/node-addon-api providing more +convenient patterns for developing a Node.js add-on with n-api/node-addon-api +can be published to NPM as standalone packages. It is also recommended to tag +such packages with `node-addon-api` to provide more visibility to the community. + +Quick links to NPM searches: [keywords:node-addon-api](https://www.npmjs.com/search?q=keywords%3Anode-addon-api). + + + +### **Other bindings** + +- **[napi-rs](https://napi.rs)** - (`Rust`) + + + +### **Badges** + +The use of badges is recommended to indicate the minimum version of Node-API +required for the module. This helps to determine which Node.js major versions are +supported. Addon maintainers can consult the [Node-API support matrix][] to determine +which Node.js versions provide a given Node-API version. The following badges are +available: + +![Node-API v1 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v1%20Badge.svg) +![Node-API v2 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v2%20Badge.svg) +![Node-API v3 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v3%20Badge.svg) +![Node-API v4 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v4%20Badge.svg) +![Node-API v5 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v5%20Badge.svg) +![Node-API v6 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v6%20Badge.svg) +![Node-API v7 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v7%20Badge.svg) +![Node-API v8 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v8%20Badge.svg) +![Node-API v9 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v9%20Badge.svg) +![Node-API Experimental Version Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20Experimental%20Version%20Badge.svg) + +## **Contributing** + +We love contributions from the community to **node-addon-api**! +See [CONTRIBUTING.md](CONTRIBUTING.md) for more details on our philosophy around extending this module. 
+ + + +## Team members + +### Active +| Name | GitHub Link | +| ------------------- | ----------------------------------------------------- | +| Anna Henningsen | [addaleax](https://github.com/addaleax) | +| Chengzhong Wu | [legendecas](https://github.com/legendecas) | +| Jack Xia | [JckXia](https://github.com/JckXia) | +| Kevin Eady | [KevinEady](https://github.com/KevinEady) | +| Michael Dawson | [mhdawson](https://github.com/mhdawson) | +| Nicola Del Gobbo | [NickNaso](https://github.com/NickNaso) | +| Vladimir Morozov | [vmoroz](https://github.com/vmoroz) | + +### Emeritus +| Name | GitHub Link | +| ------------------- | ----------------------------------------------------- | +| Arunesh Chandra | [aruneshchandra](https://github.com/aruneshchandra) | +| Benjamin Byholm | [kkoopa](https://github.com/kkoopa) | +| Gabriel Schulhof | [gabrielschulhof](https://github.com/gabrielschulhof) | +| Hitesh Kanwathirtha | [digitalinfinity](https://github.com/digitalinfinity) | +| Jason Ginchereau | [jasongin](https://github.com/jasongin) | +| Jim Schlight | [jschlight](https://github.com/jschlight) | +| Sampson Gao | [sampsongao](https://github.com/sampsongao) | +| Taylor Woll | [boingoing](https://github.com/boingoing) | + + + +Licensed under [MIT](./LICENSE.md) + +[ABI stability guide]: https://nodejs.org/en/docs/guides/abi-stability/ +[Node-API support matrix]: https://nodejs.org/dist/latest/docs/api/n-api.html#n_api_n_api_version_matrix diff --git a/miniprogram/node_modules/node-addon-api/common.gypi b/miniprogram/node_modules/node-addon-api/common.gypi new file mode 100644 index 00000000..06c0176b --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/common.gypi @@ -0,0 +1,20 @@ +{ + 'variables': { + 'NAPI_VERSION%': " +inline PropertyDescriptor PropertyDescriptor::Accessor( + const char* utf8name, + Getter getter, + napi_property_attributes attributes, + void* /*data*/) { + using CbData = details::CallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({getter, nullptr}); + + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + const std::string& utf8name, + Getter getter, + napi_property_attributes attributes, + void* data) { + return Accessor(utf8name.c_str(), getter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + napi_value name, + Getter getter, + napi_property_attributes attributes, + void* /*data*/) { + using CbData = details::CallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({getter, nullptr}); + + return PropertyDescriptor({nullptr, + name, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Name name, Getter getter, napi_property_attributes attributes, void* data) { + napi_value nameValue = name; + return PropertyDescriptor::Accessor(nameValue, getter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + const char* utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* /*data*/) { + using CbData = details::AccessorCallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({getter, setter, nullptr}); + + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + 
CbData::GetterWrapper, + CbData::SetterWrapper, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + const std::string& utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + return Accessor(utf8name.c_str(), getter, setter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + napi_value name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* /*data*/) { + using CbData = details::AccessorCallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({getter, setter, nullptr}); + + return PropertyDescriptor({nullptr, + name, + nullptr, + CbData::GetterWrapper, + CbData::SetterWrapper, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Name name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + napi_value nameValue = name; + return PropertyDescriptor::Accessor( + nameValue, getter, setter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + const char* utf8name, + Callable cb, + napi_property_attributes attributes, + void* /*data*/) { + using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr))); + using CbData = details::CallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({cb, nullptr}); + + return PropertyDescriptor({utf8name, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + const std::string& utf8name, + Callable cb, + napi_property_attributes attributes, + void* data) { + return Function(utf8name.c_str(), cb, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + napi_value name, + Callable cb, + napi_property_attributes attributes, + void* /*data*/) { + using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr))); + using CbData = details::CallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({cb, nullptr}); + + return PropertyDescriptor({nullptr, + name, + CbData::Wrapper, + nullptr, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + Name name, Callable cb, napi_property_attributes attributes, void* data) { + napi_value nameValue = name; + return PropertyDescriptor::Function(nameValue, cb, attributes, data); +} + +#endif // !SRC_NAPI_INL_DEPRECATED_H_ diff --git a/miniprogram/node_modules/node-addon-api/napi-inl.h b/miniprogram/node_modules/node-addon-api/napi-inl.h new file mode 100644 index 00000000..a5ae7af7 --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/napi-inl.h @@ -0,0 +1,6607 @@ +#ifndef SRC_NAPI_INL_H_ +#define SRC_NAPI_INL_H_ + +//////////////////////////////////////////////////////////////////////////////// +// Node-API C++ Wrapper Classes +// +// Inline header-only implementations for "Node-API" ABI-stable C APIs for +// Node.js. +//////////////////////////////////////////////////////////////////////////////// + +// Note: Do not include this file directly! Include "napi.h" instead. 
+ +#include +#include +#if NAPI_HAS_THREADS +#include +#endif // NAPI_HAS_THREADS +#include +#include + +namespace Napi { + +#ifdef NAPI_CPP_CUSTOM_NAMESPACE +namespace NAPI_CPP_CUSTOM_NAMESPACE { +#endif + +// Helpers to handle functions exposed from C++ and internal constants. +namespace details { + +// New napi_status constants not yet available in all supported versions of +// Node.js releases. Only necessary when they are used in napi.h and napi-inl.h. +constexpr int napi_no_external_buffers_allowed = 22; + +template +inline void default_finalizer(napi_env /*env*/, void* data, void* /*hint*/) { + delete static_cast(data); +} + +// Attach a data item to an object and delete it when the object gets +// garbage-collected. +// TODO: Replace this code with `napi_add_finalizer()` whenever it becomes +// available on all supported versions of Node.js. +template > +inline napi_status AttachData(napi_env env, + napi_value obj, + FreeType* data, + void* hint = nullptr) { + napi_status status; +#if (NAPI_VERSION < 5) + napi_value symbol, external; + status = napi_create_symbol(env, nullptr, &symbol); + if (status == napi_ok) { + status = napi_create_external(env, data, finalizer, hint, &external); + if (status == napi_ok) { + napi_property_descriptor desc = {nullptr, + symbol, + nullptr, + nullptr, + nullptr, + external, + napi_default, + nullptr}; + status = napi_define_properties(env, obj, 1, &desc); + } + } +#else // NAPI_VERSION >= 5 + status = napi_add_finalizer(env, obj, data, finalizer, hint, nullptr); +#endif + return status; +} + +// For use in JS to C++ callback wrappers to catch any Napi::Error exceptions +// and rethrow them as JavaScript exceptions before returning from the callback. +template +inline napi_value WrapCallback(Callable callback) { +#ifdef NAPI_CPP_EXCEPTIONS + try { + return callback(); + } catch (const Error& e) { + e.ThrowAsJavaScriptException(); + return nullptr; + } +#else // NAPI_CPP_EXCEPTIONS + // When C++ exceptions are disabled, errors are immediately thrown as JS + // exceptions, so there is no need to catch and rethrow them here. + return callback(); +#endif // NAPI_CPP_EXCEPTIONS +} + +// For use in JS to C++ void callback wrappers to catch any Napi::Error +// exceptions and rethrow them as JavaScript exceptions before returning from +// the callback. +template +inline void WrapVoidCallback(Callable callback) { +#ifdef NAPI_CPP_EXCEPTIONS + try { + callback(); + } catch (const Error& e) { + e.ThrowAsJavaScriptException(); + } +#else // NAPI_CPP_EXCEPTIONS + // When C++ exceptions are disabled, errors are immediately thrown as JS + // exceptions, so there is no need to catch and rethrow them here. 
+ callback(); +#endif // NAPI_CPP_EXCEPTIONS +} + +template +struct CallbackData { + static inline napi_value Wrapper(napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + CallbackData* callbackData = + static_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + return callbackData->callback(callbackInfo); + }); + } + + Callable callback; + void* data; +}; + +template +struct CallbackData { + static inline napi_value Wrapper(napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + CallbackData* callbackData = + static_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + callbackData->callback(callbackInfo); + return nullptr; + }); + } + + Callable callback; + void* data; +}; + +template +napi_value TemplatedVoidCallback(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + CallbackInfo cbInfo(env, info); + Callback(cbInfo); + return nullptr; + }); +} + +template +napi_value TemplatedCallback(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + CallbackInfo cbInfo(env, info); + return Callback(cbInfo); + }); +} + +template +napi_value TemplatedInstanceCallback(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + CallbackInfo cbInfo(env, info); + T* instance = T::Unwrap(cbInfo.This().As()); + return instance ? (instance->*UnwrapCallback)(cbInfo) : Napi::Value(); + }); +} + +template +napi_value TemplatedInstanceVoidCallback(napi_env env, napi_callback_info info) + NAPI_NOEXCEPT { + return details::WrapCallback([&] { + CallbackInfo cbInfo(env, info); + T* instance = T::Unwrap(cbInfo.This().As()); + if (instance) (instance->*UnwrapCallback)(cbInfo); + return nullptr; + }); +} + +template +struct FinalizeData { + static inline void Wrapper(napi_env env, + void* data, + void* finalizeHint) NAPI_NOEXCEPT { + WrapVoidCallback([&] { + FinalizeData* finalizeData = static_cast(finalizeHint); + finalizeData->callback(Env(env), static_cast(data)); + delete finalizeData; + }); + } + + static inline void WrapperWithHint(napi_env env, + void* data, + void* finalizeHint) NAPI_NOEXCEPT { + WrapVoidCallback([&] { + FinalizeData* finalizeData = static_cast(finalizeHint); + finalizeData->callback( + Env(env), static_cast(data), finalizeData->hint); + delete finalizeData; + }); + } + + Finalizer callback; + Hint* hint; +}; + +#if (NAPI_VERSION > 3 && NAPI_HAS_THREADS) +template , + typename FinalizerDataType = void> +struct ThreadSafeFinalize { + static inline void Wrapper(napi_env env, + void* rawFinalizeData, + void* /* rawContext */) { + if (rawFinalizeData == nullptr) return; + + ThreadSafeFinalize* finalizeData = + static_cast(rawFinalizeData); + finalizeData->callback(Env(env)); + delete finalizeData; + } + + static inline void FinalizeWrapperWithData(napi_env env, + void* rawFinalizeData, + void* /* rawContext */) { + if (rawFinalizeData == nullptr) return; + + ThreadSafeFinalize* finalizeData = + static_cast(rawFinalizeData); + finalizeData->callback(Env(env), finalizeData->data); + delete finalizeData; + } + + static inline void FinalizeWrapperWithContext(napi_env env, + void* rawFinalizeData, + void* rawContext) { + if (rawFinalizeData == nullptr) return; + + ThreadSafeFinalize* finalizeData = + static_cast(rawFinalizeData); + finalizeData->callback(Env(env), static_cast(rawContext)); + delete finalizeData; + 
} + + static inline void FinalizeFinalizeWrapperWithDataAndContext( + napi_env env, void* rawFinalizeData, void* rawContext) { + if (rawFinalizeData == nullptr) return; + + ThreadSafeFinalize* finalizeData = + static_cast(rawFinalizeData); + finalizeData->callback( + Env(env), finalizeData->data, static_cast(rawContext)); + delete finalizeData; + } + + FinalizerDataType* data; + Finalizer callback; +}; + +template +inline typename std::enable_if(nullptr)>::type +CallJsWrapper(napi_env env, napi_value jsCallback, void* context, void* data) { + details::WrapVoidCallback([&]() { + call(env, + Function(env, jsCallback), + static_cast(context), + static_cast(data)); + }); +} + +template +inline typename std::enable_if(nullptr)>::type +CallJsWrapper(napi_env env, + napi_value jsCallback, + void* /*context*/, + void* /*data*/) { + details::WrapVoidCallback([&]() { + if (jsCallback != nullptr) { + Function(env, jsCallback).Call(0, nullptr); + } + }); +} + +#if NAPI_VERSION > 4 + +template +napi_value DefaultCallbackWrapper(napi_env /*env*/, std::nullptr_t /*cb*/) { + return nullptr; +} + +template +napi_value DefaultCallbackWrapper(napi_env /*env*/, Napi::Function cb) { + return cb; +} + +#else +template +napi_value DefaultCallbackWrapper(napi_env env, Napi::Function cb) { + if (cb.IsEmpty()) { + return TSFN::EmptyFunctionFactory(env); + } + return cb; +} +#endif // NAPI_VERSION > 4 +#endif // NAPI_VERSION > 3 && NAPI_HAS_THREADS + +template +struct AccessorCallbackData { + static inline napi_value GetterWrapper(napi_env env, + napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + AccessorCallbackData* callbackData = + static_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + return callbackData->getterCallback(callbackInfo); + }); + } + + static inline napi_value SetterWrapper(napi_env env, + napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + AccessorCallbackData* callbackData = + static_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + callbackData->setterCallback(callbackInfo); + return nullptr; + }); + } + + Getter getterCallback; + Setter setterCallback; + void* data; +}; + +} // namespace details + +#ifndef NODE_ADDON_API_DISABLE_DEPRECATED +#include "napi-inl.deprecated.h" +#endif // !NODE_ADDON_API_DISABLE_DEPRECATED + +//////////////////////////////////////////////////////////////////////////////// +// Module registration +//////////////////////////////////////////////////////////////////////////////// + +// Register an add-on based on an initializer function. +#define NODE_API_MODULE(modname, regfunc) \ + static napi_value __napi_##regfunc(napi_env env, napi_value exports) { \ + return Napi::RegisterModule(env, exports, regfunc); \ + } \ + NAPI_MODULE(modname, __napi_##regfunc) + +// Register an add-on based on a subclass of `Addon` with a custom Node.js +// module name. +#define NODE_API_NAMED_ADDON(modname, classname) \ + static napi_value __napi_##classname(napi_env env, napi_value exports) { \ + return Napi::RegisterModule(env, exports, &classname::Init); \ + } \ + NAPI_MODULE(modname, __napi_##classname) + +// Register an add-on based on a subclass of `Addon` with the Node.js module +// name given by node-gyp from the `target_name` in binding.gyp. +#define NODE_API_ADDON(classname) \ + NODE_API_NAMED_ADDON(NODE_GYP_MODULE_NAME, classname) + +// Adapt the NAPI_MODULE registration function: +// - Wrap the arguments in NAPI wrappers. 
+// - Catch any NAPI errors and rethrow as JS exceptions. +inline napi_value RegisterModule(napi_env env, + napi_value exports, + ModuleRegisterCallback registerCallback) { + return details::WrapCallback([&] { + return napi_value( + registerCallback(Napi::Env(env), Napi::Object(env, exports))); + }); +} + +//////////////////////////////////////////////////////////////////////////////// +// Maybe class +//////////////////////////////////////////////////////////////////////////////// + +template +bool Maybe::IsNothing() const { + return !_has_value; +} + +template +bool Maybe::IsJust() const { + return _has_value; +} + +template +void Maybe::Check() const { + NAPI_CHECK(IsJust(), "Napi::Maybe::Check", "Maybe value is Nothing."); +} + +template +T Maybe::Unwrap() const { + NAPI_CHECK(IsJust(), "Napi::Maybe::Unwrap", "Maybe value is Nothing."); + return _value; +} + +template +T Maybe::UnwrapOr(const T& default_value) const { + return _has_value ? _value : default_value; +} + +template +bool Maybe::UnwrapTo(T* out) const { + if (IsJust()) { + *out = _value; + return true; + }; + return false; +} + +template +bool Maybe::operator==(const Maybe& other) const { + return (IsJust() == other.IsJust()) && + (!IsJust() || Unwrap() == other.Unwrap()); +} + +template +bool Maybe::operator!=(const Maybe& other) const { + return !operator==(other); +} + +template +Maybe::Maybe() : _has_value(false) {} + +template +Maybe::Maybe(const T& t) : _has_value(true), _value(t) {} + +template +inline Maybe Nothing() { + return Maybe(); +} + +template +inline Maybe Just(const T& t) { + return Maybe(t); +} + +//////////////////////////////////////////////////////////////////////////////// +// Env class +//////////////////////////////////////////////////////////////////////////////// + +inline Env::Env(napi_env env) : _env(env) {} + +inline Env::operator napi_env() const { + return _env; +} + +inline Object Env::Global() const { + napi_value value; + napi_status status = napi_get_global(*this, &value); + NAPI_THROW_IF_FAILED(*this, status, Object()); + return Object(*this, value); +} + +inline Value Env::Undefined() const { + napi_value value; + napi_status status = napi_get_undefined(*this, &value); + NAPI_THROW_IF_FAILED(*this, status, Value()); + return Value(*this, value); +} + +inline Value Env::Null() const { + napi_value value; + napi_status status = napi_get_null(*this, &value); + NAPI_THROW_IF_FAILED(*this, status, Value()); + return Value(*this, value); +} + +inline bool Env::IsExceptionPending() const { + bool result; + napi_status status = napi_is_exception_pending(_env, &result); + if (status != napi_ok) + result = false; // Checking for a pending exception shouldn't throw. + return result; +} + +inline Error Env::GetAndClearPendingException() const { + napi_value value; + napi_status status = napi_get_and_clear_last_exception(_env, &value); + if (status != napi_ok) { + // Don't throw another exception when failing to get the exception! 
+ return Error(); + } + return Error(_env, value); +} + +inline MaybeOrValue Env::RunScript(const char* utf8script) const { + String script = String::New(_env, utf8script); + return RunScript(script); +} + +inline MaybeOrValue Env::RunScript(const std::string& utf8script) const { + return RunScript(utf8script.c_str()); +} + +inline MaybeOrValue Env::RunScript(String script) const { + napi_value result; + napi_status status = napi_run_script(_env, script, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Value(_env, result), Napi::Value); +} + +#if NAPI_VERSION > 2 +template +void Env::CleanupHook::Wrapper(void* data) NAPI_NOEXCEPT { + auto* cleanupData = + static_cast::CleanupData*>( + data); + cleanupData->hook(); + delete cleanupData; +} + +template +void Env::CleanupHook::WrapperWithArg(void* data) NAPI_NOEXCEPT { + auto* cleanupData = + static_cast::CleanupData*>( + data); + cleanupData->hook(static_cast(cleanupData->arg)); + delete cleanupData; +} +#endif // NAPI_VERSION > 2 + +#if NAPI_VERSION > 5 +template fini> +inline void Env::SetInstanceData(T* data) const { + napi_status status = napi_set_instance_data( + _env, + data, + [](napi_env env, void* data, void*) { fini(env, static_cast(data)); }, + nullptr); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +template fini> +inline void Env::SetInstanceData(DataType* data, HintType* hint) const { + napi_status status = napi_set_instance_data( + _env, + data, + [](napi_env env, void* data, void* hint) { + fini(env, static_cast(data), static_cast(hint)); + }, + hint); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +template +inline T* Env::GetInstanceData() const { + void* data = nullptr; + + napi_status status = napi_get_instance_data(_env, &data); + NAPI_THROW_IF_FAILED(_env, status, nullptr); + + return static_cast(data); +} + +template +void Env::DefaultFini(Env, T* data) { + delete data; +} + +template +void Env::DefaultFiniWithHint(Env, DataType* data, HintType*) { + delete data; +} +#endif // NAPI_VERSION > 5 + +#if NAPI_VERSION > 8 +inline const char* Env::GetModuleFileName() const { + const char* result; + napi_status status = node_api_get_module_file_name(_env, &result); + NAPI_THROW_IF_FAILED(*this, status, nullptr); + return result; +} +#endif // NAPI_VERSION > 8 +//////////////////////////////////////////////////////////////////////////////// +// Value class +//////////////////////////////////////////////////////////////////////////////// + +inline Value::Value() : _env(nullptr), _value(nullptr) {} + +inline Value::Value(napi_env env, napi_value value) + : _env(env), _value(value) {} + +inline Value::operator napi_value() const { + return _value; +} + +inline bool Value::operator==(const Value& other) const { + return StrictEquals(other); +} + +inline bool Value::operator!=(const Value& other) const { + return !this->operator==(other); +} + +inline bool Value::StrictEquals(const Value& other) const { + bool result; + napi_status status = napi_strict_equals(_env, *this, other, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline Napi::Env Value::Env() const { + return Napi::Env(_env); +} + +inline bool Value::IsEmpty() const { + return _value == nullptr; +} + +inline napi_valuetype Value::Type() const { + if (IsEmpty()) { + return napi_undefined; + } + + napi_valuetype type; + napi_status status = napi_typeof(_env, _value, &type); + NAPI_THROW_IF_FAILED(_env, status, napi_undefined); + return type; +} + +inline bool Value::IsUndefined() const { + return Type() == napi_undefined; 
+} + +inline bool Value::IsNull() const { + return Type() == napi_null; +} + +inline bool Value::IsBoolean() const { + return Type() == napi_boolean; +} + +inline bool Value::IsNumber() const { + return Type() == napi_number; +} + +#if NAPI_VERSION > 5 +inline bool Value::IsBigInt() const { + return Type() == napi_bigint; +} +#endif // NAPI_VERSION > 5 + +#if (NAPI_VERSION > 4) +inline bool Value::IsDate() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_date(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} +#endif + +inline bool Value::IsString() const { + return Type() == napi_string; +} + +inline bool Value::IsSymbol() const { + return Type() == napi_symbol; +} + +inline bool Value::IsArray() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_array(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsArrayBuffer() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_arraybuffer(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsTypedArray() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_typedarray(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsObject() const { + return Type() == napi_object || IsFunction(); +} + +inline bool Value::IsFunction() const { + return Type() == napi_function; +} + +inline bool Value::IsPromise() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_promise(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsDataView() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_dataview(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsBuffer() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_buffer(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsExternal() const { + return Type() == napi_external; +} + +template +inline T Value::As() const { +#ifdef NODE_ADDON_API_ENABLE_TYPE_CHECK_ON_AS + T::CheckCast(_env, _value); +#endif + return T(_env, _value); +} + +inline MaybeOrValue Value::ToBoolean() const { + napi_value result; + napi_status status = napi_coerce_to_bool(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Boolean(_env, result), Napi::Boolean); +} + +inline MaybeOrValue Value::ToNumber() const { + napi_value result; + napi_status status = napi_coerce_to_number(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Number(_env, result), Napi::Number); +} + +inline MaybeOrValue Value::ToString() const { + napi_value result; + napi_status status = napi_coerce_to_string(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::String(_env, result), Napi::String); +} + +inline MaybeOrValue Value::ToObject() const { + napi_value result; + napi_status status = napi_coerce_to_object(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Object(_env, result), Napi::Object); +} + 
+//////////////////////////////////////////////////////////////////////////////// +// Boolean class +//////////////////////////////////////////////////////////////////////////////// + +inline Boolean Boolean::New(napi_env env, bool val) { + napi_value value; + napi_status status = napi_get_boolean(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, Boolean()); + return Boolean(env, value); +} + +inline void Boolean::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "Boolean::CheckCast", "empty value"); + + napi_valuetype type; + napi_status status = napi_typeof(env, value, &type); + NAPI_CHECK(status == napi_ok, "Boolean::CheckCast", "napi_typeof failed"); + NAPI_CHECK( + type == napi_boolean, "Boolean::CheckCast", "value is not napi_boolean"); +} + +inline Boolean::Boolean() : Napi::Value() {} + +inline Boolean::Boolean(napi_env env, napi_value value) + : Napi::Value(env, value) {} + +inline Boolean::operator bool() const { + return Value(); +} + +inline bool Boolean::Value() const { + bool result; + napi_status status = napi_get_value_bool(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +//////////////////////////////////////////////////////////////////////////////// +// Number class +//////////////////////////////////////////////////////////////////////////////// + +inline Number Number::New(napi_env env, double val) { + napi_value value; + napi_status status = napi_create_double(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, Number()); + return Number(env, value); +} + +inline void Number::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "Number::CheckCast", "empty value"); + + napi_valuetype type; + napi_status status = napi_typeof(env, value, &type); + NAPI_CHECK(status == napi_ok, "Number::CheckCast", "napi_typeof failed"); + NAPI_CHECK( + type == napi_number, "Number::CheckCast", "value is not napi_number"); +} + +inline Number::Number() : Value() {} + +inline Number::Number(napi_env env, napi_value value) : Value(env, value) {} + +inline Number::operator int32_t() const { + return Int32Value(); +} + +inline Number::operator uint32_t() const { + return Uint32Value(); +} + +inline Number::operator int64_t() const { + return Int64Value(); +} + +inline Number::operator float() const { + return FloatValue(); +} + +inline Number::operator double() const { + return DoubleValue(); +} + +inline int32_t Number::Int32Value() const { + int32_t result; + napi_status status = napi_get_value_int32(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline uint32_t Number::Uint32Value() const { + uint32_t result; + napi_status status = napi_get_value_uint32(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline int64_t Number::Int64Value() const { + int64_t result; + napi_status status = napi_get_value_int64(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline float Number::FloatValue() const { + return static_cast(DoubleValue()); +} + +inline double Number::DoubleValue() const { + double result; + napi_status status = napi_get_value_double(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +#if NAPI_VERSION > 5 +//////////////////////////////////////////////////////////////////////////////// +// BigInt Class +//////////////////////////////////////////////////////////////////////////////// + +inline BigInt BigInt::New(napi_env env, int64_t 
val) { + napi_value value; + napi_status status = napi_create_bigint_int64(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, BigInt()); + return BigInt(env, value); +} + +inline BigInt BigInt::New(napi_env env, uint64_t val) { + napi_value value; + napi_status status = napi_create_bigint_uint64(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, BigInt()); + return BigInt(env, value); +} + +inline BigInt BigInt::New(napi_env env, + int sign_bit, + size_t word_count, + const uint64_t* words) { + napi_value value; + napi_status status = + napi_create_bigint_words(env, sign_bit, word_count, words, &value); + NAPI_THROW_IF_FAILED(env, status, BigInt()); + return BigInt(env, value); +} + +inline void BigInt::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "BigInt::CheckCast", "empty value"); + + napi_valuetype type; + napi_status status = napi_typeof(env, value, &type); + NAPI_CHECK(status == napi_ok, "BigInt::CheckCast", "napi_typeof failed"); + NAPI_CHECK( + type == napi_bigint, "BigInt::CheckCast", "value is not napi_bigint"); +} + +inline BigInt::BigInt() : Value() {} + +inline BigInt::BigInt(napi_env env, napi_value value) : Value(env, value) {} + +inline int64_t BigInt::Int64Value(bool* lossless) const { + int64_t result; + napi_status status = + napi_get_value_bigint_int64(_env, _value, &result, lossless); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline uint64_t BigInt::Uint64Value(bool* lossless) const { + uint64_t result; + napi_status status = + napi_get_value_bigint_uint64(_env, _value, &result, lossless); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline size_t BigInt::WordCount() const { + size_t word_count; + napi_status status = + napi_get_value_bigint_words(_env, _value, nullptr, &word_count, nullptr); + NAPI_THROW_IF_FAILED(_env, status, 0); + return word_count; +} + +inline void BigInt::ToWords(int* sign_bit, + size_t* word_count, + uint64_t* words) { + napi_status status = + napi_get_value_bigint_words(_env, _value, sign_bit, word_count, words); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} +#endif // NAPI_VERSION > 5 + +#if (NAPI_VERSION > 4) +//////////////////////////////////////////////////////////////////////////////// +// Date Class +//////////////////////////////////////////////////////////////////////////////// + +inline Date Date::New(napi_env env, double val) { + napi_value value; + napi_status status = napi_create_date(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, Date()); + return Date(env, value); +} + +inline void Date::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "Date::CheckCast", "empty value"); + + bool result; + napi_status status = napi_is_date(env, value, &result); + NAPI_CHECK(status == napi_ok, "Date::CheckCast", "napi_is_date failed"); + NAPI_CHECK(result, "Date::CheckCast", "value is not date"); +} + +inline Date::Date() : Value() {} + +inline Date::Date(napi_env env, napi_value value) : Value(env, value) {} + +inline Date::operator double() const { + return ValueOf(); +} + +inline double Date::ValueOf() const { + double result; + napi_status status = napi_get_date_value(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} +#endif + +//////////////////////////////////////////////////////////////////////////////// +// Name class +//////////////////////////////////////////////////////////////////////////////// +inline void Name::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, 
"Name::CheckCast", "empty value"); + + napi_valuetype type; + napi_status status = napi_typeof(env, value, &type); + NAPI_CHECK(status == napi_ok, "Name::CheckCast", "napi_typeof failed"); + NAPI_CHECK(type == napi_string || type == napi_symbol, + "Name::CheckCast", + "value is not napi_string or napi_symbol"); +} + +inline Name::Name() : Value() {} + +inline Name::Name(napi_env env, napi_value value) : Value(env, value) {} + +//////////////////////////////////////////////////////////////////////////////// +// String class +//////////////////////////////////////////////////////////////////////////////// + +inline String String::New(napi_env env, const std::string& val) { + return String::New(env, val.c_str(), val.size()); +} + +inline String String::New(napi_env env, const std::u16string& val) { + return String::New(env, val.c_str(), val.size()); +} + +inline String String::New(napi_env env, const char* val) { + // TODO(@gabrielschulhof) Remove if-statement when core's error handling is + // available in all supported versions. + if (val == nullptr) { + // Throw an error that looks like it came from core. + NAPI_THROW_IF_FAILED(env, napi_invalid_arg, String()); + } + napi_value value; + napi_status status = + napi_create_string_utf8(env, val, std::strlen(val), &value); + NAPI_THROW_IF_FAILED(env, status, String()); + return String(env, value); +} + +inline String String::New(napi_env env, const char16_t* val) { + napi_value value; + // TODO(@gabrielschulhof) Remove if-statement when core's error handling is + // available in all supported versions. + if (val == nullptr) { + // Throw an error that looks like it came from core. + NAPI_THROW_IF_FAILED(env, napi_invalid_arg, String()); + } + napi_status status = + napi_create_string_utf16(env, val, std::u16string(val).size(), &value); + NAPI_THROW_IF_FAILED(env, status, String()); + return String(env, value); +} + +inline String String::New(napi_env env, const char* val, size_t length) { + napi_value value; + napi_status status = napi_create_string_utf8(env, val, length, &value); + NAPI_THROW_IF_FAILED(env, status, String()); + return String(env, value); +} + +inline String String::New(napi_env env, const char16_t* val, size_t length) { + napi_value value; + napi_status status = napi_create_string_utf16(env, val, length, &value); + NAPI_THROW_IF_FAILED(env, status, String()); + return String(env, value); +} + +inline void String::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "String::CheckCast", "empty value"); + + napi_valuetype type; + napi_status status = napi_typeof(env, value, &type); + NAPI_CHECK(status == napi_ok, "String::CheckCast", "napi_typeof failed"); + NAPI_CHECK( + type == napi_string, "String::CheckCast", "value is not napi_string"); +} + +inline String::String() : Name() {} + +inline String::String(napi_env env, napi_value value) : Name(env, value) {} + +inline String::operator std::string() const { + return Utf8Value(); +} + +inline String::operator std::u16string() const { + return Utf16Value(); +} + +inline std::string String::Utf8Value() const { + size_t length; + napi_status status = + napi_get_value_string_utf8(_env, _value, nullptr, 0, &length); + NAPI_THROW_IF_FAILED(_env, status, ""); + + std::string value; + value.reserve(length + 1); + value.resize(length); + status = napi_get_value_string_utf8( + _env, _value, &value[0], value.capacity(), nullptr); + NAPI_THROW_IF_FAILED(_env, status, ""); + return value; +} + +inline std::u16string String::Utf16Value() const { + size_t length; + 
napi_status status = + napi_get_value_string_utf16(_env, _value, nullptr, 0, &length); + NAPI_THROW_IF_FAILED(_env, status, NAPI_WIDE_TEXT("")); + + std::u16string value; + value.reserve(length + 1); + value.resize(length); + status = napi_get_value_string_utf16( + _env, _value, &value[0], value.capacity(), nullptr); + NAPI_THROW_IF_FAILED(_env, status, NAPI_WIDE_TEXT("")); + return value; +} + +//////////////////////////////////////////////////////////////////////////////// +// Symbol class +//////////////////////////////////////////////////////////////////////////////// + +inline Symbol Symbol::New(napi_env env, const char* description) { + napi_value descriptionValue = description != nullptr + ? String::New(env, description) + : static_cast(nullptr); + return Symbol::New(env, descriptionValue); +} + +inline Symbol Symbol::New(napi_env env, const std::string& description) { + napi_value descriptionValue = String::New(env, description); + return Symbol::New(env, descriptionValue); +} + +inline Symbol Symbol::New(napi_env env, String description) { + napi_value descriptionValue = description; + return Symbol::New(env, descriptionValue); +} + +inline Symbol Symbol::New(napi_env env, napi_value description) { + napi_value value; + napi_status status = napi_create_symbol(env, description, &value); + NAPI_THROW_IF_FAILED(env, status, Symbol()); + return Symbol(env, value); +} + +inline MaybeOrValue Symbol::WellKnown(napi_env env, + const std::string& name) { +#if defined(NODE_ADDON_API_ENABLE_MAYBE) + Value symbol_obj; + Value symbol_value; + if (Napi::Env(env).Global().Get("Symbol").UnwrapTo(&symbol_obj) && + symbol_obj.As().Get(name).UnwrapTo(&symbol_value)) { + return Just(symbol_value.As()); + } + return Nothing(); +#else + return Napi::Env(env) + .Global() + .Get("Symbol") + .As() + .Get(name) + .As(); +#endif +} + +inline MaybeOrValue Symbol::For(napi_env env, + const std::string& description) { + napi_value descriptionValue = String::New(env, description); + return Symbol::For(env, descriptionValue); +} + +inline MaybeOrValue Symbol::For(napi_env env, const char* description) { + napi_value descriptionValue = String::New(env, description); + return Symbol::For(env, descriptionValue); +} + +inline MaybeOrValue Symbol::For(napi_env env, String description) { + return Symbol::For(env, static_cast(description)); +} + +inline MaybeOrValue Symbol::For(napi_env env, napi_value description) { +#if defined(NODE_ADDON_API_ENABLE_MAYBE) + Value symbol_obj; + Value symbol_for_value; + Value symbol_value; + if (Napi::Env(env).Global().Get("Symbol").UnwrapTo(&symbol_obj) && + symbol_obj.As().Get("for").UnwrapTo(&symbol_for_value) && + symbol_for_value.As() + .Call(symbol_obj, {description}) + .UnwrapTo(&symbol_value)) { + return Just(symbol_value.As()); + } + return Nothing(); +#else + Object symbol_obj = Napi::Env(env).Global().Get("Symbol").As(); + return symbol_obj.Get("for") + .As() + .Call(symbol_obj, {description}) + .As(); +#endif +} + +inline void Symbol::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "Symbol::CheckCast", "empty value"); + + napi_valuetype type; + napi_status status = napi_typeof(env, value, &type); + NAPI_CHECK(status == napi_ok, "Symbol::CheckCast", "napi_typeof failed"); + NAPI_CHECK( + type == napi_symbol, "Symbol::CheckCast", "value is not napi_symbol"); +} + +inline Symbol::Symbol() : Name() {} + +inline Symbol::Symbol(napi_env env, napi_value value) : Name(env, value) {} + 
+//////////////////////////////////////////////////////////////////////////////// +// Automagic value creation +//////////////////////////////////////////////////////////////////////////////// + +namespace details { +template +struct vf_number { + static Number From(napi_env env, T value) { + return Number::New(env, static_cast(value)); + } +}; + +template <> +struct vf_number { + static Boolean From(napi_env env, bool value) { + return Boolean::New(env, value); + } +}; + +struct vf_utf8_charp { + static String From(napi_env env, const char* value) { + return String::New(env, value); + } +}; + +struct vf_utf16_charp { + static String From(napi_env env, const char16_t* value) { + return String::New(env, value); + } +}; +struct vf_utf8_string { + static String From(napi_env env, const std::string& value) { + return String::New(env, value); + } +}; + +struct vf_utf16_string { + static String From(napi_env env, const std::u16string& value) { + return String::New(env, value); + } +}; + +template +struct vf_fallback { + static Value From(napi_env env, const T& value) { return Value(env, value); } +}; + +template +struct disjunction : std::false_type {}; +template +struct disjunction : B {}; +template +struct disjunction + : std::conditional>::type {}; + +template +struct can_make_string + : disjunction::type, + typename std::is_convertible::type, + typename std::is_convertible::type, + typename std::is_convertible::type> {}; +} // namespace details + +template +Value Value::From(napi_env env, const T& value) { + using Helper = typename std::conditional< + std::is_integral::value || std::is_floating_point::value, + details::vf_number, + typename std::conditional::value, + String, + details::vf_fallback>::type>::type; + return Helper::From(env, value); +} + +template +String String::From(napi_env env, const T& value) { + struct Dummy {}; + using Helper = typename std::conditional< + std::is_convertible::value, + details::vf_utf8_charp, + typename std::conditional< + std::is_convertible::value, + details::vf_utf16_charp, + typename std::conditional< + std::is_convertible::value, + details::vf_utf8_string, + typename std::conditional< + std::is_convertible::value, + details::vf_utf16_string, + Dummy>::type>::type>::type>::type; + return Helper::From(env, value); +} + +//////////////////////////////////////////////////////////////////////////////// +// TypeTaggable class +//////////////////////////////////////////////////////////////////////////////// + +inline TypeTaggable::TypeTaggable() : Value() {} + +inline TypeTaggable::TypeTaggable(napi_env _env, napi_value _value) + : Value(_env, _value) {} + +#if NAPI_VERSION >= 8 + +inline void TypeTaggable::TypeTag(const napi_type_tag* type_tag) const { + napi_status status = napi_type_tag_object(_env, _value, type_tag); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline bool TypeTaggable::CheckTypeTag(const napi_type_tag* type_tag) const { + bool result; + napi_status status = + napi_check_object_type_tag(_env, _value, type_tag, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +#endif // NAPI_VERSION >= 8 + +//////////////////////////////////////////////////////////////////////////////// +// Object class +//////////////////////////////////////////////////////////////////////////////// + +template +inline Object::PropertyLValue::operator Value() const { + MaybeOrValue val = Object(_env, _object).Get(_key); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + return val.Unwrap(); +#else + return val; +#endif +} + +template +template 
+inline Object::PropertyLValue& Object::PropertyLValue::operator=( + ValueType value) { +#ifdef NODE_ADDON_API_ENABLE_MAYBE + MaybeOrValue result = +#endif + Object(_env, _object).Set(_key, value); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + result.Unwrap(); +#endif + return *this; +} + +template +inline Object::PropertyLValue::PropertyLValue(Object object, Key key) + : _env(object.Env()), _object(object), _key(key) {} + +inline Object Object::New(napi_env env) { + napi_value value; + napi_status status = napi_create_object(env, &value); + NAPI_THROW_IF_FAILED(env, status, Object()); + return Object(env, value); +} + +inline void Object::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "Object::CheckCast", "empty value"); + + napi_valuetype type; + napi_status status = napi_typeof(env, value, &type); + NAPI_CHECK(status == napi_ok, "Object::CheckCast", "napi_typeof failed"); + NAPI_CHECK( + type == napi_object, "Object::CheckCast", "value is not napi_object"); +} + +inline Object::Object() : TypeTaggable() {} + +inline Object::Object(napi_env env, napi_value value) + : TypeTaggable(env, value) {} + +inline Object::PropertyLValue Object::operator[]( + const char* utf8name) { + return PropertyLValue(*this, utf8name); +} + +inline Object::PropertyLValue Object::operator[]( + const std::string& utf8name) { + return PropertyLValue(*this, utf8name); +} + +inline Object::PropertyLValue Object::operator[](uint32_t index) { + return PropertyLValue(*this, index); +} + +inline Object::PropertyLValue Object::operator[](Value index) const { + return PropertyLValue(*this, index); +} + +inline MaybeOrValue Object::operator[](const char* utf8name) const { + return Get(utf8name); +} + +inline MaybeOrValue Object::operator[]( + const std::string& utf8name) const { + return Get(utf8name); +} + +inline MaybeOrValue Object::operator[](uint32_t index) const { + return Get(index); +} + +inline MaybeOrValue Object::Has(napi_value key) const { + bool result; + napi_status status = napi_has_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Has(Value key) const { + bool result; + napi_status status = napi_has_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Has(const char* utf8name) const { + bool result; + napi_status status = napi_has_named_property(_env, _value, utf8name, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Has(const std::string& utf8name) const { + return Has(utf8name.c_str()); +} + +inline MaybeOrValue Object::HasOwnProperty(napi_value key) const { + bool result; + napi_status status = napi_has_own_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::HasOwnProperty(Value key) const { + bool result; + napi_status status = napi_has_own_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::HasOwnProperty(const char* utf8name) const { + napi_value key; + napi_status status = + napi_create_string_utf8(_env, utf8name, std::strlen(utf8name), &key); + NAPI_MAYBE_THROW_IF_FAILED(_env, status, bool); + return HasOwnProperty(key); +} + +inline MaybeOrValue Object::HasOwnProperty( + const std::string& utf8name) const { + return HasOwnProperty(utf8name.c_str()); +} + +inline MaybeOrValue Object::Get(napi_value 
key) const { + napi_value result; + napi_status status = napi_get_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, result), Value); +} + +inline MaybeOrValue Object::Get(Value key) const { + napi_value result; + napi_status status = napi_get_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, result), Value); +} + +inline MaybeOrValue Object::Get(const char* utf8name) const { + napi_value result; + napi_status status = napi_get_named_property(_env, _value, utf8name, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, result), Value); +} + +inline MaybeOrValue Object::Get(const std::string& utf8name) const { + return Get(utf8name.c_str()); +} + +template +inline MaybeOrValue Object::Set(napi_value key, + const ValueType& value) const { + napi_status status = + napi_set_property(_env, _value, key, Value::From(_env, value)); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +template +inline MaybeOrValue Object::Set(Value key, const ValueType& value) const { + napi_status status = + napi_set_property(_env, _value, key, Value::From(_env, value)); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +template +inline MaybeOrValue Object::Set(const char* utf8name, + const ValueType& value) const { + napi_status status = + napi_set_named_property(_env, _value, utf8name, Value::From(_env, value)); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +template +inline MaybeOrValue Object::Set(const std::string& utf8name, + const ValueType& value) const { + return Set(utf8name.c_str(), value); +} + +inline MaybeOrValue Object::Delete(napi_value key) const { + bool result; + napi_status status = napi_delete_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Delete(Value key) const { + bool result; + napi_status status = napi_delete_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Delete(const char* utf8name) const { + return Delete(String::New(_env, utf8name)); +} + +inline MaybeOrValue Object::Delete(const std::string& utf8name) const { + return Delete(String::New(_env, utf8name)); +} + +inline MaybeOrValue Object::Has(uint32_t index) const { + bool result; + napi_status status = napi_has_element(_env, _value, index, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Get(uint32_t index) const { + napi_value value; + napi_status status = napi_get_element(_env, _value, index, &value); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, value), Value); +} + +template +inline MaybeOrValue Object::Set(uint32_t index, + const ValueType& value) const { + napi_status status = + napi_set_element(_env, _value, index, Value::From(_env, value)); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::Delete(uint32_t index) const { + bool result; + napi_status status = napi_delete_element(_env, _value, index, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::GetPropertyNames() const { + napi_value result; + napi_status status = napi_get_property_names(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Array(_env, result), Array); +} + +inline MaybeOrValue 
Object::DefineProperty( + const PropertyDescriptor& property) const { + napi_status status = napi_define_properties( + _env, + _value, + 1, + reinterpret_cast(&property)); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::DefineProperties( + const std::initializer_list& properties) const { + napi_status status = napi_define_properties( + _env, + _value, + properties.size(), + reinterpret_cast(properties.begin())); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::DefineProperties( + const std::vector& properties) const { + napi_status status = napi_define_properties( + _env, + _value, + properties.size(), + reinterpret_cast(properties.data())); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::InstanceOf( + const Function& constructor) const { + bool result; + napi_status status = napi_instanceof(_env, _value, constructor, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +template +inline void Object::AddFinalizer(Finalizer finalizeCallback, T* data) const { + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), nullptr}); + napi_status status = + details::AttachData::Wrapper>( + _env, *this, data, finalizeData); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +template +inline void Object::AddFinalizer(Finalizer finalizeCallback, + T* data, + Hint* finalizeHint) const { + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), finalizeHint}); + napi_status status = details:: + AttachData::WrapperWithHint>( + _env, *this, data, finalizeData); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +#ifdef NAPI_CPP_EXCEPTIONS +inline Object::const_iterator::const_iterator(const Object* object, + const Type type) { + _object = object; + _keys = object->GetPropertyNames(); + _index = type == Type::BEGIN ? 0 : _keys.Length(); +} + +inline Object::const_iterator Napi::Object::begin() const { + const_iterator it(this, Object::const_iterator::Type::BEGIN); + return it; +} + +inline Object::const_iterator Napi::Object::end() const { + const_iterator it(this, Object::const_iterator::Type::END); + return it; +} + +inline Object::const_iterator& Object::const_iterator::operator++() { + ++_index; + return *this; +} + +inline bool Object::const_iterator::operator==( + const const_iterator& other) const { + return _index == other._index; +} + +inline bool Object::const_iterator::operator!=( + const const_iterator& other) const { + return _index != other._index; +} + +inline const std::pair> +Object::const_iterator::operator*() const { + const Value key = _keys[_index]; + const PropertyLValue value = (*_object)[key]; + return {key, value}; +} + +inline Object::iterator::iterator(Object* object, const Type type) { + _object = object; + _keys = object->GetPropertyNames(); + _index = type == Type::BEGIN ? 
0 : _keys.Length(); +} + +inline Object::iterator Napi::Object::begin() { + iterator it(this, Object::iterator::Type::BEGIN); + return it; +} + +inline Object::iterator Napi::Object::end() { + iterator it(this, Object::iterator::Type::END); + return it; +} + +inline Object::iterator& Object::iterator::operator++() { + ++_index; + return *this; +} + +inline bool Object::iterator::operator==(const iterator& other) const { + return _index == other._index; +} + +inline bool Object::iterator::operator!=(const iterator& other) const { + return _index != other._index; +} + +inline std::pair> +Object::iterator::operator*() { + Value key = _keys[_index]; + PropertyLValue value = (*_object)[key]; + return {key, value}; +} +#endif // NAPI_CPP_EXCEPTIONS + +#if NAPI_VERSION >= 8 +inline MaybeOrValue Object::Freeze() const { + napi_status status = napi_object_freeze(_env, _value); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::Seal() const { + napi_status status = napi_object_seal(_env, _value); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} +#endif // NAPI_VERSION >= 8 + +//////////////////////////////////////////////////////////////////////////////// +// External class +//////////////////////////////////////////////////////////////////////////////// + +template +inline External External::New(napi_env env, T* data) { + napi_value value; + napi_status status = + napi_create_external(env, data, nullptr, nullptr, &value); + NAPI_THROW_IF_FAILED(env, status, External()); + return External(env, value); +} + +template +template +inline External External::New(napi_env env, + T* data, + Finalizer finalizeCallback) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), nullptr}); + napi_status status = + napi_create_external(env, + data, + details::FinalizeData::Wrapper, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, External()); + } + return External(env, value); +} + +template +template +inline External External::New(napi_env env, + T* data, + Finalizer finalizeCallback, + Hint* finalizeHint) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), finalizeHint}); + napi_status status = napi_create_external( + env, + data, + details::FinalizeData::WrapperWithHint, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, External()); + } + return External(env, value); +} + +template +inline void External::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "External::CheckCast", "empty value"); + + napi_valuetype type; + napi_status status = napi_typeof(env, value, &type); + NAPI_CHECK(status == napi_ok, "External::CheckCast", "napi_typeof failed"); + NAPI_CHECK(type == napi_external, + "External::CheckCast", + "value is not napi_external"); +} + +template +inline External::External() : TypeTaggable() {} + +template +inline External::External(napi_env env, napi_value value) + : TypeTaggable(env, value) {} + +template +inline T* External::Data() const { + void* data; + napi_status status = napi_get_value_external(_env, _value, &data); + NAPI_THROW_IF_FAILED(_env, status, nullptr); + return reinterpret_cast(data); +} + +//////////////////////////////////////////////////////////////////////////////// +// Array class 
+//////////////////////////////////////////////////////////////////////////////// + +inline Array Array::New(napi_env env) { + napi_value value; + napi_status status = napi_create_array(env, &value); + NAPI_THROW_IF_FAILED(env, status, Array()); + return Array(env, value); +} + +inline Array Array::New(napi_env env, size_t length) { + napi_value value; + napi_status status = napi_create_array_with_length(env, length, &value); + NAPI_THROW_IF_FAILED(env, status, Array()); + return Array(env, value); +} + +inline void Array::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "Array::CheckCast", "empty value"); + + bool result; + napi_status status = napi_is_array(env, value, &result); + NAPI_CHECK(status == napi_ok, "Array::CheckCast", "napi_is_array failed"); + NAPI_CHECK(result, "Array::CheckCast", "value is not array"); +} + +inline Array::Array() : Object() {} + +inline Array::Array(napi_env env, napi_value value) : Object(env, value) {} + +inline uint32_t Array::Length() const { + uint32_t result; + napi_status status = napi_get_array_length(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +//////////////////////////////////////////////////////////////////////////////// +// ArrayBuffer class +//////////////////////////////////////////////////////////////////////////////// + +inline ArrayBuffer ArrayBuffer::New(napi_env env, size_t byteLength) { + napi_value value; + void* data; + napi_status status = napi_create_arraybuffer(env, byteLength, &data, &value); + NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); + + return ArrayBuffer(env, value); +} + +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED +inline ArrayBuffer ArrayBuffer::New(napi_env env, + void* externalData, + size_t byteLength) { + napi_value value; + napi_status status = napi_create_external_arraybuffer( + env, externalData, byteLength, nullptr, nullptr, &value); + NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); + + return ArrayBuffer(env, value); +} + +template <typename Finalizer> +inline ArrayBuffer ArrayBuffer::New(napi_env env, + void* externalData, + size_t byteLength, + Finalizer finalizeCallback) { + napi_value value; + details::FinalizeData<void, Finalizer>* finalizeData = + new details::FinalizeData<void, Finalizer>( + {std::move(finalizeCallback), nullptr}); + napi_status status = napi_create_external_arraybuffer( + env, + externalData, + byteLength, + details::FinalizeData<void, Finalizer>::Wrapper, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); + } + + return ArrayBuffer(env, value); +} + +template <typename Finalizer, typename Hint> +inline ArrayBuffer ArrayBuffer::New(napi_env env, + void* externalData, + size_t byteLength, + Finalizer finalizeCallback, + Hint* finalizeHint) { + napi_value value; + details::FinalizeData<void, Finalizer, Hint>* finalizeData = + new details::FinalizeData<void, Finalizer, Hint>( + {std::move(finalizeCallback), finalizeHint}); + napi_status status = napi_create_external_arraybuffer( + env, + externalData, + byteLength, + details::FinalizeData<void, Finalizer, Hint>::WrapperWithHint, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); + } + + return ArrayBuffer(env, value); +} +#endif // NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + +inline void ArrayBuffer::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "ArrayBuffer::CheckCast", "empty value"); + + bool result; + napi_status status = napi_is_arraybuffer(env, value, &result); + NAPI_CHECK(status == napi_ok, + "ArrayBuffer::CheckCast", + "napi_is_arraybuffer failed"); + 
NAPI_CHECK(result, "ArrayBuffer::CheckCast", "value is not arraybuffer"); +} + +inline ArrayBuffer::ArrayBuffer() : Object() {} + +inline ArrayBuffer::ArrayBuffer(napi_env env, napi_value value) + : Object(env, value) {} + +inline void* ArrayBuffer::Data() { + void* data; + napi_status status = napi_get_arraybuffer_info(_env, _value, &data, nullptr); + NAPI_THROW_IF_FAILED(_env, status, nullptr); + return data; +} + +inline size_t ArrayBuffer::ByteLength() { + size_t length; + napi_status status = + napi_get_arraybuffer_info(_env, _value, nullptr, &length); + NAPI_THROW_IF_FAILED(_env, status, 0); + return length; +} + +#if NAPI_VERSION >= 7 +inline bool ArrayBuffer::IsDetached() const { + bool detached; + napi_status status = napi_is_detached_arraybuffer(_env, _value, &detached); + NAPI_THROW_IF_FAILED(_env, status, false); + return detached; +} + +inline void ArrayBuffer::Detach() { + napi_status status = napi_detach_arraybuffer(_env, _value); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} +#endif // NAPI_VERSION >= 7 + +//////////////////////////////////////////////////////////////////////////////// +// DataView class +//////////////////////////////////////////////////////////////////////////////// +inline DataView DataView::New(napi_env env, Napi::ArrayBuffer arrayBuffer) { + return New(env, arrayBuffer, 0, arrayBuffer.ByteLength()); +} + +inline DataView DataView::New(napi_env env, + Napi::ArrayBuffer arrayBuffer, + size_t byteOffset) { + if (byteOffset > arrayBuffer.ByteLength()) { + NAPI_THROW(RangeError::New( + env, "Start offset is outside the bounds of the buffer"), + DataView()); + } + return New( + env, arrayBuffer, byteOffset, arrayBuffer.ByteLength() - byteOffset); +} + +inline DataView DataView::New(napi_env env, + Napi::ArrayBuffer arrayBuffer, + size_t byteOffset, + size_t byteLength) { + if (byteOffset + byteLength > arrayBuffer.ByteLength()) { + NAPI_THROW(RangeError::New(env, "Invalid DataView length"), DataView()); + } + napi_value value; + napi_status status = + napi_create_dataview(env, byteLength, arrayBuffer, byteOffset, &value); + NAPI_THROW_IF_FAILED(env, status, DataView()); + return DataView(env, value); +} + +inline void DataView::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "DataView::CheckCast", "empty value"); + + bool result; + napi_status status = napi_is_dataview(env, value, &result); + NAPI_CHECK( + status == napi_ok, "DataView::CheckCast", "napi_is_dataview failed"); + NAPI_CHECK(result, "DataView::CheckCast", "value is not dataview"); +} + +inline DataView::DataView() : Object() {} + +inline DataView::DataView(napi_env env, napi_value value) : Object(env, value) { + napi_status status = napi_get_dataview_info(_env, + _value /* dataView */, + &_length /* byteLength */, + &_data /* data */, + nullptr /* arrayBuffer */, + nullptr /* byteOffset */); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline Napi::ArrayBuffer DataView::ArrayBuffer() const { + napi_value arrayBuffer; + napi_status status = napi_get_dataview_info(_env, + _value /* dataView */, + nullptr /* byteLength */, + nullptr /* data */, + &arrayBuffer /* arrayBuffer */, + nullptr /* byteOffset */); + NAPI_THROW_IF_FAILED(_env, status, Napi::ArrayBuffer()); + return Napi::ArrayBuffer(_env, arrayBuffer); +} + +inline size_t DataView::ByteOffset() const { + size_t byteOffset; + napi_status status = napi_get_dataview_info(_env, + _value /* dataView */, + nullptr /* byteLength */, + nullptr /* data */, + nullptr /* arrayBuffer */, + &byteOffset /* byteOffset */); + 
NAPI_THROW_IF_FAILED(_env, status, 0); + return byteOffset; +} + +inline size_t DataView::ByteLength() const { + return _length; +} + +inline void* DataView::Data() const { + return _data; +} + +inline float DataView::GetFloat32(size_t byteOffset) const { + return ReadData<float>(byteOffset); +} + +inline double DataView::GetFloat64(size_t byteOffset) const { + return ReadData<double>(byteOffset); +} + +inline int8_t DataView::GetInt8(size_t byteOffset) const { + return ReadData<int8_t>(byteOffset); +} + +inline int16_t DataView::GetInt16(size_t byteOffset) const { + return ReadData<int16_t>(byteOffset); +} + +inline int32_t DataView::GetInt32(size_t byteOffset) const { + return ReadData<int32_t>(byteOffset); +} + +inline uint8_t DataView::GetUint8(size_t byteOffset) const { + return ReadData<uint8_t>(byteOffset); +} + +inline uint16_t DataView::GetUint16(size_t byteOffset) const { + return ReadData<uint16_t>(byteOffset); +} + +inline uint32_t DataView::GetUint32(size_t byteOffset) const { + return ReadData<uint32_t>(byteOffset); +} + +inline void DataView::SetFloat32(size_t byteOffset, float value) const { + WriteData<float>(byteOffset, value); +} + +inline void DataView::SetFloat64(size_t byteOffset, double value) const { + WriteData<double>(byteOffset, value); +} + +inline void DataView::SetInt8(size_t byteOffset, int8_t value) const { + WriteData<int8_t>(byteOffset, value); +} + +inline void DataView::SetInt16(size_t byteOffset, int16_t value) const { + WriteData<int16_t>(byteOffset, value); +} + +inline void DataView::SetInt32(size_t byteOffset, int32_t value) const { + WriteData<int32_t>(byteOffset, value); +} + +inline void DataView::SetUint8(size_t byteOffset, uint8_t value) const { + WriteData<uint8_t>(byteOffset, value); +} + +inline void DataView::SetUint16(size_t byteOffset, uint16_t value) const { + WriteData<uint16_t>(byteOffset, value); +} + +inline void DataView::SetUint32(size_t byteOffset, uint32_t value) const { + WriteData<uint32_t>(byteOffset, value); +} + +template <typename T> +inline T DataView::ReadData(size_t byteOffset) const { + if (byteOffset + sizeof(T) > _length || + byteOffset + sizeof(T) < byteOffset) { // overflow + NAPI_THROW( + RangeError::New(_env, "Offset is outside the bounds of the DataView"), + 0); + } + + return *reinterpret_cast<T*>(static_cast<uint8_t*>(_data) + byteOffset); +} + +template <typename T> +inline void DataView::WriteData(size_t byteOffset, T value) const { + if (byteOffset + sizeof(T) > _length || + byteOffset + sizeof(T) < byteOffset) { // overflow + NAPI_THROW_VOID( + RangeError::New(_env, "Offset is outside the bounds of the DataView")); + } + + *reinterpret_cast<T*>(static_cast<uint8_t*>(_data) + byteOffset) = value; +} + +//////////////////////////////////////////////////////////////////////////////// +// TypedArray class +//////////////////////////////////////////////////////////////////////////////// +inline void TypedArray::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "TypedArray::CheckCast", "empty value"); + + bool result; + napi_status status = napi_is_typedarray(env, value, &result); + NAPI_CHECK( + status == napi_ok, "TypedArray::CheckCast", "napi_is_typedarray failed"); + NAPI_CHECK(result, "TypedArray::CheckCast", "value is not typedarray"); +} + +inline TypedArray::TypedArray() + : Object(), _type(napi_typedarray_type::napi_int8_array), _length(0) {} + +inline TypedArray::TypedArray(napi_env env, napi_value value) + : Object(env, value), + _type(napi_typedarray_type::napi_int8_array), + _length(0) { + if (value != nullptr) { + napi_status status = + napi_get_typedarray_info(_env, + _value, + &const_cast<TypedArray*>(this)->_type, + &const_cast<TypedArray*>(this)->_length, + nullptr, + nullptr, + 
nullptr); + NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +inline TypedArray::TypedArray(napi_env env, + napi_value value, + napi_typedarray_type type, + size_t length) + : Object(env, value), _type(type), _length(length) {} + +inline napi_typedarray_type TypedArray::TypedArrayType() const { + return _type; +} + +inline uint8_t TypedArray::ElementSize() const { + switch (_type) { + case napi_int8_array: + case napi_uint8_array: + case napi_uint8_clamped_array: + return 1; + case napi_int16_array: + case napi_uint16_array: + return 2; + case napi_int32_array: + case napi_uint32_array: + case napi_float32_array: + return 4; + case napi_float64_array: +#if (NAPI_VERSION > 5) + case napi_bigint64_array: + case napi_biguint64_array: +#endif // (NAPI_VERSION > 5) + return 8; + default: + return 0; + } +} + +inline size_t TypedArray::ElementLength() const { + return _length; +} + +inline size_t TypedArray::ByteOffset() const { + size_t byteOffset; + napi_status status = napi_get_typedarray_info( + _env, _value, nullptr, nullptr, nullptr, nullptr, &byteOffset); + NAPI_THROW_IF_FAILED(_env, status, 0); + return byteOffset; +} + +inline size_t TypedArray::ByteLength() const { + return ElementSize() * ElementLength(); +} + +inline Napi::ArrayBuffer TypedArray::ArrayBuffer() const { + napi_value arrayBuffer; + napi_status status = napi_get_typedarray_info( + _env, _value, nullptr, nullptr, nullptr, &arrayBuffer, nullptr); + NAPI_THROW_IF_FAILED(_env, status, Napi::ArrayBuffer()); + return Napi::ArrayBuffer(_env, arrayBuffer); +} + +//////////////////////////////////////////////////////////////////////////////// +// TypedArrayOf class +//////////////////////////////////////////////////////////////////////////////// +template +inline void TypedArrayOf::CheckCast(napi_env env, napi_value value) { + TypedArray::CheckCast(env, value); + napi_typedarray_type type; + napi_status status = napi_get_typedarray_info( + env, value, &type, nullptr, nullptr, nullptr, nullptr); + NAPI_CHECK(status == napi_ok, + "TypedArrayOf::CheckCast", + "napi_is_typedarray failed"); + + NAPI_CHECK( + (type == TypedArrayTypeForPrimitiveType() || + (type == napi_uint8_clamped_array && std::is_same::value)), + "TypedArrayOf::CheckCast", + "Array type must match the template parameter. 
(Uint8 arrays may " + "optionally have the \"clamped\" array type.)"); +} + +template +inline TypedArrayOf TypedArrayOf::New(napi_env env, + size_t elementLength, + napi_typedarray_type type) { + Napi::ArrayBuffer arrayBuffer = + Napi::ArrayBuffer::New(env, elementLength * sizeof(T)); + return New(env, elementLength, arrayBuffer, 0, type); +} + +template +inline TypedArrayOf TypedArrayOf::New(napi_env env, + size_t elementLength, + Napi::ArrayBuffer arrayBuffer, + size_t bufferOffset, + napi_typedarray_type type) { + napi_value value; + napi_status status = napi_create_typedarray( + env, type, elementLength, arrayBuffer, bufferOffset, &value); + NAPI_THROW_IF_FAILED(env, status, TypedArrayOf()); + + return TypedArrayOf( + env, + value, + type, + elementLength, + reinterpret_cast(reinterpret_cast(arrayBuffer.Data()) + + bufferOffset)); +} + +template +inline TypedArrayOf::TypedArrayOf() : TypedArray(), _data(nullptr) {} + +template +inline TypedArrayOf::TypedArrayOf(napi_env env, napi_value value) + : TypedArray(env, value), _data(nullptr) { + napi_status status = napi_ok; + if (value != nullptr) { + void* data = nullptr; + status = napi_get_typedarray_info( + _env, _value, &_type, &_length, &data, nullptr, nullptr); + _data = static_cast(data); + } else { + _type = TypedArrayTypeForPrimitiveType(); + _length = 0; + } + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +template +inline TypedArrayOf::TypedArrayOf(napi_env env, + napi_value value, + napi_typedarray_type type, + size_t length, + T* data) + : TypedArray(env, value, type, length), _data(data) { + if (!(type == TypedArrayTypeForPrimitiveType() || + (type == napi_uint8_clamped_array && + std::is_same::value))) { + NAPI_THROW_VOID(TypeError::New( + env, + "Array type must match the template parameter. 
" + "(Uint8 arrays may optionally have the \"clamped\" array type.)")); + } +} + +template +inline T& TypedArrayOf::operator[](size_t index) { + return _data[index]; +} + +template +inline const T& TypedArrayOf::operator[](size_t index) const { + return _data[index]; +} + +template +inline T* TypedArrayOf::Data() { + return _data; +} + +template +inline const T* TypedArrayOf::Data() const { + return _data; +} + +//////////////////////////////////////////////////////////////////////////////// +// Function class +//////////////////////////////////////////////////////////////////////////////// + +template +inline napi_status CreateFunction(napi_env env, + const char* utf8name, + napi_callback cb, + CbData* data, + napi_value* result) { + napi_status status = + napi_create_function(env, utf8name, NAPI_AUTO_LENGTH, cb, data, result); + if (status == napi_ok) { + status = Napi::details::AttachData(env, *result, data); + } + + return status; +} + +template +inline Function Function::New(napi_env env, const char* utf8name, void* data) { + napi_value result = nullptr; + napi_status status = napi_create_function(env, + utf8name, + NAPI_AUTO_LENGTH, + details::TemplatedVoidCallback, + data, + &result); + NAPI_THROW_IF_FAILED(env, status, Function()); + return Function(env, result); +} + +template +inline Function Function::New(napi_env env, const char* utf8name, void* data) { + napi_value result = nullptr; + napi_status status = napi_create_function(env, + utf8name, + NAPI_AUTO_LENGTH, + details::TemplatedCallback, + data, + &result); + NAPI_THROW_IF_FAILED(env, status, Function()); + return Function(env, result); +} + +template +inline Function Function::New(napi_env env, + const std::string& utf8name, + void* data) { + return Function::New(env, utf8name.c_str(), data); +} + +template +inline Function Function::New(napi_env env, + const std::string& utf8name, + void* data) { + return Function::New(env, utf8name.c_str(), data); +} + +template +inline Function Function::New(napi_env env, + Callable cb, + const char* utf8name, + void* data) { + using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr))); + using CbData = details::CallbackData; + auto callbackData = new CbData{std::move(cb), data}; + + napi_value value; + napi_status status = + CreateFunction(env, utf8name, CbData::Wrapper, callbackData, &value); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, Function()); + } + + return Function(env, value); +} + +template +inline Function Function::New(napi_env env, + Callable cb, + const std::string& utf8name, + void* data) { + return New(env, cb, utf8name.c_str(), data); +} + +inline void Function::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "Function::CheckCast", "empty value"); + + napi_valuetype type; + napi_status status = napi_typeof(env, value, &type); + NAPI_CHECK(status == napi_ok, "Function::CheckCast", "napi_typeof failed"); + NAPI_CHECK(type == napi_function, + "Function::CheckCast", + "value is not napi_function"); +} + +inline Function::Function() : Object() {} + +inline Function::Function(napi_env env, napi_value value) + : Object(env, value) {} + +inline MaybeOrValue Function::operator()( + const std::initializer_list& args) const { + return Call(Env().Undefined(), args); +} + +inline MaybeOrValue Function::Call( + const std::initializer_list& args) const { + return Call(Env().Undefined(), args); +} + +inline MaybeOrValue Function::Call( + const std::vector& args) const { + return Call(Env().Undefined(), args); 
+} + +inline MaybeOrValue Function::Call( + const std::vector& args) const { + return Call(Env().Undefined(), args); +} + +inline MaybeOrValue Function::Call(size_t argc, + const napi_value* args) const { + return Call(Env().Undefined(), argc, args); +} + +inline MaybeOrValue Function::Call( + napi_value recv, const std::initializer_list& args) const { + return Call(recv, args.size(), args.begin()); +} + +inline MaybeOrValue Function::Call( + napi_value recv, const std::vector& args) const { + return Call(recv, args.size(), args.data()); +} + +inline MaybeOrValue Function::Call( + napi_value recv, const std::vector& args) const { + const size_t argc = args.size(); + const size_t stackArgsCount = 6; + napi_value stackArgs[stackArgsCount]; + std::vector heapArgs; + napi_value* argv; + if (argc <= stackArgsCount) { + argv = stackArgs; + } else { + heapArgs.resize(argc); + argv = heapArgs.data(); + } + + for (size_t index = 0; index < argc; index++) { + argv[index] = static_cast(args[index]); + } + + return Call(recv, argc, argv); +} + +inline MaybeOrValue Function::Call(napi_value recv, + size_t argc, + const napi_value* args) const { + napi_value result; + napi_status status = + napi_call_function(_env, recv, _value, argc, args, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Value(_env, result), Napi::Value); +} + +inline MaybeOrValue Function::MakeCallback( + napi_value recv, + const std::initializer_list& args, + napi_async_context context) const { + return MakeCallback(recv, args.size(), args.begin(), context); +} + +inline MaybeOrValue Function::MakeCallback( + napi_value recv, + const std::vector& args, + napi_async_context context) const { + return MakeCallback(recv, args.size(), args.data(), context); +} + +inline MaybeOrValue Function::MakeCallback( + napi_value recv, + size_t argc, + const napi_value* args, + napi_async_context context) const { + napi_value result; + napi_status status = + napi_make_callback(_env, context, recv, _value, argc, args, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Value(_env, result), Napi::Value); +} + +inline MaybeOrValue Function::New( + const std::initializer_list& args) const { + return New(args.size(), args.begin()); +} + +inline MaybeOrValue Function::New( + const std::vector& args) const { + return New(args.size(), args.data()); +} + +inline MaybeOrValue Function::New(size_t argc, + const napi_value* args) const { + napi_value result; + napi_status status = napi_new_instance(_env, _value, argc, args, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Object(_env, result), Napi::Object); +} + +//////////////////////////////////////////////////////////////////////////////// +// Promise class +//////////////////////////////////////////////////////////////////////////////// + +inline Promise::Deferred Promise::Deferred::New(napi_env env) { + return Promise::Deferred(env); +} + +inline Promise::Deferred::Deferred(napi_env env) : _env(env) { + napi_status status = napi_create_promise(_env, &_deferred, &_promise); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline Promise Promise::Deferred::Promise() const { + return Napi::Promise(_env, _promise); +} + +inline Napi::Env Promise::Deferred::Env() const { + return Napi::Env(_env); +} + +inline void Promise::Deferred::Resolve(napi_value value) const { + napi_status status = napi_resolve_deferred(_env, _deferred, value); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline void Promise::Deferred::Reject(napi_value value) const { + napi_status 
status = napi_reject_deferred(_env, _deferred, value); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline void Promise::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "Promise::CheckCast", "empty value"); + + bool result; + napi_status status = napi_is_promise(env, value, &result); + NAPI_CHECK(status == napi_ok, "Promise::CheckCast", "napi_is_promise failed"); + NAPI_CHECK(result, "Promise::CheckCast", "value is not promise"); +} + +inline Promise::Promise(napi_env env, napi_value value) : Object(env, value) {} + +//////////////////////////////////////////////////////////////////////////////// +// Buffer class +//////////////////////////////////////////////////////////////////////////////// + +template +inline Buffer Buffer::New(napi_env env, size_t length) { + napi_value value; + void* data; + napi_status status = + napi_create_buffer(env, length * sizeof(T), &data, &value); + NAPI_THROW_IF_FAILED(env, status, Buffer()); + return Buffer(env, value); +} + +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED +template +inline Buffer Buffer::New(napi_env env, T* data, size_t length) { + napi_value value; + napi_status status = napi_create_external_buffer( + env, length * sizeof(T), data, nullptr, nullptr, &value); + NAPI_THROW_IF_FAILED(env, status, Buffer()); + return Buffer(env, value); +} + +template +template +inline Buffer Buffer::New(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), nullptr}); + napi_status status = + napi_create_external_buffer(env, + length * sizeof(T), + data, + details::FinalizeData::Wrapper, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, Buffer()); + } + return Buffer(env, value); +} + +template +template +inline Buffer Buffer::New(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback, + Hint* finalizeHint) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), finalizeHint}); + napi_status status = napi_create_external_buffer( + env, + length * sizeof(T), + data, + details::FinalizeData::WrapperWithHint, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, Buffer()); + } + return Buffer(env, value); +} +#endif // NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + +template +inline Buffer Buffer::NewOrCopy(napi_env env, T* data, size_t length) { +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + napi_value value; + napi_status status = napi_create_external_buffer( + env, length * sizeof(T), data, nullptr, nullptr, &value); + if (status == details::napi_no_external_buffers_allowed) { +#endif // NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + // If we can't create an external buffer, we'll just copy the data. 
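+  // Either the embedder compiled with NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED,
+  // or napi_create_external_buffer above reported
+  // details::napi_no_external_buffers_allowed; in both cases the bytes are
+  // copied into a freshly allocated Buffer and `data` stays owned by the
+  // caller.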
+ return Buffer::Copy(env, data, length); +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + } + NAPI_THROW_IF_FAILED(env, status, Buffer()); + return Buffer(env, value); +#endif // NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED +} + +template +template +inline Buffer Buffer::NewOrCopy(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback) { + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), nullptr}); +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + napi_value value; + napi_status status = + napi_create_external_buffer(env, + length * sizeof(T), + data, + details::FinalizeData::Wrapper, + finalizeData, + &value); + if (status == details::napi_no_external_buffers_allowed) { +#endif // NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + // If we can't create an external buffer, we'll just copy the data. + Buffer ret = Buffer::Copy(env, data, length); + details::FinalizeData::Wrapper(env, data, finalizeData); + return ret; +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + } + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, Buffer()); + } + return Buffer(env, value); +#endif // NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED +} + +template +template +inline Buffer Buffer::NewOrCopy(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback, + Hint* finalizeHint) { + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), finalizeHint}); +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + napi_value value; + napi_status status = napi_create_external_buffer( + env, + length * sizeof(T), + data, + details::FinalizeData::WrapperWithHint, + finalizeData, + &value); + if (status == details::napi_no_external_buffers_allowed) { +#endif + // If we can't create an external buffer, we'll just copy the data. 
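+  // Fallback: copy the bytes, then immediately run the finalizer (with its
+  // hint) via WrapperWithHint below, since the returned Buffer no longer
+  // references the caller's `data`.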
+ Buffer ret = Buffer::Copy(env, data, length); + details::FinalizeData::WrapperWithHint( + env, data, finalizeData); + return ret; +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + } + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, Buffer()); + } + return Buffer(env, value); +#endif +} + +template +inline Buffer Buffer::Copy(napi_env env, const T* data, size_t length) { + napi_value value; + napi_status status = + napi_create_buffer_copy(env, length * sizeof(T), data, nullptr, &value); + NAPI_THROW_IF_FAILED(env, status, Buffer()); + return Buffer(env, value); +} + +template +inline void Buffer::CheckCast(napi_env env, napi_value value) { + NAPI_CHECK(value != nullptr, "Buffer::CheckCast", "empty value"); + + bool result; + napi_status status = napi_is_buffer(env, value, &result); + NAPI_CHECK(status == napi_ok, "Buffer::CheckCast", "napi_is_buffer failed"); + NAPI_CHECK(result, "Buffer::CheckCast", "value is not buffer"); +} + +template +inline Buffer::Buffer() : Uint8Array() {} + +template +inline Buffer::Buffer(napi_env env, napi_value value) + : Uint8Array(env, value) {} + +template +inline size_t Buffer::Length() const { + return ByteLength() / sizeof(T); +} + +template +inline T* Buffer::Data() const { + return reinterpret_cast(const_cast(Uint8Array::Data())); +} + +//////////////////////////////////////////////////////////////////////////////// +// Error class +//////////////////////////////////////////////////////////////////////////////// + +inline Error Error::New(napi_env env) { + napi_status status; + napi_value error = nullptr; + bool is_exception_pending; + napi_extended_error_info last_error_info_copy; + + { + // We must retrieve the last error info before doing anything else because + // doing anything else will replace the last error info. + const napi_extended_error_info* last_error_info; + status = napi_get_last_error_info(env, &last_error_info); + NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_get_last_error_info"); + + // All fields of the `napi_extended_error_info` structure gets reset in + // subsequent Node-API function calls on the same `env`. This includes a + // call to `napi_is_exception_pending()`. So here it is necessary to make a + // copy of the information as the `error_code` field is used later on. + memcpy(&last_error_info_copy, + last_error_info, + sizeof(napi_extended_error_info)); + } + + status = napi_is_exception_pending(env, &is_exception_pending); + NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_is_exception_pending"); + + // A pending exception takes precedence over any internal error status. + if (is_exception_pending) { + status = napi_get_and_clear_last_exception(env, &error); + NAPI_FATAL_IF_FAILED( + status, "Error::New", "napi_get_and_clear_last_exception"); + } else { + const char* error_message = last_error_info_copy.error_message != nullptr + ? 
last_error_info_copy.error_message + : "Error in native callback"; + + napi_value message; + status = napi_create_string_utf8( + env, error_message, std::strlen(error_message), &message); + NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_create_string_utf8"); + + switch (last_error_info_copy.error_code) { + case napi_object_expected: + case napi_string_expected: + case napi_boolean_expected: + case napi_number_expected: + status = napi_create_type_error(env, nullptr, message, &error); + break; + default: + status = napi_create_error(env, nullptr, message, &error); + break; + } + NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_create_error"); + } + + return Error(env, error); +} + +inline Error Error::New(napi_env env, const char* message) { + return Error::New( + env, message, std::strlen(message), napi_create_error); +} + +inline Error Error::New(napi_env env, const std::string& message) { + return Error::New( + env, message.c_str(), message.size(), napi_create_error); +} + +inline NAPI_NO_RETURN void Error::Fatal(const char* location, + const char* message) { + napi_fatal_error(location, NAPI_AUTO_LENGTH, message, NAPI_AUTO_LENGTH); +} + +inline Error::Error() : ObjectReference() {} + +inline Error::Error(napi_env env, napi_value value) + : ObjectReference(env, nullptr) { + if (value != nullptr) { + // Attempting to create a reference on the error object. + // If it's not a Object/Function/Symbol, this call will return an error + // status. + napi_status status = napi_create_reference(env, value, 1, &_ref); + + if (status != napi_ok) { + napi_value wrappedErrorObj; + + // Create an error object + status = napi_create_object(env, &wrappedErrorObj); + NAPI_FATAL_IF_FAILED(status, "Error::Error", "napi_create_object"); + + // property flag that we attach to show the error object is wrapped + napi_property_descriptor wrapObjFlag = { + ERROR_WRAP_VALUE(), // Unique GUID identifier since Symbol isn't a + // viable option + nullptr, + nullptr, + nullptr, + nullptr, + Value::From(env, value), + napi_enumerable, + nullptr}; + + status = napi_define_properties(env, wrappedErrorObj, 1, &wrapObjFlag); +#ifdef NODE_API_SWALLOW_UNTHROWABLE_EXCEPTIONS + if (status == napi_pending_exception) { + // Test if the pending exception was reported because the environment is + // shutting down. We assume that a status of napi_pending_exception + // coupled with the absence of an actual pending exception means that + // the environment is shutting down. If so, we replace the + // napi_pending_exception status with napi_ok. + bool is_exception_pending = false; + status = napi_is_exception_pending(env, &is_exception_pending); + if (status == napi_ok && !is_exception_pending) { + status = napi_ok; + } else { + status = napi_pending_exception; + } + } +#endif // NODE_API_SWALLOW_UNTHROWABLE_EXCEPTIONS + NAPI_FATAL_IF_FAILED(status, "Error::Error", "napi_define_properties"); + + // Create a reference on the newly wrapped object + status = napi_create_reference(env, wrappedErrorObj, 1, &_ref); + } + + // Avoid infinite recursion in the failure case. 
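+  // (Throwing a Napi::Error here would re-enter this constructor, so a
+  // failure to create the reference is reported as a fatal error instead.)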
+ NAPI_FATAL_IF_FAILED(status, "Error::Error", "napi_create_reference"); + } +} + +inline Object Error::Value() const { + if (_ref == nullptr) { + return Object(_env, nullptr); + } + + napi_value refValue; + napi_status status = napi_get_reference_value(_env, _ref, &refValue); + NAPI_THROW_IF_FAILED(_env, status, Object()); + + napi_valuetype type; + status = napi_typeof(_env, refValue, &type); + NAPI_THROW_IF_FAILED(_env, status, Object()); + + // If refValue isn't a symbol, then we proceed to whether the refValue has the + // wrapped error flag + if (type != napi_symbol) { + // We are checking if the object is wrapped + bool isWrappedObject = false; + + status = napi_has_property(_env, + refValue, + String::From(_env, ERROR_WRAP_VALUE()), + &isWrappedObject); + + // Don't care about status + if (isWrappedObject) { + napi_value unwrappedValue; + status = napi_get_property(_env, + refValue, + String::From(_env, ERROR_WRAP_VALUE()), + &unwrappedValue); + NAPI_THROW_IF_FAILED(_env, status, Object()); + + return Object(_env, unwrappedValue); + } + } + + return Object(_env, refValue); +} + +inline Error::Error(Error&& other) : ObjectReference(std::move(other)) {} + +inline Error& Error::operator=(Error&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline Error::Error(const Error& other) : ObjectReference(other) {} + +inline Error& Error::operator=(const Error& other) { + Reset(); + + _env = other.Env(); + HandleScope scope(_env); + + napi_value value = other.Value(); + if (value != nullptr) { + napi_status status = napi_create_reference(_env, value, 1, &_ref); + NAPI_THROW_IF_FAILED(_env, status, *this); + } + + return *this; +} + +inline const std::string& Error::Message() const NAPI_NOEXCEPT { + if (_message.size() == 0 && _env != nullptr) { +#ifdef NAPI_CPP_EXCEPTIONS + try { + _message = Get("message").As(); + } catch (...) { + // Catch all errors here, to include e.g. a std::bad_alloc from + // the std::string::operator=, because this method may not throw. + } +#else // NAPI_CPP_EXCEPTIONS +#if defined(NODE_ADDON_API_ENABLE_MAYBE) + Napi::Value message_val; + if (Get("message").UnwrapTo(&message_val)) { + _message = message_val.As(); + } +#else + _message = Get("message").As(); +#endif +#endif // NAPI_CPP_EXCEPTIONS + } + return _message; +} + +// we created an object on the &_ref +inline void Error::ThrowAsJavaScriptException() const { + HandleScope scope(_env); + if (!IsEmpty()) { +#ifdef NODE_API_SWALLOW_UNTHROWABLE_EXCEPTIONS + bool pendingException = false; + + // check if there is already a pending exception. If so don't try to throw a + // new one as that is not allowed/possible + napi_status status = napi_is_exception_pending(_env, &pendingException); + + if ((status != napi_ok) || + ((status == napi_ok) && (pendingException == false))) { + // We intentionally don't use `NAPI_THROW_*` macros here to ensure + // that there is no possible recursion as `ThrowAsJavaScriptException` + // is part of `NAPI_THROW_*` macro definition for noexcept. + + status = napi_throw(_env, Value()); + + if (status == napi_pending_exception) { + // The environment must be terminating as we checked earlier and there + // was no pending exception. 
In this case continuing will result + // in a fatal error and there is nothing the author has done incorrectly + // in their code that is worth flagging through a fatal error + return; + } + } else { + status = napi_pending_exception; + } +#else + // We intentionally don't use `NAPI_THROW_*` macros here to ensure + // that there is no possible recursion as `ThrowAsJavaScriptException` + // is part of `NAPI_THROW_*` macro definition for noexcept. + + napi_status status = napi_throw(_env, Value()); +#endif + +#ifdef NAPI_CPP_EXCEPTIONS + if (status != napi_ok) { + throw Error::New(_env); + } +#else // NAPI_CPP_EXCEPTIONS + NAPI_FATAL_IF_FAILED( + status, "Error::ThrowAsJavaScriptException", "napi_throw"); +#endif // NAPI_CPP_EXCEPTIONS + } +} + +#ifdef NAPI_CPP_EXCEPTIONS + +inline const char* Error::what() const NAPI_NOEXCEPT { + return Message().c_str(); +} + +#endif // NAPI_CPP_EXCEPTIONS + +inline const char* Error::ERROR_WRAP_VALUE() NAPI_NOEXCEPT { + return "4bda9e7e-4913-4dbc-95de-891cbf66598e-errorVal"; +} + +template +inline TError Error::New(napi_env env, + const char* message, + size_t length, + create_error_fn create_error) { + napi_value str; + napi_status status = napi_create_string_utf8(env, message, length, &str); + NAPI_THROW_IF_FAILED(env, status, TError()); + + napi_value error; + status = create_error(env, nullptr, str, &error); + NAPI_THROW_IF_FAILED(env, status, TError()); + + return TError(env, error); +} + +inline TypeError TypeError::New(napi_env env, const char* message) { + return Error::New( + env, message, std::strlen(message), napi_create_type_error); +} + +inline TypeError TypeError::New(napi_env env, const std::string& message) { + return Error::New( + env, message.c_str(), message.size(), napi_create_type_error); +} + +inline TypeError::TypeError() : Error() {} + +inline TypeError::TypeError(napi_env env, napi_value value) + : Error(env, value) {} + +inline RangeError RangeError::New(napi_env env, const char* message) { + return Error::New( + env, message, std::strlen(message), napi_create_range_error); +} + +inline RangeError RangeError::New(napi_env env, const std::string& message) { + return Error::New( + env, message.c_str(), message.size(), napi_create_range_error); +} + +inline RangeError::RangeError() : Error() {} + +inline RangeError::RangeError(napi_env env, napi_value value) + : Error(env, value) {} + +#if NAPI_VERSION > 8 +inline SyntaxError SyntaxError::New(napi_env env, const char* message) { + return Error::New( + env, message, std::strlen(message), node_api_create_syntax_error); +} + +inline SyntaxError SyntaxError::New(napi_env env, const std::string& message) { + return Error::New( + env, message.c_str(), message.size(), node_api_create_syntax_error); +} + +inline SyntaxError::SyntaxError() : Error() {} + +inline SyntaxError::SyntaxError(napi_env env, napi_value value) + : Error(env, value) {} +#endif // NAPI_VERSION > 8 + +//////////////////////////////////////////////////////////////////////////////// +// Reference class +//////////////////////////////////////////////////////////////////////////////// + +template +inline Reference Reference::New(const T& value, + uint32_t initialRefcount) { + napi_env env = value.Env(); + napi_value val = value; + + if (val == nullptr) { + return Reference(env, nullptr); + } + + napi_ref ref; + napi_status status = napi_create_reference(env, value, initialRefcount, &ref); + NAPI_THROW_IF_FAILED(env, status, Reference()); + + return Reference(env, ref); +} + +template +inline Reference::Reference() + : 
_env(nullptr), _ref(nullptr), _suppressDestruct(false) {} + +template +inline Reference::Reference(napi_env env, napi_ref ref) + : _env(env), _ref(ref), _suppressDestruct(false) {} + +template +inline Reference::~Reference() { + if (_ref != nullptr) { + if (!_suppressDestruct) { + napi_delete_reference(_env, _ref); + } + + _ref = nullptr; + } +} + +template +inline Reference::Reference(Reference&& other) + : _env(other._env), + _ref(other._ref), + _suppressDestruct(other._suppressDestruct) { + other._env = nullptr; + other._ref = nullptr; + other._suppressDestruct = false; +} + +template +inline Reference& Reference::operator=(Reference&& other) { + Reset(); + _env = other._env; + _ref = other._ref; + _suppressDestruct = other._suppressDestruct; + other._env = nullptr; + other._ref = nullptr; + other._suppressDestruct = false; + return *this; +} + +template +inline Reference::Reference(const Reference& other) + : _env(other._env), _ref(nullptr), _suppressDestruct(false) { + HandleScope scope(_env); + + napi_value value = other.Value(); + if (value != nullptr) { + // Copying is a limited scenario (currently only used for Error object) and + // always creates a strong reference to the given value even if the incoming + // reference is weak. + napi_status status = napi_create_reference(_env, value, 1, &_ref); + NAPI_FATAL_IF_FAILED( + status, "Reference::Reference", "napi_create_reference"); + } +} + +template +inline Reference::operator napi_ref() const { + return _ref; +} + +template +inline bool Reference::operator==(const Reference& other) const { + HandleScope scope(_env); + return this->Value().StrictEquals(other.Value()); +} + +template +inline bool Reference::operator!=(const Reference& other) const { + return !this->operator==(other); +} + +template +inline Napi::Env Reference::Env() const { + return Napi::Env(_env); +} + +template +inline bool Reference::IsEmpty() const { + return _ref == nullptr; +} + +template +inline T Reference::Value() const { + if (_ref == nullptr) { + return T(_env, nullptr); + } + + napi_value value; + napi_status status = napi_get_reference_value(_env, _ref, &value); + NAPI_THROW_IF_FAILED(_env, status, T()); + return T(_env, value); +} + +template +inline uint32_t Reference::Ref() const { + uint32_t result; + napi_status status = napi_reference_ref(_env, _ref, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +template +inline uint32_t Reference::Unref() const { + uint32_t result; + napi_status status = napi_reference_unref(_env, _ref, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +template +inline void Reference::Reset() { + if (_ref != nullptr) { + napi_status status = napi_delete_reference(_env, _ref); + NAPI_THROW_IF_FAILED_VOID(_env, status); + _ref = nullptr; + } +} + +template +inline void Reference::Reset(const T& value, uint32_t refcount) { + Reset(); + _env = value.Env(); + + napi_value val = value; + if (val != nullptr) { + napi_status status = napi_create_reference(_env, value, refcount, &_ref); + NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +template +inline void Reference::SuppressDestruct() { + _suppressDestruct = true; +} + +template +inline Reference Weak(T value) { + return Reference::New(value, 0); +} + +inline ObjectReference Weak(Object value) { + return Reference::New(value, 0); +} + +inline FunctionReference Weak(Function value) { + return Reference::New(value, 0); +} + +template +inline Reference Persistent(T value) { + return Reference::New(value, 1); +} + +inline 
ObjectReference Persistent(Object value) { + return Reference::New(value, 1); +} + +inline FunctionReference Persistent(Function value) { + return Reference::New(value, 1); +} + +//////////////////////////////////////////////////////////////////////////////// +// ObjectReference class +//////////////////////////////////////////////////////////////////////////////// + +inline ObjectReference::ObjectReference() : Reference() {} + +inline ObjectReference::ObjectReference(napi_env env, napi_ref ref) + : Reference(env, ref) {} + +inline ObjectReference::ObjectReference(Reference&& other) + : Reference(std::move(other)) {} + +inline ObjectReference& ObjectReference::operator=(Reference&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline ObjectReference::ObjectReference(ObjectReference&& other) + : Reference(std::move(other)) {} + +inline ObjectReference& ObjectReference::operator=(ObjectReference&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline ObjectReference::ObjectReference(const ObjectReference& other) + : Reference(other) {} + +inline MaybeOrValue ObjectReference::Get( + const char* utf8name) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Get(utf8name); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue ObjectReference::Get( + const std::string& utf8name) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Get(utf8name); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + napi_value value) const { + HandleScope scope(_env); + return Value().Set(utf8name, value); +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + Napi::Value value) const { + HandleScope scope(_env); + return Value().Set(utf8name, value); +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + const char* utf8value) const { + HandleScope scope(_env); + return Value().Set(utf8name, utf8value); +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + bool boolValue) const { + HandleScope scope(_env); + return Value().Set(utf8name, boolValue); +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + double numberValue) const { + HandleScope scope(_env); + return Value().Set(utf8name, numberValue); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + napi_value value) const { + HandleScope scope(_env); + return Value().Set(utf8name, value); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + Napi::Value value) const { + HandleScope scope(_env); + return Value().Set(utf8name, value); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + std::string& utf8value) const { + HandleScope scope(_env); + return Value().Set(utf8name, utf8value); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + bool boolValue) const { + HandleScope scope(_env); + return Value().Set(utf8name, boolValue); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + double numberValue) 
const { + HandleScope scope(_env); + return Value().Set(utf8name, numberValue); +} + +inline MaybeOrValue ObjectReference::Get(uint32_t index) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Get(index); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + napi_value value) const { + HandleScope scope(_env); + return Value().Set(index, value); +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + Napi::Value value) const { + HandleScope scope(_env); + return Value().Set(index, value); +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + const char* utf8value) const { + HandleScope scope(_env); + return Value().Set(index, utf8value); +} + +inline MaybeOrValue ObjectReference::Set( + uint32_t index, const std::string& utf8value) const { + HandleScope scope(_env); + return Value().Set(index, utf8value); +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + bool boolValue) const { + HandleScope scope(_env); + return Value().Set(index, boolValue); +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + double numberValue) const { + HandleScope scope(_env); + return Value().Set(index, numberValue); +} + +//////////////////////////////////////////////////////////////////////////////// +// FunctionReference class +//////////////////////////////////////////////////////////////////////////////// + +inline FunctionReference::FunctionReference() : Reference() {} + +inline FunctionReference::FunctionReference(napi_env env, napi_ref ref) + : Reference(env, ref) {} + +inline FunctionReference::FunctionReference(Reference&& other) + : Reference(std::move(other)) {} + +inline FunctionReference& FunctionReference::operator=( + Reference&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline FunctionReference::FunctionReference(FunctionReference&& other) + : Reference(std::move(other)) {} + +inline FunctionReference& FunctionReference::operator=( + FunctionReference&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline MaybeOrValue FunctionReference::operator()( + const std::initializer_list& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value()(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::Call( + const std::initializer_list& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::Call( + const std::vector& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + 
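+// Illustrative usage sketch (editorial, not part of the node-addon-api
+// sources): a FunctionReference is typically created with Napi::Persistent()
+// so a JS callback outlives the current handle scope, then invoked later:
+//
+//   Napi::FunctionReference cbRef =
+//       Napi::Persistent(info[0].As<Napi::Function>());
+//   // ... later, with C++ exceptions enabled so Call() yields a Napi::Value:
+//   Napi::Value result = cbRef.Call({Napi::Number::New(env, 42)});
+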
+inline MaybeOrValue FunctionReference::Call( + napi_value recv, const std::initializer_list& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(recv, args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::Call( + napi_value recv, const std::vector& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(recv, args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::Call( + napi_value recv, size_t argc, const napi_value* args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(recv, argc, args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::MakeCallback( + napi_value recv, + const std::initializer_list& args, + napi_async_context context) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().MakeCallback(recv, args, context); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::MakeCallback( + napi_value recv, + const std::vector& args, + napi_async_context context) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().MakeCallback(recv, args, context); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::MakeCallback( + napi_value recv, + size_t argc, + const napi_value* args, + napi_async_context context) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = + Value().MakeCallback(recv, argc, args, context); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::New( + const std::initializer_list& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().New(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap()).As()); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Object(); + } + return scope.Escape(result).As(); +#endif +} + +inline MaybeOrValue FunctionReference::New( + const std::vector& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().New(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap()).As()); + } + return result; +#else + if 
(scope.Env().IsExceptionPending()) { + return Object(); + } + return scope.Escape(result).As(); +#endif +} + +//////////////////////////////////////////////////////////////////////////////// +// CallbackInfo class +//////////////////////////////////////////////////////////////////////////////// + +inline CallbackInfo::CallbackInfo(napi_env env, napi_callback_info info) + : _env(env), + _info(info), + _this(nullptr), + _dynamicArgs(nullptr), + _data(nullptr) { + _argc = _staticArgCount; + _argv = _staticArgs; + napi_status status = + napi_get_cb_info(env, info, &_argc, _argv, &_this, &_data); + NAPI_THROW_IF_FAILED_VOID(_env, status); + + if (_argc > _staticArgCount) { + // Use either a fixed-size array (on the stack) or a dynamically-allocated + // array (on the heap) depending on the number of args. + _dynamicArgs = new napi_value[_argc]; + _argv = _dynamicArgs; + + status = napi_get_cb_info(env, info, &_argc, _argv, nullptr, nullptr); + NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +inline CallbackInfo::~CallbackInfo() { + if (_dynamicArgs != nullptr) { + delete[] _dynamicArgs; + } +} + +inline CallbackInfo::operator napi_callback_info() const { + return _info; +} + +inline Value CallbackInfo::NewTarget() const { + napi_value newTarget; + napi_status status = napi_get_new_target(_env, _info, &newTarget); + NAPI_THROW_IF_FAILED(_env, status, Value()); + return Value(_env, newTarget); +} + +inline bool CallbackInfo::IsConstructCall() const { + return !NewTarget().IsEmpty(); +} + +inline Napi::Env CallbackInfo::Env() const { + return Napi::Env(_env); +} + +inline size_t CallbackInfo::Length() const { + return _argc; +} + +inline const Value CallbackInfo::operator[](size_t index) const { + return index < _argc ? Value(_env, _argv[index]) : Env().Undefined(); +} + +inline Value CallbackInfo::This() const { + if (_this == nullptr) { + return Env().Undefined(); + } + return Object(_env, _this); +} + +inline void* CallbackInfo::Data() const { + return _data; +} + +inline void CallbackInfo::SetData(void* data) { + _data = data; +} + +//////////////////////////////////////////////////////////////////////////////// +// PropertyDescriptor class +//////////////////////////////////////////////////////////////////////////////// + +template +PropertyDescriptor PropertyDescriptor::Accessor( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + + desc.utf8name = utf8name; + desc.getter = details::TemplatedCallback; + desc.attributes = attributes; + desc.data = data; + + return desc; +} + +template +PropertyDescriptor PropertyDescriptor::Accessor( + const std::string& utf8name, + napi_property_attributes attributes, + void* data) { + return Accessor(utf8name.c_str(), attributes, data); +} + +template +PropertyDescriptor PropertyDescriptor::Accessor( + Name name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + + desc.name = name; + desc.getter = details::TemplatedCallback; + desc.attributes = attributes; + desc.data = data; + + return desc; +} + +template +PropertyDescriptor PropertyDescriptor::Accessor( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + + desc.utf8name = utf8name; + desc.getter = details::TemplatedCallback; + desc.setter = details::TemplatedVoidCallback; + desc.attributes = attributes; + desc.data = data; + + return desc; +} + +template 
+PropertyDescriptor PropertyDescriptor::Accessor( + const std::string& utf8name, + napi_property_attributes attributes, + void* data) { + return Accessor(utf8name.c_str(), attributes, data); +} + +template +PropertyDescriptor PropertyDescriptor::Accessor( + Name name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + + desc.name = name; + desc.getter = details::TemplatedCallback; + desc.setter = details::TemplatedVoidCallback; + desc.attributes = attributes; + desc.data = data; + + return desc; +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + const char* utf8name, + Getter getter, + napi_property_attributes attributes, + void* data) { + using CbData = details::CallbackData; + auto callbackData = new CbData({getter, data}); + + napi_status status = AttachData(env, object, callbackData); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); + } + + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Getter getter, + napi_property_attributes attributes, + void* data) { + return Accessor(env, object, utf8name.c_str(), getter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + Name name, + Getter getter, + napi_property_attributes attributes, + void* data) { + using CbData = details::CallbackData; + auto callbackData = new CbData({getter, data}); + + napi_status status = AttachData(env, object, callbackData); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); + } + + return PropertyDescriptor({nullptr, + name, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + const char* utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + using CbData = details::AccessorCallbackData; + auto callbackData = new CbData({getter, setter, data}); + + napi_status status = AttachData(env, object, callbackData); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); + } + + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + CbData::GetterWrapper, + CbData::SetterWrapper, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + return Accessor( + env, object, utf8name.c_str(), getter, setter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + Name name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + using CbData = details::AccessorCallbackData; + auto callbackData = new CbData({getter, setter, data}); + + napi_status status = AttachData(env, object, callbackData); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, 
napi_property_descriptor()); + } + + return PropertyDescriptor({nullptr, + name, + nullptr, + CbData::GetterWrapper, + CbData::SetterWrapper, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + Napi::Env env, + Napi::Object /*object*/, + const char* utf8name, + Callable cb, + napi_property_attributes attributes, + void* data) { + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + nullptr, + nullptr, + Napi::Function::New(env, cb, utf8name, data), + attributes, + nullptr}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Callable cb, + napi_property_attributes attributes, + void* data) { + return Function(env, object, utf8name.c_str(), cb, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + Napi::Env env, + Napi::Object /*object*/, + Name name, + Callable cb, + napi_property_attributes attributes, + void* data) { + return PropertyDescriptor({nullptr, + name, + nullptr, + nullptr, + nullptr, + Napi::Function::New(env, cb, nullptr, data), + attributes, + nullptr}); +} + +inline PropertyDescriptor PropertyDescriptor::Value( + const char* utf8name, + napi_value value, + napi_property_attributes attributes) { + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + nullptr, + nullptr, + value, + attributes, + nullptr}); +} + +inline PropertyDescriptor PropertyDescriptor::Value( + const std::string& utf8name, + napi_value value, + napi_property_attributes attributes) { + return Value(utf8name.c_str(), value, attributes); +} + +inline PropertyDescriptor PropertyDescriptor::Value( + napi_value name, napi_value value, napi_property_attributes attributes) { + return PropertyDescriptor( + {nullptr, name, nullptr, nullptr, nullptr, value, attributes, nullptr}); +} + +inline PropertyDescriptor PropertyDescriptor::Value( + Name name, Napi::Value value, napi_property_attributes attributes) { + napi_value nameValue = name; + napi_value valueValue = value; + return PropertyDescriptor::Value(nameValue, valueValue, attributes); +} + +inline PropertyDescriptor::PropertyDescriptor(napi_property_descriptor desc) + : _desc(desc) {} + +inline PropertyDescriptor::operator napi_property_descriptor&() { + return _desc; +} + +inline PropertyDescriptor::operator const napi_property_descriptor&() const { + return _desc; +} + +//////////////////////////////////////////////////////////////////////////////// +// InstanceWrap class +//////////////////////////////////////////////////////////////////////////////// + +template +inline void InstanceWrap::AttachPropData( + napi_env env, napi_value value, const napi_property_descriptor* prop) { + napi_status status; + if (!(prop->attributes & napi_static)) { + if (prop->method == T::InstanceVoidMethodCallbackWrapper) { + status = Napi::details::AttachData( + env, value, static_cast(prop->data)); + NAPI_THROW_IF_FAILED_VOID(env, status); + } else if (prop->method == T::InstanceMethodCallbackWrapper) { + status = Napi::details::AttachData( + env, value, static_cast(prop->data)); + NAPI_THROW_IF_FAILED_VOID(env, status); + } else if (prop->getter == T::InstanceGetterCallbackWrapper || + prop->setter == T::InstanceSetterCallbackWrapper) { + status = Napi::details::AttachData( + env, value, static_cast(prop->data)); + NAPI_THROW_IF_FAILED_VOID(env, status); + } + } +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + const char* utf8name, + 
InstanceVoidMethodCallback method, + napi_property_attributes attributes, + void* data) { + InstanceVoidMethodCallbackData* callbackData = + new InstanceVoidMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = T::InstanceVoidMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + const char* utf8name, + InstanceMethodCallback method, + napi_property_attributes attributes, + void* data) { + InstanceMethodCallbackData* callbackData = + new InstanceMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = T::InstanceMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + Symbol name, + InstanceVoidMethodCallback method, + napi_property_attributes attributes, + void* data) { + InstanceVoidMethodCallbackData* callbackData = + new InstanceVoidMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = T::InstanceVoidMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + Symbol name, + InstanceMethodCallback method, + napi_property_attributes attributes, + void* data) { + InstanceMethodCallbackData* callbackData = + new InstanceMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = T::InstanceMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceVoidMethodCallback method> +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = details::TemplatedInstanceVoidCallback; + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceMethodCallback method> +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = details::TemplatedInstanceCallback; + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceVoidMethodCallback method> +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = details::TemplatedInstanceVoidCallback; + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceMethodCallback method> +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = details::TemplatedInstanceCallback; + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor 
InstanceWrap::InstanceAccessor( + const char* utf8name, + InstanceGetterCallback getter, + InstanceSetterCallback setter, + napi_property_attributes attributes, + void* data) { + InstanceAccessorCallbackData* callbackData = + new InstanceAccessorCallbackData({getter, setter, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.getter = getter != nullptr ? T::InstanceGetterCallbackWrapper : nullptr; + desc.setter = setter != nullptr ? T::InstanceSetterCallbackWrapper : nullptr; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( + Symbol name, + InstanceGetterCallback getter, + InstanceSetterCallback setter, + napi_property_attributes attributes, + void* data) { + InstanceAccessorCallbackData* callbackData = + new InstanceAccessorCallbackData({getter, setter, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.getter = getter != nullptr ? T::InstanceGetterCallbackWrapper : nullptr; + desc.setter = setter != nullptr ? T::InstanceSetterCallbackWrapper : nullptr; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceGetterCallback getter, + typename InstanceWrap::InstanceSetterCallback setter> +inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.getter = details::TemplatedInstanceCallback; + desc.setter = This::WrapSetter(This::SetterTag()); + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceGetterCallback getter, + typename InstanceWrap::InstanceSetterCallback setter> +inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.getter = details::TemplatedInstanceCallback; + desc.setter = This::WrapSetter(This::SetterTag()); + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceValue( + const char* utf8name, + Napi::Value value, + napi_property_attributes attributes) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.value = value; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceValue( + Symbol name, Napi::Value value, napi_property_attributes attributes) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.value = value; + desc.attributes = attributes; + return desc; +} + +template +inline napi_value InstanceWrap::InstanceVoidMethodCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + InstanceVoidMethodCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + T* instance = T::Unwrap(callbackInfo.This().As()); + auto cb = callbackData->callback; + if (instance) (instance->*cb)(callbackInfo); + return nullptr; + }); +} + +template +inline napi_value InstanceWrap::InstanceMethodCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + 
CallbackInfo callbackInfo(env, info); + InstanceMethodCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + T* instance = T::Unwrap(callbackInfo.This().As()); + auto cb = callbackData->callback; + return instance ? (instance->*cb)(callbackInfo) : Napi::Value(); + }); +} + +template +inline napi_value InstanceWrap::InstanceGetterCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + InstanceAccessorCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + T* instance = T::Unwrap(callbackInfo.This().As()); + auto cb = callbackData->getterCallback; + return instance ? (instance->*cb)(callbackInfo) : Napi::Value(); + }); +} + +template +inline napi_value InstanceWrap::InstanceSetterCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + InstanceAccessorCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + T* instance = T::Unwrap(callbackInfo.This().As()); + auto cb = callbackData->setterCallback; + if (instance) (instance->*cb)(callbackInfo, callbackInfo[0]); + return nullptr; + }); +} + +template +template ::InstanceSetterCallback method> +inline napi_value InstanceWrap::WrappedMethod( + napi_env env, napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + const CallbackInfo cbInfo(env, info); + T* instance = T::Unwrap(cbInfo.This().As()); + if (instance) (instance->*method)(cbInfo, cbInfo[0]); + return nullptr; + }); +} + +//////////////////////////////////////////////////////////////////////////////// +// ObjectWrap class +//////////////////////////////////////////////////////////////////////////////// + +template +inline ObjectWrap::ObjectWrap(const Napi::CallbackInfo& callbackInfo) { + napi_env env = callbackInfo.Env(); + napi_value wrapper = callbackInfo.This(); + napi_status status; + napi_ref ref; + T* instance = static_cast(this); + status = napi_wrap(env, wrapper, instance, FinalizeCallback, nullptr, &ref); + NAPI_THROW_IF_FAILED_VOID(env, status); + + Reference* instanceRef = instance; + *instanceRef = Reference(env, ref); +} + +template +inline ObjectWrap::~ObjectWrap() { + // If the JS object still exists at this point, remove the finalizer added + // through `napi_wrap()`. + if (!IsEmpty()) { + Object object = Value(); + // It is not valid to call `napi_remove_wrap()` with an empty `object`. + // This happens e.g. during garbage collection. + if (!object.IsEmpty() && _construction_failed) { + napi_remove_wrap(Env(), object, nullptr); + } + } +} + +template +inline T* ObjectWrap::Unwrap(Object wrapper) { + void* unwrapped; + napi_status status = napi_unwrap(wrapper.Env(), wrapper, &unwrapped); + NAPI_THROW_IF_FAILED(wrapper.Env(), status, nullptr); + return static_cast(unwrapped); +} + +template +inline Function ObjectWrap::DefineClass( + Napi::Env env, + const char* utf8name, + const size_t props_count, + const napi_property_descriptor* descriptors, + void* data) { + napi_status status; + std::vector props(props_count); + + // We copy the descriptors to a local array because before defining the class + // we must replace static method property descriptors with value property + // descriptors such that the value is a function-valued `napi_value` created + // with `CreateFunction()`. 
+ // + // This replacement could be made for instance methods as well, but V8 aborts + // if we do that, because it expects methods defined on the prototype template + // to have `FunctionTemplate`s. + for (size_t index = 0; index < props_count; index++) { + props[index] = descriptors[index]; + napi_property_descriptor* prop = &props[index]; + if (prop->method == T::StaticMethodCallbackWrapper) { + status = + CreateFunction(env, + utf8name, + prop->method, + static_cast(prop->data), + &(prop->value)); + NAPI_THROW_IF_FAILED(env, status, Function()); + prop->method = nullptr; + prop->data = nullptr; + } else if (prop->method == T::StaticVoidMethodCallbackWrapper) { + status = + CreateFunction(env, + utf8name, + prop->method, + static_cast(prop->data), + &(prop->value)); + NAPI_THROW_IF_FAILED(env, status, Function()); + prop->method = nullptr; + prop->data = nullptr; + } + } + + napi_value value; + status = napi_define_class(env, + utf8name, + NAPI_AUTO_LENGTH, + T::ConstructorCallbackWrapper, + data, + props_count, + props.data(), + &value); + NAPI_THROW_IF_FAILED(env, status, Function()); + + // After defining the class we iterate once more over the property descriptors + // and attach the data associated with accessors and instance methods to the + // newly created JavaScript class. + for (size_t idx = 0; idx < props_count; idx++) { + const napi_property_descriptor* prop = &props[idx]; + + if (prop->getter == T::StaticGetterCallbackWrapper || + prop->setter == T::StaticSetterCallbackWrapper) { + status = Napi::details::AttachData( + env, value, static_cast(prop->data)); + NAPI_THROW_IF_FAILED(env, status, Function()); + } else { + // InstanceWrap::AttachPropData is responsible for attaching the data + // of instance methods and accessors. + T::AttachPropData(env, value, prop); + } + } + + return Function(env, value); +} + +template +inline Function ObjectWrap::DefineClass( + Napi::Env env, + const char* utf8name, + const std::initializer_list>& properties, + void* data) { + return DefineClass( + env, + utf8name, + properties.size(), + reinterpret_cast(properties.begin()), + data); +} + +template +inline Function ObjectWrap::DefineClass( + Napi::Env env, + const char* utf8name, + const std::vector>& properties, + void* data) { + return DefineClass( + env, + utf8name, + properties.size(), + reinterpret_cast(properties.data()), + data); +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + const char* utf8name, + StaticVoidMethodCallback method, + napi_property_attributes attributes, + void* data) { + StaticVoidMethodCallbackData* callbackData = + new StaticVoidMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = T::StaticVoidMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + const char* utf8name, + StaticMethodCallback method, + napi_property_attributes attributes, + void* data) { + StaticMethodCallbackData* callbackData = + new StaticMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = T::StaticMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + Symbol name, + StaticVoidMethodCallback method, + 
napi_property_attributes attributes, + void* data) { + StaticVoidMethodCallbackData* callbackData = + new StaticVoidMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = T::StaticVoidMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + Symbol name, + StaticMethodCallback method, + napi_property_attributes attributes, + void* data) { + StaticMethodCallbackData* callbackData = + new StaticMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = T::StaticMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticVoidMethodCallback method> +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = details::TemplatedVoidCallback; + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticVoidMethodCallback method> +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = details::TemplatedVoidCallback; + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticMethodCallback method> +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = details::TemplatedCallback; + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticMethodCallback method> +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = details::TemplatedCallback; + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( + const char* utf8name, + StaticGetterCallback getter, + StaticSetterCallback setter, + napi_property_attributes attributes, + void* data) { + StaticAccessorCallbackData* callbackData = + new StaticAccessorCallbackData({getter, setter, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.getter = getter != nullptr ? T::StaticGetterCallbackWrapper : nullptr; + desc.setter = setter != nullptr ? 
T::StaticSetterCallbackWrapper : nullptr; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( + Symbol name, + StaticGetterCallback getter, + StaticSetterCallback setter, + napi_property_attributes attributes, + void* data) { + StaticAccessorCallbackData* callbackData = + new StaticAccessorCallbackData({getter, setter, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.getter = getter != nullptr ? T::StaticGetterCallbackWrapper : nullptr; + desc.setter = setter != nullptr ? T::StaticSetterCallbackWrapper : nullptr; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticGetterCallback getter, + typename ObjectWrap::StaticSetterCallback setter> +inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.getter = details::TemplatedCallback; + desc.setter = This::WrapStaticSetter(This::StaticSetterTag()); + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticGetterCallback getter, + typename ObjectWrap::StaticSetterCallback setter> +inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.getter = details::TemplatedCallback; + desc.setter = This::WrapStaticSetter(This::StaticSetterTag()); + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticValue( + const char* utf8name, + Napi::Value value, + napi_property_attributes attributes) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.value = value; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticValue( + Symbol name, Napi::Value value, napi_property_attributes attributes) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.value = value; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline Value ObjectWrap::OnCalledAsFunction( + const Napi::CallbackInfo& callbackInfo) { + NAPI_THROW( + TypeError::New(callbackInfo.Env(), + "Class constructors cannot be invoked without 'new'"), + Napi::Value()); +} + +template +inline void ObjectWrap::Finalize(Napi::Env /*env*/) {} + +template +inline napi_value ObjectWrap::ConstructorCallbackWrapper( + napi_env env, napi_callback_info info) { + napi_value new_target; + napi_status status = napi_get_new_target(env, info, &new_target); + if (status != napi_ok) return nullptr; + + bool isConstructCall = (new_target != nullptr); + if (!isConstructCall) { + return details::WrapCallback( + [&] { return T::OnCalledAsFunction(CallbackInfo(env, info)); }); + } + + napi_value wrapper = details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + T* instance = new T(callbackInfo); +#ifdef NAPI_CPP_EXCEPTIONS + instance->_construction_failed = false; +#else + if (callbackInfo.Env().IsExceptionPending()) { + // We need to clear the exception 
so that removing the wrap might work. + Error e = callbackInfo.Env().GetAndClearPendingException(); + delete instance; + e.ThrowAsJavaScriptException(); + } else { + instance->_construction_failed = false; + } +#endif // NAPI_CPP_EXCEPTIONS + return callbackInfo.This(); + }); + + return wrapper; +} + +template +inline napi_value ObjectWrap::StaticVoidMethodCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + StaticVoidMethodCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + callbackData->callback(callbackInfo); + return nullptr; + }); +} + +template +inline napi_value ObjectWrap::StaticMethodCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + StaticMethodCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + return callbackData->callback(callbackInfo); + }); +} + +template +inline napi_value ObjectWrap::StaticGetterCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + StaticAccessorCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + return callbackData->getterCallback(callbackInfo); + }); +} + +template +inline napi_value ObjectWrap::StaticSetterCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + StaticAccessorCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + callbackData->setterCallback(callbackInfo, callbackInfo[0]); + return nullptr; + }); +} + +template +inline void ObjectWrap::FinalizeCallback(napi_env env, + void* data, + void* /*hint*/) { + HandleScope scope(env); + T* instance = static_cast(data); + instance->Finalize(Napi::Env(env)); + delete instance; +} + +template +template ::StaticSetterCallback method> +inline napi_value ObjectWrap::WrappedMethod( + napi_env env, napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + const CallbackInfo cbInfo(env, info); + method(cbInfo, cbInfo[0]); + return nullptr; + }); +} + +//////////////////////////////////////////////////////////////////////////////// +// HandleScope class +//////////////////////////////////////////////////////////////////////////////// + +inline HandleScope::HandleScope(napi_env env, napi_handle_scope scope) + : _env(env), _scope(scope) {} + +inline HandleScope::HandleScope(Napi::Env env) : _env(env) { + napi_status status = napi_open_handle_scope(_env, &_scope); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline HandleScope::~HandleScope() { + napi_status status = napi_close_handle_scope(_env, _scope); + NAPI_FATAL_IF_FAILED( + status, "HandleScope::~HandleScope", "napi_close_handle_scope"); +} + +inline HandleScope::operator napi_handle_scope() const { + return _scope; +} + +inline Napi::Env HandleScope::Env() const { + return Napi::Env(_env); +} + +//////////////////////////////////////////////////////////////////////////////// +// EscapableHandleScope class +//////////////////////////////////////////////////////////////////////////////// + +inline EscapableHandleScope::EscapableHandleScope( + napi_env env, napi_escapable_handle_scope scope) + : _env(env), _scope(scope) {} + +inline 
EscapableHandleScope::EscapableHandleScope(Napi::Env env) : _env(env) { + napi_status status = napi_open_escapable_handle_scope(_env, &_scope); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline EscapableHandleScope::~EscapableHandleScope() { + napi_status status = napi_close_escapable_handle_scope(_env, _scope); + NAPI_FATAL_IF_FAILED(status, + "EscapableHandleScope::~EscapableHandleScope", + "napi_close_escapable_handle_scope"); +} + +inline EscapableHandleScope::operator napi_escapable_handle_scope() const { + return _scope; +} + +inline Napi::Env EscapableHandleScope::Env() const { + return Napi::Env(_env); +} + +inline Value EscapableHandleScope::Escape(napi_value escapee) { + napi_value result; + napi_status status = napi_escape_handle(_env, _scope, escapee, &result); + NAPI_THROW_IF_FAILED(_env, status, Value()); + return Value(_env, result); +} + +#if (NAPI_VERSION > 2) +//////////////////////////////////////////////////////////////////////////////// +// CallbackScope class +//////////////////////////////////////////////////////////////////////////////// + +inline CallbackScope::CallbackScope(napi_env env, napi_callback_scope scope) + : _env(env), _scope(scope) {} + +inline CallbackScope::CallbackScope(napi_env env, napi_async_context context) + : _env(env) { + napi_status status = + napi_open_callback_scope(_env, Object::New(env), context, &_scope); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline CallbackScope::~CallbackScope() { + napi_status status = napi_close_callback_scope(_env, _scope); + NAPI_FATAL_IF_FAILED( + status, "CallbackScope::~CallbackScope", "napi_close_callback_scope"); +} + +inline CallbackScope::operator napi_callback_scope() const { + return _scope; +} + +inline Napi::Env CallbackScope::Env() const { + return Napi::Env(_env); +} +#endif + +//////////////////////////////////////////////////////////////////////////////// +// AsyncContext class +//////////////////////////////////////////////////////////////////////////////// + +inline AsyncContext::AsyncContext(napi_env env, const char* resource_name) + : AsyncContext(env, resource_name, Object::New(env)) {} + +inline AsyncContext::AsyncContext(napi_env env, + const char* resource_name, + const Object& resource) + : _env(env), _context(nullptr) { + napi_value resource_id; + napi_status status = napi_create_string_utf8( + _env, resource_name, NAPI_AUTO_LENGTH, &resource_id); + NAPI_THROW_IF_FAILED_VOID(_env, status); + + status = napi_async_init(_env, resource, resource_id, &_context); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline AsyncContext::~AsyncContext() { + if (_context != nullptr) { + napi_async_destroy(_env, _context); + _context = nullptr; + } +} + +inline AsyncContext::AsyncContext(AsyncContext&& other) { + _env = other._env; + other._env = nullptr; + _context = other._context; + other._context = nullptr; +} + +inline AsyncContext& AsyncContext::operator=(AsyncContext&& other) { + _env = other._env; + other._env = nullptr; + _context = other._context; + other._context = nullptr; + return *this; +} + +inline AsyncContext::operator napi_async_context() const { + return _context; +} + +inline Napi::Env AsyncContext::Env() const { + return Napi::Env(_env); +} + +//////////////////////////////////////////////////////////////////////////////// +// AsyncWorker class +//////////////////////////////////////////////////////////////////////////////// + +#if NAPI_HAS_THREADS + +inline AsyncWorker::AsyncWorker(const Function& callback) + : AsyncWorker(callback, "generic") {} + +inline 
AsyncWorker::AsyncWorker(const Function& callback,
+                                const char* resource_name)
+    : AsyncWorker(callback, resource_name, Object::New(callback.Env())) {}
+
+inline AsyncWorker::AsyncWorker(const Function& callback,
+                                const char* resource_name,
+                                const Object& resource)
+    : AsyncWorker(
+          Object::New(callback.Env()), callback, resource_name, resource) {}
+
+inline AsyncWorker::AsyncWorker(const Object& receiver,
+                                const Function& callback)
+    : AsyncWorker(receiver, callback, "generic") {}
+
+inline AsyncWorker::AsyncWorker(const Object& receiver,
+                                const Function& callback,
+                                const char* resource_name)
+    : AsyncWorker(
+          receiver, callback, resource_name, Object::New(callback.Env())) {}
+
+inline AsyncWorker::AsyncWorker(const Object& receiver,
+                                const Function& callback,
+                                const char* resource_name,
+                                const Object& resource)
+    : _env(callback.Env()),
+      _receiver(Napi::Persistent(receiver)),
+      _callback(Napi::Persistent(callback)),
+      _suppress_destruct(false) {
+  napi_value resource_id;
+  napi_status status = napi_create_string_latin1(
+      _env, resource_name, NAPI_AUTO_LENGTH, &resource_id);
+  NAPI_THROW_IF_FAILED_VOID(_env, status);
+
+  status = napi_create_async_work(_env,
+                                  resource,
+                                  resource_id,
+                                  OnAsyncWorkExecute,
+                                  OnAsyncWorkComplete,
+                                  this,
+                                  &_work);
+  NAPI_THROW_IF_FAILED_VOID(_env, status);
+}
+
+inline AsyncWorker::AsyncWorker(Napi::Env env) : AsyncWorker(env, "generic") {}
+
+inline AsyncWorker::AsyncWorker(Napi::Env env, const char* resource_name)
+    : AsyncWorker(env, resource_name, Object::New(env)) {}
+
+inline AsyncWorker::AsyncWorker(Napi::Env env,
+                                const char* resource_name,
+                                const Object& resource)
+    : _env(env), _receiver(), _callback(), _suppress_destruct(false) {
+  napi_value resource_id;
+  napi_status status = napi_create_string_latin1(
+      _env, resource_name, NAPI_AUTO_LENGTH, &resource_id);
+  NAPI_THROW_IF_FAILED_VOID(_env, status);
+
+  status = napi_create_async_work(_env,
+                                  resource,
+                                  resource_id,
+                                  OnAsyncWorkExecute,
+                                  OnAsyncWorkComplete,
+                                  this,
+                                  &_work);
+  NAPI_THROW_IF_FAILED_VOID(_env, status);
+}
+
+inline AsyncWorker::~AsyncWorker() {
+  if (_work != nullptr) {
+    napi_delete_async_work(_env, _work);
+    _work = nullptr;
+  }
+}
+
+inline void AsyncWorker::Destroy() {
+  delete this;
+}
+
+inline AsyncWorker::operator napi_async_work() const {
+  return _work;
+}
+
+inline Napi::Env AsyncWorker::Env() const {
+  return Napi::Env(_env);
+}
+
+inline void AsyncWorker::Queue() {
+  napi_status status = napi_queue_async_work(_env, _work);
+  NAPI_THROW_IF_FAILED_VOID(_env, status);
+}
+
+inline void AsyncWorker::Cancel() {
+  napi_status status = napi_cancel_async_work(_env, _work);
+  NAPI_THROW_IF_FAILED_VOID(_env, status);
+}
+
+inline ObjectReference& AsyncWorker::Receiver() {
+  return _receiver;
+}
+
+inline FunctionReference& AsyncWorker::Callback() {
+  return _callback;
+}
+
+inline void AsyncWorker::SuppressDestruct() {
+  _suppress_destruct = true;
+}
+
+inline void AsyncWorker::OnOK() {
+  if (!_callback.IsEmpty()) {
+    _callback.Call(_receiver.Value(), GetResult(_callback.Env()));
+  }
+}
+
+inline void AsyncWorker::OnError(const Error& e) {
+  if (!_callback.IsEmpty()) {
+    _callback.Call(_receiver.Value(),
+                   std::initializer_list<napi_value>{e.Value()});
+  }
+}
+
+inline void AsyncWorker::SetError(const std::string& error) {
+  _error = error;
+}
+
+inline std::vector<napi_value> AsyncWorker::GetResult(Napi::Env /*env*/) {
+  return {};
+}
+// The OnAsyncWorkExecute method receives an napi_env argument.
+// However, do NOT use it within this method, as it does not run on the
+// JavaScript thread and must not run any method that would cause JavaScript
+// to run. In practice, this means that almost any use of napi_env will be
+// incorrect.
+inline void AsyncWorker::OnAsyncWorkExecute(napi_env env, void* asyncworker) {
+  AsyncWorker* self = static_cast<AsyncWorker*>(asyncworker);
+  self->OnExecute(env);
+}
+// The OnExecute method receives an napi_env argument. However, do NOT
+// use it within this method, as it does not run on the JavaScript thread and
+// must not run any method that would cause JavaScript to run. In practice,
+// this means that almost any use of napi_env will be incorrect.
+inline void AsyncWorker::OnExecute(Napi::Env /*DO_NOT_USE*/) {
+#ifdef NAPI_CPP_EXCEPTIONS
+  try {
+    Execute();
+  } catch (const std::exception& e) {
+    SetError(e.what());
+  }
+#else // NAPI_CPP_EXCEPTIONS
+  Execute();
+#endif // NAPI_CPP_EXCEPTIONS
+}
+
+inline void AsyncWorker::OnAsyncWorkComplete(napi_env env,
+                                             napi_status status,
+                                             void* asyncworker) {
+  AsyncWorker* self = static_cast<AsyncWorker*>(asyncworker);
+  self->OnWorkComplete(env, status);
+}
+inline void AsyncWorker::OnWorkComplete(Napi::Env /*env*/, napi_status status) {
+  if (status != napi_cancelled) {
+    HandleScope scope(_env);
+    details::WrapCallback([&] {
+      if (_error.size() == 0) {
+        OnOK();
+      } else {
+        OnError(Error::New(_env, _error));
+      }
+      return nullptr;
+    });
+  }
+  if (!_suppress_destruct) {
+    Destroy();
+  }
+}
+
+#endif // NAPI_HAS_THREADS
+
+#if (NAPI_VERSION > 3 && NAPI_HAS_THREADS)
+////////////////////////////////////////////////////////////////////////////////
+// TypedThreadSafeFunction class
+////////////////////////////////////////////////////////////////////////////////
+
+// Starting with NAPI 5, the JavaScript function `func` parameter of
+// `napi_create_threadsafe_function` is optional.
+#if NAPI_VERSION > 4 +// static, with Callback [missing] Resource [missing] Finalizer [missing] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + TypedThreadSafeFunction tsfn; + + napi_status status = + napi_create_threadsafe_function(env, + nullptr, + nullptr, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + nullptr, + nullptr, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [missing] Resource [passed] Finalizer [missing] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + TypedThreadSafeFunction tsfn; + + napi_status status = + napi_create_threadsafe_function(env, + nullptr, + resource, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + nullptr, + nullptr, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [missing] Resource [missing] Finalizer [passed] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + TypedThreadSafeFunction tsfn; + + auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = napi_create_threadsafe_function( + env, + nullptr, + nullptr, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + finalizeData, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [missing] Resource [passed] Finalizer [passed] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + TypedThreadSafeFunction tsfn; + + auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = napi_create_threadsafe_function( + env, + nullptr, + resource, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + finalizeData, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} +#endif + +// static, with Callback [passed] Resource [missing] Finalizer [missing] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + TypedThreadSafeFunction tsfn; + + 
napi_status status = + napi_create_threadsafe_function(env, + callback, + nullptr, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + nullptr, + nullptr, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [passed] Resource [passed] Finalizer [missing] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + TypedThreadSafeFunction tsfn; + + napi_status status = + napi_create_threadsafe_function(env, + callback, + resource, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + nullptr, + nullptr, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [passed] Resource [missing] Finalizer [passed] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + TypedThreadSafeFunction tsfn; + + auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = napi_create_threadsafe_function( + env, + callback, + nullptr, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + finalizeData, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with: Callback [passed] Resource [passed] Finalizer [passed] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + CallbackType callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + TypedThreadSafeFunction tsfn; + + auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = napi_create_threadsafe_function( + env, + details::DefaultCallbackWrapper< + CallbackType, + TypedThreadSafeFunction>(env, + callback), + resource, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + finalizeData, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +template +inline TypedThreadSafeFunction:: + TypedThreadSafeFunction() + : _tsfn() {} + +template +inline TypedThreadSafeFunction:: + TypedThreadSafeFunction(napi_threadsafe_function tsfn) + : _tsfn(tsfn) {} + +template +inline TypedThreadSafeFunction:: +operator napi_threadsafe_function() const { + return _tsfn; +} + +template +inline napi_status +TypedThreadSafeFunction::BlockingCall( + DataType* data) const { + return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_blocking); +} + +template +inline napi_status 
+TypedThreadSafeFunction::NonBlockingCall( + DataType* data) const { + return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_nonblocking); +} + +template +inline void TypedThreadSafeFunction::Ref( + napi_env env) const { + if (_tsfn != nullptr) { + napi_status status = napi_ref_threadsafe_function(env, _tsfn); + NAPI_THROW_IF_FAILED_VOID(env, status); + } +} + +template +inline void TypedThreadSafeFunction::Unref( + napi_env env) const { + if (_tsfn != nullptr) { + napi_status status = napi_unref_threadsafe_function(env, _tsfn); + NAPI_THROW_IF_FAILED_VOID(env, status); + } +} + +template +inline napi_status +TypedThreadSafeFunction::Acquire() const { + return napi_acquire_threadsafe_function(_tsfn); +} + +template +inline napi_status +TypedThreadSafeFunction::Release() const { + return napi_release_threadsafe_function(_tsfn, napi_tsfn_release); +} + +template +inline napi_status +TypedThreadSafeFunction::Abort() const { + return napi_release_threadsafe_function(_tsfn, napi_tsfn_abort); +} + +template +inline ContextType* +TypedThreadSafeFunction::GetContext() const { + void* context; + napi_status status = napi_get_threadsafe_function_context(_tsfn, &context); + NAPI_FATAL_IF_FAILED(status, + "TypedThreadSafeFunction::GetContext", + "napi_get_threadsafe_function_context"); + return static_cast(context); +} + +// static +template +void TypedThreadSafeFunction::CallJsInternal( + napi_env env, napi_value jsCallback, void* context, void* data) { + details::CallJsWrapper( + env, jsCallback, context, data); +} + +#if NAPI_VERSION == 4 +// static +template +Napi::Function +TypedThreadSafeFunction::EmptyFunctionFactory( + Napi::Env env) { + return Napi::Function::New(env, [](const CallbackInfo& cb) {}); +} + +// static +template +Napi::Function +TypedThreadSafeFunction::FunctionOrEmpty( + Napi::Env env, Napi::Function& callback) { + if (callback.IsEmpty()) { + return EmptyFunctionFactory(env); + } + return callback; +} + +#else +// static +template +std::nullptr_t +TypedThreadSafeFunction::EmptyFunctionFactory( + Napi::Env /*env*/) { + return nullptr; +} + +// static +template +Napi::Function +TypedThreadSafeFunction::FunctionOrEmpty( + Napi::Env /*env*/, Napi::Function& callback) { + return callback; +} + +#endif + +//////////////////////////////////////////////////////////////////////////////// +// ThreadSafeFunction class +//////////////////////////////////////////////////////////////////////////////// + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount) { + return New( + env, callback, Object(), resourceName, maxQueueSize, initialThreadCount); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + context); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + finalizeCallback); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + 
ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback, + FinalizerDataType* data) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + finalizeCallback, + data); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + context, + finalizeCallback); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + context, + finalizeCallback, + data); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount) { + return New(env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + static_cast(nullptr) /* context */); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + return New(env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + context, + [](Env, ContextType*) {} /* empty finalizer */); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback) { + return New(env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + static_cast(nullptr) /* context */, + finalizeCallback, + static_cast(nullptr) /* data */, + details::ThreadSafeFinalize::Wrapper); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback, + FinalizerDataType* data) { + return New(env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + static_cast(nullptr) /* context */, + finalizeCallback, + data, + details::ThreadSafeFinalize:: + FinalizeWrapperWithData); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback) { + return New( + env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + context, + finalizeCallback, + static_cast(nullptr) /* data */, + details::ThreadSafeFinalize::FinalizeWrapperWithContext); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t 
maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + return New( + env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + context, + finalizeCallback, + data, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext); +} + +inline ThreadSafeFunction::ThreadSafeFunction() : _tsfn() {} + +inline ThreadSafeFunction::ThreadSafeFunction(napi_threadsafe_function tsfn) + : _tsfn(tsfn) {} + +inline ThreadSafeFunction::operator napi_threadsafe_function() const { + return _tsfn; +} + +inline napi_status ThreadSafeFunction::BlockingCall() const { + return CallInternal(nullptr, napi_tsfn_blocking); +} + +template <> +inline napi_status ThreadSafeFunction::BlockingCall(void* data) const { + return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_blocking); +} + +template +inline napi_status ThreadSafeFunction::BlockingCall(Callback callback) const { + return CallInternal(new CallbackWrapper(callback), napi_tsfn_blocking); +} + +template +inline napi_status ThreadSafeFunction::BlockingCall(DataType* data, + Callback callback) const { + auto wrapper = [data, callback](Env env, Function jsCallback) { + callback(env, jsCallback, data); + }; + return CallInternal(new CallbackWrapper(wrapper), napi_tsfn_blocking); +} + +inline napi_status ThreadSafeFunction::NonBlockingCall() const { + return CallInternal(nullptr, napi_tsfn_nonblocking); +} + +template <> +inline napi_status ThreadSafeFunction::NonBlockingCall(void* data) const { + return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_nonblocking); +} + +template +inline napi_status ThreadSafeFunction::NonBlockingCall( + Callback callback) const { + return CallInternal(new CallbackWrapper(callback), napi_tsfn_nonblocking); +} + +template +inline napi_status ThreadSafeFunction::NonBlockingCall( + DataType* data, Callback callback) const { + auto wrapper = [data, callback](Env env, Function jsCallback) { + callback(env, jsCallback, data); + }; + return CallInternal(new CallbackWrapper(wrapper), napi_tsfn_nonblocking); +} + +inline void ThreadSafeFunction::Ref(napi_env env) const { + if (_tsfn != nullptr) { + napi_status status = napi_ref_threadsafe_function(env, _tsfn); + NAPI_THROW_IF_FAILED_VOID(env, status); + } +} + +inline void ThreadSafeFunction::Unref(napi_env env) const { + if (_tsfn != nullptr) { + napi_status status = napi_unref_threadsafe_function(env, _tsfn); + NAPI_THROW_IF_FAILED_VOID(env, status); + } +} + +inline napi_status ThreadSafeFunction::Acquire() const { + return napi_acquire_threadsafe_function(_tsfn); +} + +inline napi_status ThreadSafeFunction::Release() const { + return napi_release_threadsafe_function(_tsfn, napi_tsfn_release); +} + +inline napi_status ThreadSafeFunction::Abort() const { + return napi_release_threadsafe_function(_tsfn, napi_tsfn_abort); +} + +inline ThreadSafeFunction::ConvertibleContext ThreadSafeFunction::GetContext() + const { + void* context; + napi_status status = napi_get_threadsafe_function_context(_tsfn, &context); + NAPI_FATAL_IF_FAILED(status, + "ThreadSafeFunction::GetContext", + "napi_get_threadsafe_function_context"); + return ConvertibleContext({context}); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data, + 
napi_finalize wrapper) { + static_assert(details::can_make_string::value || + std::is_convertible::value, + "Resource name should be convertible to the string type"); + + ThreadSafeFunction tsfn; + auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = + napi_create_threadsafe_function(env, + callback, + resource, + Value::From(env, resourceName), + maxQueueSize, + initialThreadCount, + finalizeData, + wrapper, + context, + CallJS, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, ThreadSafeFunction()); + } + + return tsfn; +} + +inline napi_status ThreadSafeFunction::CallInternal( + CallbackWrapper* callbackWrapper, + napi_threadsafe_function_call_mode mode) const { + napi_status status = + napi_call_threadsafe_function(_tsfn, callbackWrapper, mode); + if (status != napi_ok && callbackWrapper != nullptr) { + delete callbackWrapper; + } + + return status; +} + +// static +inline void ThreadSafeFunction::CallJS(napi_env env, + napi_value jsCallback, + void* /* context */, + void* data) { + if (env == nullptr && jsCallback == nullptr) { + return; + } + + details::WrapVoidCallback([&]() { + if (data != nullptr) { + auto* callbackWrapper = static_cast(data); + (*callbackWrapper)(env, Function(env, jsCallback)); + delete callbackWrapper; + } else if (jsCallback != nullptr) { + Function(env, jsCallback).Call({}); + } + }); +} + +//////////////////////////////////////////////////////////////////////////////// +// Async Progress Worker Base class +//////////////////////////////////////////////////////////////////////////////// +template +inline AsyncProgressWorkerBase::AsyncProgressWorkerBase( + const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource, + size_t queue_size) + : AsyncWorker(receiver, callback, resource_name, resource) { + // Fill all possible arguments to work around ambiguous + // ThreadSafeFunction::New signatures. + _tsfn = ThreadSafeFunction::New(callback.Env(), + callback, + resource, + resource_name, + queue_size, + /** initialThreadCount */ 1, + /** context */ this, + OnThreadSafeFunctionFinalize, + /** finalizeData */ this); +} + +#if NAPI_VERSION > 4 +template +inline AsyncProgressWorkerBase::AsyncProgressWorkerBase( + Napi::Env env, + const char* resource_name, + const Object& resource, + size_t queue_size) + : AsyncWorker(env, resource_name, resource) { + // TODO: Once the changes to make the callback optional for threadsafe + // functions are available on all versions we can remove the dummy Function + // here. + Function callback; + // Fill all possible arguments to work around ambiguous + // ThreadSafeFunction::New signatures. + _tsfn = ThreadSafeFunction::New(env, + callback, + resource, + resource_name, + queue_size, + /** initialThreadCount */ 1, + /** context */ this, + OnThreadSafeFunctionFinalize, + /** finalizeData */ this); +} +#endif + +template +inline AsyncProgressWorkerBase::~AsyncProgressWorkerBase() { + // Abort pending tsfn call. + // Don't send progress events after we've already completed. + // It's ok to call ThreadSafeFunction::Abort and ThreadSafeFunction::Release + // duplicated. 
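+  // Usage note (illustrative sketch, not part of this header): the
+  // ThreadSafeFunction API implemented above is typically driven from a
+  // native thread. `StartThread` is a hypothetical addon binding; it assumes
+  // NAPI_HAS_THREADS and <thread>.
+  //
+  //   Napi::Value StartThread(const Napi::CallbackInfo& info) {
+  //     Napi::ThreadSafeFunction tsfn = Napi::ThreadSafeFunction::New(
+  //         info.Env(), info[0].As<Napi::Function>(),
+  //         "example", 0 /* unlimited queue */, 1 /* one initial thread */);
+  //     std::thread([tsfn]() {
+  //       // Schedule a call to the JS callback from the worker thread.
+  //       tsfn.NonBlockingCall([](Napi::Env env, Napi::Function jsCallback) {
+  //         jsCallback.Call({Napi::Number::New(env, 42)});
+  //       });
+  //       tsfn.Release();  // this thread is done using the TSFN
+  //     }).detach();
+  //     return info.Env().Undefined();
+  //   }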
+ _tsfn.Abort(); +} + +template +inline void AsyncProgressWorkerBase::OnAsyncWorkProgress( + Napi::Env /* env */, Napi::Function /* jsCallback */, void* data) { + ThreadSafeData* tsd = static_cast(data); + tsd->asyncprogressworker()->OnWorkProgress(tsd->data()); + delete tsd; +} + +template +inline napi_status AsyncProgressWorkerBase::NonBlockingCall( + DataType* data) { + auto tsd = new AsyncProgressWorkerBase::ThreadSafeData(this, data); + auto ret = _tsfn.NonBlockingCall(tsd, OnAsyncWorkProgress); + if (ret != napi_ok) { + delete tsd; + } + return ret; +} + +template +inline void AsyncProgressWorkerBase::OnWorkComplete( + Napi::Env /* env */, napi_status status) { + _work_completed = true; + _complete_status = status; + _tsfn.Release(); +} + +template +inline void AsyncProgressWorkerBase::OnThreadSafeFunctionFinalize( + Napi::Env env, void* /* data */, AsyncProgressWorkerBase* context) { + if (context->_work_completed) { + context->AsyncWorker::OnWorkComplete(env, context->_complete_status); + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Async Progress Worker class +//////////////////////////////////////////////////////////////////////////////// +template +inline AsyncProgressWorker::AsyncProgressWorker(const Function& callback) + : AsyncProgressWorker(callback, "generic") {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(const Function& callback, + const char* resource_name) + : AsyncProgressWorker( + callback, resource_name, Object::New(callback.Env())) {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(const Function& callback, + const char* resource_name, + const Object& resource) + : AsyncProgressWorker( + Object::New(callback.Env()), callback, resource_name, resource) {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(const Object& receiver, + const Function& callback) + : AsyncProgressWorker(receiver, callback, "generic") {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(const Object& receiver, + const Function& callback, + const char* resource_name) + : AsyncProgressWorker( + receiver, callback, resource_name, Object::New(callback.Env())) {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource) + : AsyncProgressWorkerBase(receiver, callback, resource_name, resource), + _asyncdata(nullptr), + _asyncsize(0), + _signaled(false) {} + +#if NAPI_VERSION > 4 +template +inline AsyncProgressWorker::AsyncProgressWorker(Napi::Env env) + : AsyncProgressWorker(env, "generic") {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(Napi::Env env, + const char* resource_name) + : AsyncProgressWorker(env, resource_name, Object::New(env)) {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(Napi::Env env, + const char* resource_name, + const Object& resource) + : AsyncProgressWorkerBase(env, resource_name, resource), + _asyncdata(nullptr), + _asyncsize(0) {} +#endif + +template +inline AsyncProgressWorker::~AsyncProgressWorker() { + { + std::lock_guard lock(this->_mutex); + _asyncdata = nullptr; + _asyncsize = 0; + } +} + +template +inline void AsyncProgressWorker::Execute() { + ExecutionProgress progress(this); + Execute(progress); +} + +template +inline void AsyncProgressWorker::OnWorkProgress(void*) { + T* data; + size_t size; + bool signaled; + { + std::lock_guard lock(this->_mutex); + data = this->_asyncdata; + size = this->_asyncsize; + signaled = 
this->_signaled; + this->_asyncdata = nullptr; + this->_asyncsize = 0; + this->_signaled = false; + } + + /** + * The callback of ThreadSafeFunction is not been invoked immediately on the + * callback of uv_async_t (uv io poll), rather the callback of TSFN is + * invoked on the right next uv idle callback. There are chances that during + * the deferring the signal of uv_async_t is been sent again, i.e. potential + * not coalesced two calls of the TSFN callback. + */ + if (data == nullptr && !signaled) { + return; + } + + this->OnProgress(data, size); + delete[] data; +} + +template +inline void AsyncProgressWorker::SendProgress_(const T* data, size_t count) { + T* new_data = new T[count]; + std::copy(data, data + count, new_data); + + T* old_data; + { + std::lock_guard lock(this->_mutex); + old_data = _asyncdata; + _asyncdata = new_data; + _asyncsize = count; + _signaled = false; + } + this->NonBlockingCall(nullptr); + + delete[] old_data; +} + +template +inline void AsyncProgressWorker::Signal() { + { + std::lock_guard lock(this->_mutex); + _signaled = true; + } + this->NonBlockingCall(static_cast(nullptr)); +} + +template +inline void AsyncProgressWorker::ExecutionProgress::Signal() const { + this->_worker->Signal(); +} + +template +inline void AsyncProgressWorker::ExecutionProgress::Send( + const T* data, size_t count) const { + _worker->SendProgress_(data, count); +} + +//////////////////////////////////////////////////////////////////////////////// +// Async Progress Queue Worker class +//////////////////////////////////////////////////////////////////////////////// +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Function& callback) + : AsyncProgressQueueWorker(callback, "generic") {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Function& callback, const char* resource_name) + : AsyncProgressQueueWorker( + callback, resource_name, Object::New(callback.Env())) {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Function& callback, const char* resource_name, const Object& resource) + : AsyncProgressQueueWorker( + Object::New(callback.Env()), callback, resource_name, resource) {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Object& receiver, const Function& callback) + : AsyncProgressQueueWorker(receiver, callback, "generic") {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Object& receiver, const Function& callback, const char* resource_name) + : AsyncProgressQueueWorker( + receiver, callback, resource_name, Object::New(callback.Env())) {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource) + : AsyncProgressWorkerBase>( + receiver, + callback, + resource_name, + resource, + /** unlimited queue size */ 0) {} + +#if NAPI_VERSION > 4 +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker(Napi::Env env) + : AsyncProgressQueueWorker(env, "generic") {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + Napi::Env env, const char* resource_name) + : AsyncProgressQueueWorker(env, resource_name, Object::New(env)) {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + Napi::Env env, const char* resource_name, const Object& resource) + : AsyncProgressWorkerBase>( + env, resource_name, resource, /** unlimited queue size */ 0) {} +#endif + +template 
+inline void AsyncProgressQueueWorker::Execute() { + ExecutionProgress progress(this); + Execute(progress); +} + +template +inline void AsyncProgressQueueWorker::OnWorkProgress( + std::pair* datapair) { + if (datapair == nullptr) { + return; + } + + T* data = datapair->first; + size_t size = datapair->second; + + this->OnProgress(data, size); + delete datapair; + delete[] data; +} + +template +inline void AsyncProgressQueueWorker::SendProgress_(const T* data, + size_t count) { + T* new_data = new T[count]; + std::copy(data, data + count, new_data); + + auto pair = new std::pair(new_data, count); + this->NonBlockingCall(pair); +} + +template +inline void AsyncProgressQueueWorker::Signal() const { + this->SendProgress_(static_cast(nullptr), 0); +} + +template +inline void AsyncProgressQueueWorker::OnWorkComplete(Napi::Env env, + napi_status status) { + // Draining queued items in TSFN. + AsyncProgressWorkerBase>::OnWorkComplete(env, status); +} + +template +inline void AsyncProgressQueueWorker::ExecutionProgress::Signal() const { + _worker->SendProgress_(static_cast(nullptr), 0); +} + +template +inline void AsyncProgressQueueWorker::ExecutionProgress::Send( + const T* data, size_t count) const { + _worker->SendProgress_(data, count); +} +#endif // NAPI_VERSION > 3 && NAPI_HAS_THREADS + +//////////////////////////////////////////////////////////////////////////////// +// Memory Management class +//////////////////////////////////////////////////////////////////////////////// + +inline int64_t MemoryManagement::AdjustExternalMemory(Env env, + int64_t change_in_bytes) { + int64_t result; + napi_status status = + napi_adjust_external_memory(env, change_in_bytes, &result); + NAPI_THROW_IF_FAILED(env, status, 0); + return result; +} + +//////////////////////////////////////////////////////////////////////////////// +// Version Management class +//////////////////////////////////////////////////////////////////////////////// + +inline uint32_t VersionManagement::GetNapiVersion(Env env) { + uint32_t result; + napi_status status = napi_get_version(env, &result); + NAPI_THROW_IF_FAILED(env, status, 0); + return result; +} + +inline const napi_node_version* VersionManagement::GetNodeVersion(Env env) { + const napi_node_version* result; + napi_status status = napi_get_node_version(env, &result); + NAPI_THROW_IF_FAILED(env, status, 0); + return result; +} + +#if NAPI_VERSION > 5 +//////////////////////////////////////////////////////////////////////////////// +// Addon class +//////////////////////////////////////////////////////////////////////////////// + +template +inline Object Addon::Init(Env env, Object exports) { + T* addon = new T(env, exports); + env.SetInstanceData(addon); + return addon->entry_point_; +} + +template +inline T* Addon::Unwrap(Object wrapper) { + return wrapper.Env().GetInstanceData(); +} + +template +inline void Addon::DefineAddon( + Object exports, const std::initializer_list& props) { + DefineProperties(exports, props); + entry_point_ = exports; +} + +template +inline Napi::Object Addon::DefineProperties( + Object object, const std::initializer_list& props) { + const napi_property_descriptor* properties = + reinterpret_cast(props.begin()); + size_t size = props.size(); + napi_status status = + napi_define_properties(object.Env(), object, size, properties); + NAPI_THROW_IF_FAILED(object.Env(), status, object); + for (size_t idx = 0; idx < size; idx++) + T::AttachPropData(object.Env(), object, &properties[idx]); + return object; +} +#endif // NAPI_VERSION > 5 + +#if NAPI_VERSION 
> 2 +template +Env::CleanupHook Env::AddCleanupHook(Hook hook, Arg* arg) { + return CleanupHook(*this, hook, arg); +} + +template +Env::CleanupHook Env::AddCleanupHook(Hook hook) { + return CleanupHook(*this, hook); +} + +template +Env::CleanupHook::CleanupHook() { + data = nullptr; +} + +template +Env::CleanupHook::CleanupHook(Napi::Env env, Hook hook) + : wrapper(Env::CleanupHook::Wrapper) { + data = new CleanupData{std::move(hook), nullptr}; + napi_status status = napi_add_env_cleanup_hook(env, wrapper, data); + if (status != napi_ok) { + delete data; + data = nullptr; + } +} + +template +Env::CleanupHook::CleanupHook(Napi::Env env, Hook hook, Arg* arg) + : wrapper(Env::CleanupHook::WrapperWithArg) { + data = new CleanupData{std::move(hook), arg}; + napi_status status = napi_add_env_cleanup_hook(env, wrapper, data); + if (status != napi_ok) { + delete data; + data = nullptr; + } +} + +template +bool Env::CleanupHook::Remove(Env env) { + napi_status status = napi_remove_env_cleanup_hook(env, wrapper, data); + delete data; + data = nullptr; + return status == napi_ok; +} + +template +bool Env::CleanupHook::IsEmpty() const { + return data == nullptr; +} +#endif // NAPI_VERSION > 2 + +#ifdef NAPI_CPP_CUSTOM_NAMESPACE +} // namespace NAPI_CPP_CUSTOM_NAMESPACE +#endif + +} // namespace Napi + +#endif // SRC_NAPI_INL_H_ diff --git a/miniprogram/node_modules/node-addon-api/napi.h b/miniprogram/node_modules/node-addon-api/napi.h new file mode 100644 index 00000000..9f20cb88 --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/napi.h @@ -0,0 +1,3201 @@ +#ifndef SRC_NAPI_H_ +#define SRC_NAPI_H_ + +#ifndef NAPI_HAS_THREADS +#if !defined(__wasm__) || (defined(__EMSCRIPTEN_PTHREADS__) || \ + (defined(__wasi__) && defined(_REENTRANT))) +#define NAPI_HAS_THREADS 1 +#else +#define NAPI_HAS_THREADS 0 +#endif +#endif + +#include +#include +#include +#include +#if NAPI_HAS_THREADS +#include +#endif // NAPI_HAS_THREADS +#include +#include + +// VS2015 RTM has bugs with constexpr, so require min of VS2015 Update 3 (known +// good version) +#if !defined(_MSC_VER) || _MSC_FULL_VER >= 190024210 +#define NAPI_HAS_CONSTEXPR 1 +#endif + +// VS2013 does not support char16_t literal strings, so we'll work around it +// using wchar_t strings and casting them. This is safe as long as the character +// sizes are the same. +#if defined(_MSC_VER) && _MSC_VER <= 1800 +static_assert(sizeof(char16_t) == sizeof(wchar_t), + "Size mismatch between char16_t and wchar_t"); +#define NAPI_WIDE_TEXT(x) reinterpret_cast(L##x) +#else +#define NAPI_WIDE_TEXT(x) u##x +#endif + +// If C++ exceptions are not explicitly enabled or disabled, enable them +// if exceptions were enabled in the compiler settings. +#if !defined(NAPI_CPP_EXCEPTIONS) && !defined(NAPI_DISABLE_CPP_EXCEPTIONS) +#if defined(_CPPUNWIND) || defined(__EXCEPTIONS) +#define NAPI_CPP_EXCEPTIONS +#else +#error Exception support not detected. \ + Define either NAPI_CPP_EXCEPTIONS or NAPI_DISABLE_CPP_EXCEPTIONS. +#endif +#endif + +// If C++ NAPI_CPP_EXCEPTIONS are enabled, NODE_ADDON_API_ENABLE_MAYBE should +// not be set +#if defined(NAPI_CPP_EXCEPTIONS) && defined(NODE_ADDON_API_ENABLE_MAYBE) +#error NODE_ADDON_API_ENABLE_MAYBE should not be set when \ + NAPI_CPP_EXCEPTIONS is defined. +#endif + +#ifdef _NOEXCEPT +#define NAPI_NOEXCEPT _NOEXCEPT +#else +#define NAPI_NOEXCEPT noexcept +#endif + +#ifdef NAPI_CPP_EXCEPTIONS + +// When C++ exceptions are enabled, Errors are thrown directly. There is no need +// to return anything after the throw statements. 
The variadic parameter is an +// optional return value that is ignored. +// We need _VOID versions of the macros to avoid warnings resulting from +// leaving the NAPI_THROW_* `...` argument empty. + +#define NAPI_THROW(e, ...) throw e +#define NAPI_THROW_VOID(e) throw e + +#define NAPI_THROW_IF_FAILED(env, status, ...) \ + if ((status) != napi_ok) throw Napi::Error::New(env); + +#define NAPI_THROW_IF_FAILED_VOID(env, status) \ + if ((status) != napi_ok) throw Napi::Error::New(env); + +#else // NAPI_CPP_EXCEPTIONS + +// When C++ exceptions are disabled, Errors are thrown as JavaScript exceptions, +// which are pending until the callback returns to JS. The variadic parameter +// is an optional return value; usually it is an empty result. +// We need _VOID versions of the macros to avoid warnings resulting from +// leaving the NAPI_THROW_* `...` argument empty. + +#define NAPI_THROW(e, ...) \ + do { \ + (e).ThrowAsJavaScriptException(); \ + return __VA_ARGS__; \ + } while (0) + +#define NAPI_THROW_VOID(e) \ + do { \ + (e).ThrowAsJavaScriptException(); \ + return; \ + } while (0) + +#define NAPI_THROW_IF_FAILED(env, status, ...) \ + if ((status) != napi_ok) { \ + Napi::Error::New(env).ThrowAsJavaScriptException(); \ + return __VA_ARGS__; \ + } + +#define NAPI_THROW_IF_FAILED_VOID(env, status) \ + if ((status) != napi_ok) { \ + Napi::Error::New(env).ThrowAsJavaScriptException(); \ + return; \ + } + +#endif // NAPI_CPP_EXCEPTIONS + +#ifdef NODE_ADDON_API_ENABLE_MAYBE +#define NAPI_MAYBE_THROW_IF_FAILED(env, status, type) \ + NAPI_THROW_IF_FAILED(env, status, Napi::Nothing()) + +#define NAPI_RETURN_OR_THROW_IF_FAILED(env, status, result, type) \ + NAPI_MAYBE_THROW_IF_FAILED(env, status, type); \ + return Napi::Just(result); +#else +#define NAPI_MAYBE_THROW_IF_FAILED(env, status, type) \ + NAPI_THROW_IF_FAILED(env, status, type()) + +#define NAPI_RETURN_OR_THROW_IF_FAILED(env, status, result, type) \ + NAPI_MAYBE_THROW_IF_FAILED(env, status, type); \ + return result; +#endif + +#define NAPI_DISALLOW_ASSIGN(CLASS) void operator=(const CLASS&) = delete; +#define NAPI_DISALLOW_COPY(CLASS) CLASS(const CLASS&) = delete; + +#define NAPI_DISALLOW_ASSIGN_COPY(CLASS) \ + NAPI_DISALLOW_ASSIGN(CLASS) \ + NAPI_DISALLOW_COPY(CLASS) + +#define NAPI_CHECK(condition, location, message) \ + do { \ + if (!(condition)) { \ + Napi::Error::Fatal((location), (message)); \ + } \ + } while (0) + +#define NAPI_FATAL_IF_FAILED(status, location, message) \ + NAPI_CHECK((status) == napi_ok, location, message) + +//////////////////////////////////////////////////////////////////////////////// +/// Node-API C++ Wrapper Classes +/// +/// These classes wrap the "Node-API" ABI-stable C APIs for Node.js, providing a +/// C++ object model and C++ exception-handling semantics with low overhead. +/// The wrappers are all header-only so that they do not affect the ABI. +//////////////////////////////////////////////////////////////////////////////// +namespace Napi { + +#ifdef NAPI_CPP_CUSTOM_NAMESPACE +// NAPI_CPP_CUSTOM_NAMESPACE can be #define'd per-addon to avoid symbol +// conflicts between different instances of node-addon-api + +// First dummy definition of the namespace to make sure that Napi::(name) still +// refers to the right things inside this file. 
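+// Usage note (illustrative): NAPI_CPP_CUSTOM_NAMESPACE is typically supplied
+// as a compiler define, e.g. "-DNAPI_CPP_CUSTOM_NAMESPACE=my_addon" or a
+// "defines" entry in binding.gyp; "my_addon" is a placeholder name. Each
+// addon built with its own value gets its own copy of these types, so two
+// addons loaded into one process do not collide on node-addon-api symbols.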
+namespace NAPI_CPP_CUSTOM_NAMESPACE {} +using namespace NAPI_CPP_CUSTOM_NAMESPACE; + +namespace NAPI_CPP_CUSTOM_NAMESPACE { +#endif + +// Forward declarations +class Env; +class Value; +class Boolean; +class Number; +#if NAPI_VERSION > 5 +class BigInt; +#endif // NAPI_VERSION > 5 +#if (NAPI_VERSION > 4) +class Date; +#endif +class String; +class Object; +class Array; +class ArrayBuffer; +class Function; +class Error; +class PropertyDescriptor; +class CallbackInfo; +class TypedArray; +template +class TypedArrayOf; + +using Int8Array = + TypedArrayOf; ///< Typed-array of signed 8-bit integers +using Uint8Array = + TypedArrayOf; ///< Typed-array of unsigned 8-bit integers +using Int16Array = + TypedArrayOf; ///< Typed-array of signed 16-bit integers +using Uint16Array = + TypedArrayOf; ///< Typed-array of unsigned 16-bit integers +using Int32Array = + TypedArrayOf; ///< Typed-array of signed 32-bit integers +using Uint32Array = + TypedArrayOf; ///< Typed-array of unsigned 32-bit integers +using Float32Array = + TypedArrayOf; ///< Typed-array of 32-bit floating-point values +using Float64Array = + TypedArrayOf; ///< Typed-array of 64-bit floating-point values +#if NAPI_VERSION > 5 +using BigInt64Array = + TypedArrayOf; ///< Typed array of signed 64-bit integers +using BigUint64Array = + TypedArrayOf; ///< Typed array of unsigned 64-bit integers +#endif // NAPI_VERSION > 5 + +/// Defines the signature of a Node-API C++ module's registration callback +/// (init) function. +using ModuleRegisterCallback = Object (*)(Env env, Object exports); + +class MemoryManagement; + +/// A simple Maybe type, representing an object which may or may not have a +/// value. +/// +/// If an API method returns a Maybe<>, the API method can potentially fail +/// either because an exception is thrown, or because an exception is pending, +/// e.g. because a previous API call threw an exception that hasn't been +/// caught yet. In that case, a "Nothing" value is returned. +template +class Maybe { + public: + bool IsNothing() const; + bool IsJust() const; + + /// Short-hand for Unwrap(), which doesn't return a value. Could be used + /// where the actual value of the Maybe is not needed like Object::Set. + /// If this Maybe is nothing (empty), node-addon-api will crash the + /// process. + void Check() const; + + /// Return the value of type T contained in the Maybe. If this Maybe is + /// nothing (empty), node-addon-api will crash the process. + T Unwrap() const; + + /// Return the value of type T contained in the Maybe, or using a default + /// value if this Maybe is nothing (empty). + T UnwrapOr(const T& default_value) const; + + /// Converts this Maybe to a value of type T in the out. If this Maybe is + /// nothing (empty), `false` is returned and `out` is left untouched. + bool UnwrapTo(T* out) const; + + bool operator==(const Maybe& other) const; + bool operator!=(const Maybe& other) const; + + private: + Maybe(); + explicit Maybe(const T& t); + + bool _has_value; + T _value; + + template + friend Maybe Nothing(); + template + friend Maybe Just(const U& u); +}; + +template +inline Maybe Nothing(); + +template +inline Maybe Just(const T& t); + +#if defined(NODE_ADDON_API_ENABLE_MAYBE) +template +using MaybeOrValue = Maybe; +#else +template +using MaybeOrValue = T; +#endif + +/// Environment for Node-API values and operations. +/// +/// All Node-API values and operations must be associated with an environment. 
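+// Usage note (illustrative sketch): when NODE_ADDON_API_ENABLE_MAYBE is
+// defined, fallible calls such as Object::Get return Maybe<T>, and pending
+// JavaScript exceptions must be handled explicitly; `obj` below stands for
+// any Napi::Object.
+//
+//   Napi::Maybe<Napi::Value> maybe = obj.Get("field");
+//   Napi::Value value;
+//   if (!maybe.UnwrapTo(&value)) {
+//     return;  // a JS exception is pending; bail out instead of crashing
+//   }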
+/// An environment instance is always provided to callback functions; that +/// environment must then be used for any creation of Node-API values or other +/// Node-API operations within the callback. (Many methods infer the +/// environment from the `this` instance that the method is called on.) +/// +/// In the future, multiple environments per process may be supported, +/// although current implementations only support one environment per process. +/// +/// In the V8 JavaScript engine, a Node-API environment approximately +/// corresponds to an Isolate. +class Env { + private: + napi_env _env; +#if NAPI_VERSION > 5 + template + static void DefaultFini(Env, T* data); + template + static void DefaultFiniWithHint(Env, DataType* data, HintType* hint); +#endif // NAPI_VERSION > 5 + public: + Env(napi_env env); + + operator napi_env() const; + + Object Global() const; + Value Undefined() const; + Value Null() const; + + bool IsExceptionPending() const; + Error GetAndClearPendingException() const; + + MaybeOrValue RunScript(const char* utf8script) const; + MaybeOrValue RunScript(const std::string& utf8script) const; + MaybeOrValue RunScript(String script) const; + +#if NAPI_VERSION > 2 + template + class CleanupHook; + + template + CleanupHook AddCleanupHook(Hook hook); + + template + CleanupHook AddCleanupHook(Hook hook, Arg* arg); +#endif // NAPI_VERSION > 2 + +#if NAPI_VERSION > 5 + template + T* GetInstanceData() const; + + template + using Finalizer = void (*)(Env, T*); + template fini = Env::DefaultFini> + void SetInstanceData(T* data) const; + + template + using FinalizerWithHint = void (*)(Env, DataType*, HintType*); + template fini = + Env::DefaultFiniWithHint> + void SetInstanceData(DataType* data, HintType* hint) const; +#endif // NAPI_VERSION > 5 + +#if NAPI_VERSION > 2 + template + class CleanupHook { + public: + CleanupHook(); + CleanupHook(Env env, Hook hook, Arg* arg); + CleanupHook(Env env, Hook hook); + bool Remove(Env env); + bool IsEmpty() const; + + private: + static inline void Wrapper(void* data) NAPI_NOEXCEPT; + static inline void WrapperWithArg(void* data) NAPI_NOEXCEPT; + + void (*wrapper)(void* arg); + struct CleanupData { + Hook hook; + Arg* arg; + } * data; + }; +#endif // NAPI_VERSION > 2 + +#if NAPI_VERSION > 8 + const char* GetModuleFileName() const; +#endif // NAPI_VERSION > 8 +}; + +/// A JavaScript value of unknown type. +/// +/// For type-specific operations, convert to one of the Value subclasses using a +/// `To*` or `As()` method. The `To*` methods do type coercion; the `As()` +/// method does not. +/// +/// Napi::Value value = ... +/// if (!value.IsString()) throw Napi::TypeError::New(env, "Invalid +/// arg..."); Napi::String str = value.As(); // Cast to a +/// string value +/// +/// Napi::Value anotherValue = ... +/// bool isTruthy = anotherValue.ToBoolean(); // Coerce to a boolean value +class Value { + public: + Value(); ///< Creates a new _empty_ Value instance. + Value(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + /// Creates a JS value from a C++ primitive. + /// + /// `value` may be any of: + /// - bool + /// - Any integer type + /// - Any floating point type + /// - const char* (encoded using UTF-8, null-terminated) + /// - const char16_t* (encoded using UTF-16-LE, null-terminated) + /// - std::string (encoded using UTF-8) + /// - std::u16string + /// - napi::Value + /// - napi_value + template + static Value From(napi_env env, const T& value); + + /// Converts to a Node-API value primitive. 
+ /// + /// If the instance is _empty_, this returns `nullptr`. + operator napi_value() const; + + /// Tests if this value strictly equals another value. + bool operator==(const Value& other) const; + + /// Tests if this value does not strictly equal another value. + bool operator!=(const Value& other) const; + + /// Tests if this value strictly equals another value. + bool StrictEquals(const Value& other) const; + + /// Gets the environment the value is associated with. + Napi::Env Env() const; + + /// Checks if the value is empty (uninitialized). + /// + /// An empty value is invalid, and most attempts to perform an operation on an + /// empty value will result in an exception. Note an empty value is distinct + /// from JavaScript `null` or `undefined`, which are valid values. + /// + /// When C++ exceptions are disabled at compile time, a method with a `Value` + /// return type may return an empty value to indicate a pending exception. So + /// when not using C++ exceptions, callers should check whether the value is + /// empty before attempting to use it. + bool IsEmpty() const; + + napi_valuetype Type() const; ///< Gets the type of the value. + + bool IsUndefined() + const; ///< Tests if a value is an undefined JavaScript value. + bool IsNull() const; ///< Tests if a value is a null JavaScript value. + bool IsBoolean() const; ///< Tests if a value is a JavaScript boolean. + bool IsNumber() const; ///< Tests if a value is a JavaScript number. +#if NAPI_VERSION > 5 + bool IsBigInt() const; ///< Tests if a value is a JavaScript bigint. +#endif // NAPI_VERSION > 5 +#if (NAPI_VERSION > 4) + bool IsDate() const; ///< Tests if a value is a JavaScript date. +#endif + bool IsString() const; ///< Tests if a value is a JavaScript string. + bool IsSymbol() const; ///< Tests if a value is a JavaScript symbol. + bool IsArray() const; ///< Tests if a value is a JavaScript array. + bool IsArrayBuffer() + const; ///< Tests if a value is a JavaScript array buffer. + bool IsTypedArray() const; ///< Tests if a value is a JavaScript typed array. + bool IsObject() const; ///< Tests if a value is a JavaScript object. + bool IsFunction() const; ///< Tests if a value is a JavaScript function. + bool IsPromise() const; ///< Tests if a value is a JavaScript promise. + bool IsDataView() const; ///< Tests if a value is a JavaScript data view. + bool IsBuffer() const; ///< Tests if a value is a Node buffer. + bool IsExternal() const; ///< Tests if a value is a pointer to external data. + + /// Casts to another type of `Napi::Value`, when the actual type is known or + /// assumed. + /// + /// This conversion does NOT coerce the type. Calling any methods + /// inappropriate for the actual value type will throw `Napi::Error`. + /// + /// If `NODE_ADDON_API_ENABLE_TYPE_CHECK_ON_AS` is defined, this method + /// asserts that the actual type is the expected type. + template + T As() const; + + MaybeOrValue ToBoolean() + const; ///< Coerces a value to a JavaScript boolean. + MaybeOrValue ToNumber() + const; ///< Coerces a value to a JavaScript number. + MaybeOrValue ToString() + const; ///< Coerces a value to a JavaScript string. + MaybeOrValue ToObject() + const; ///< Coerces a value to a JavaScript object. + + protected: + /// !cond INTERNAL + napi_env _env; + napi_value _value; + /// !endcond +}; + +/// A JavaScript boolean value. 
+class Boolean : public Value { + public: + static Boolean New(napi_env env, ///< Node-API environment + bool value ///< Boolean value + ); + + static void CheckCast(napi_env env, napi_value value); + + Boolean(); ///< Creates a new _empty_ Boolean instance. + Boolean(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + operator bool() const; ///< Converts a Boolean value to a boolean primitive. + bool Value() const; ///< Converts a Boolean value to a boolean primitive. +}; + +/// A JavaScript number value. +class Number : public Value { + public: + static Number New(napi_env env, ///< Node-API environment + double value ///< Number value + ); + + static void CheckCast(napi_env env, napi_value value); + + Number(); ///< Creates a new _empty_ Number instance. + Number(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + operator int32_t() + const; ///< Converts a Number value to a 32-bit signed integer value. + operator uint32_t() + const; ///< Converts a Number value to a 32-bit unsigned integer value. + operator int64_t() + const; ///< Converts a Number value to a 64-bit signed integer value. + operator float() + const; ///< Converts a Number value to a 32-bit floating-point value. + operator double() + const; ///< Converts a Number value to a 64-bit floating-point value. + + int32_t Int32Value() + const; ///< Converts a Number value to a 32-bit signed integer value. + uint32_t Uint32Value() + const; ///< Converts a Number value to a 32-bit unsigned integer value. + int64_t Int64Value() + const; ///< Converts a Number value to a 64-bit signed integer value. + float FloatValue() + const; ///< Converts a Number value to a 32-bit floating-point value. + double DoubleValue() + const; ///< Converts a Number value to a 64-bit floating-point value. +}; + +#if NAPI_VERSION > 5 +/// A JavaScript bigint value. +class BigInt : public Value { + public: + static BigInt New(napi_env env, ///< Node-API environment + int64_t value ///< Number value + ); + static BigInt New(napi_env env, ///< Node-API environment + uint64_t value ///< Number value + ); + + /// Creates a new BigInt object using a specified sign bit and a + /// specified list of digits/words. + /// The resulting number is calculated as: + /// (-1)^sign_bit * (words[0] * (2^64)^0 + words[1] * (2^64)^1 + ...) + static BigInt New(napi_env env, ///< Node-API environment + int sign_bit, ///< Sign bit. 1 if negative. + size_t word_count, ///< Number of words in array + const uint64_t* words ///< Array of words + ); + + static void CheckCast(napi_env env, napi_value value); + + BigInt(); ///< Creates a new _empty_ BigInt instance. + BigInt(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + int64_t Int64Value(bool* lossless) + const; ///< Converts a BigInt value to a 64-bit signed integer value. + uint64_t Uint64Value(bool* lossless) + const; ///< Converts a BigInt value to a 64-bit unsigned integer value. + + size_t WordCount() const; ///< The number of 64-bit words needed to store + ///< the result of ToWords(). + + /// Writes the contents of this BigInt to a specified memory location. + /// `sign_bit` must be provided and will be set to 1 if this BigInt is + /// negative. + /// `*word_count` has to be initialized to the length of the `words` array. + /// Upon return, it will be set to the actual number of words that would + /// be needed to store this BigInt (i.e. the return value of `WordCount()`). 
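+  /// Example (illustrative): two-phase extraction of the digits of a BigInt
+  /// value `big`, sized via WordCount() as described above:
+  ///
+  ///   size_t word_count = big.WordCount();
+  ///   std::vector<uint64_t> words(word_count);
+  ///   int sign_bit = 0;
+  ///   big.ToWords(&sign_bit, &word_count, words.data());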
+ void ToWords(int* sign_bit, size_t* word_count, uint64_t* words); +}; +#endif // NAPI_VERSION > 5 + +#if (NAPI_VERSION > 4) +/// A JavaScript date value. +class Date : public Value { + public: + /// Creates a new Date value from a double primitive. + static Date New(napi_env env, ///< Node-API environment + double value ///< Number value + ); + + static void CheckCast(napi_env env, napi_value value); + + Date(); ///< Creates a new _empty_ Date instance. + Date(napi_env env, napi_value value); ///< Wraps a Node-API value primitive. + operator double() const; ///< Converts a Date value to double primitive + + double ValueOf() const; ///< Converts a Date value to a double primitive. +}; +#endif + +/// A JavaScript string or symbol value (that can be used as a property name). +class Name : public Value { + public: + static void CheckCast(napi_env env, napi_value value); + + Name(); ///< Creates a new _empty_ Name instance. + Name(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. +}; + +/// A JavaScript string value. +class String : public Name { + public: + /// Creates a new String value from a UTF-8 encoded C++ string. + static String New(napi_env env, ///< Node-API environment + const std::string& value ///< UTF-8 encoded C++ string + ); + + /// Creates a new String value from a UTF-16 encoded C++ string. + static String New(napi_env env, ///< Node-API environment + const std::u16string& value ///< UTF-16 encoded C++ string + ); + + /// Creates a new String value from a UTF-8 encoded C string. + static String New( + napi_env env, ///< Node-API environment + const char* value ///< UTF-8 encoded null-terminated C string + ); + + /// Creates a new String value from a UTF-16 encoded C string. + static String New( + napi_env env, ///< Node-API environment + const char16_t* value ///< UTF-16 encoded null-terminated C string + ); + + /// Creates a new String value from a UTF-8 encoded C string with specified + /// length. + static String New(napi_env env, ///< Node-API environment + const char* value, ///< UTF-8 encoded C string (not + ///< necessarily null-terminated) + size_t length ///< length of the string in bytes + ); + + /// Creates a new String value from a UTF-16 encoded C string with specified + /// length. + static String New( + napi_env env, ///< Node-API environment + const char16_t* value, ///< UTF-16 encoded C string (not necessarily + ///< null-terminated) + size_t length ///< Length of the string in 2-byte code units + ); + + /// Creates a new String based on the original object's type. + /// + /// `value` may be any of: + /// - const char* (encoded using UTF-8, null-terminated) + /// - const char16_t* (encoded using UTF-16-LE, null-terminated) + /// - std::string (encoded using UTF-8) + /// - std::u16string + template + static String From(napi_env env, const T& value); + + static void CheckCast(napi_env env, napi_value value); + + String(); ///< Creates a new _empty_ String instance. + String(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + operator std::string() + const; ///< Converts a String value to a UTF-8 encoded C++ string. + operator std::u16string() + const; ///< Converts a String value to a UTF-16 encoded C++ string. + std::string Utf8Value() + const; ///< Converts a String value to a UTF-8 encoded C++ string. + std::u16string Utf16Value() + const; ///< Converts a String value to a UTF-16 encoded C++ string. +}; + +/// A JavaScript symbol value. 
+class Symbol : public Name { + public: + /// Creates a new Symbol value with an optional description. + static Symbol New( + napi_env env, ///< Node-API environment + const char* description = + nullptr ///< Optional UTF-8 encoded null-terminated C string + /// describing the symbol + ); + + /// Creates a new Symbol value with a description. + static Symbol New( + napi_env env, ///< Node-API environment + const std::string& + description ///< UTF-8 encoded C++ string describing the symbol + ); + + /// Creates a new Symbol value with a description. + static Symbol New(napi_env env, ///< Node-API environment + String description ///< String value describing the symbol + ); + + /// Creates a new Symbol value with a description. + static Symbol New( + napi_env env, ///< Node-API environment + napi_value description ///< String value describing the symbol + ); + + /// Get a public Symbol (e.g. Symbol.iterator). + static MaybeOrValue WellKnown(napi_env, const std::string& name); + + // Create a symbol in the global registry, UTF-8 Encoded cpp string + static MaybeOrValue For(napi_env env, const std::string& description); + + // Create a symbol in the global registry, C style string (null terminated) + static MaybeOrValue For(napi_env env, const char* description); + + // Create a symbol in the global registry, String value describing the symbol + static MaybeOrValue For(napi_env env, String description); + + // Create a symbol in the global registry, napi_value describing the symbol + static MaybeOrValue For(napi_env env, napi_value description); + + static void CheckCast(napi_env env, napi_value value); + + Symbol(); ///< Creates a new _empty_ Symbol instance. + Symbol(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. +}; + +class TypeTaggable : public Value { + public: +#if NAPI_VERSION >= 8 + void TypeTag(const napi_type_tag* type_tag) const; + bool CheckTypeTag(const napi_type_tag* type_tag) const; +#endif // NAPI_VERSION >= 8 + protected: + TypeTaggable(); + TypeTaggable(napi_env env, napi_value value); +}; + +/// A JavaScript object value. +class Object : public TypeTaggable { + public: + /// Enables property and element assignments using indexing syntax. + /// + /// This is a convenient helper to get and set object properties. As + /// getting and setting object properties may throw with JavaScript + /// exceptions, it is notable that these operations may fail. + /// When NODE_ADDON_API_ENABLE_MAYBE is defined, the process will abort + /// on JavaScript exceptions. + /// + /// Example: + /// + /// Napi::Value propertyValue = object1['A']; + /// object2['A'] = propertyValue; + /// Napi::Value elementValue = array[0]; + /// array[1] = elementValue; + template + class PropertyLValue { + public: + /// Converts an L-value to a value. + operator Value() const; + + /// Assigns a value to the property. The type of value can be + /// anything supported by `Object::Set`. + template + PropertyLValue& operator=(ValueType value); + + private: + PropertyLValue() = delete; + PropertyLValue(Object object, Key key); + napi_env _env; + napi_value _object; + Key _key; + + friend class Napi::Object; + }; + + /// Creates a new Object value. + static Object New(napi_env env ///< Node-API environment + ); + + static void CheckCast(napi_env env, napi_value value); + + Object(); ///< Creates a new _empty_ Object instance. + Object(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + /// Gets or sets a named property. 
+ PropertyLValue operator[]( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ); + + /// Gets or sets a named property. + PropertyLValue operator[]( + const std::string& utf8name ///< UTF-8 encoded property name + ); + + /// Gets or sets an indexed property or array element. + PropertyLValue operator[]( + uint32_t index /// Property / element index + ); + + /// Gets or sets an indexed property or array element. + PropertyLValue operator[](Value index /// Property / element index + ) const; + + /// Gets a named property. + MaybeOrValue operator[]( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Gets a named property. + MaybeOrValue operator[]( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Gets an indexed property or array element. + MaybeOrValue operator[](uint32_t index ///< Property / element index + ) const; + + /// Checks whether a property is present. + MaybeOrValue Has(napi_value key ///< Property key primitive + ) const; + + /// Checks whether a property is present. + MaybeOrValue Has(Value key ///< Property key + ) const; + + /// Checks whether a named property is present. + MaybeOrValue Has( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Checks whether a named property is present. + MaybeOrValue Has( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Checks whether a own property is present. + MaybeOrValue HasOwnProperty(napi_value key ///< Property key primitive + ) const; + + /// Checks whether a own property is present. + MaybeOrValue HasOwnProperty(Value key ///< Property key + ) const; + + /// Checks whether a own property is present. + MaybeOrValue HasOwnProperty( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Checks whether a own property is present. + MaybeOrValue HasOwnProperty( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Gets a property. + MaybeOrValue Get(napi_value key ///< Property key primitive + ) const; + + /// Gets a property. + MaybeOrValue Get(Value key ///< Property key + ) const; + + /// Gets a named property. + MaybeOrValue Get( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Gets a named property. + MaybeOrValue Get( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Sets a property. + template + MaybeOrValue Set(napi_value key, ///< Property key primitive + const ValueType& value ///< Property value primitive + ) const; + + /// Sets a property. + template + MaybeOrValue Set(Value key, ///< Property key + const ValueType& value ///< Property value + ) const; + + /// Sets a named property. + template + MaybeOrValue Set( + const char* utf8name, ///< UTF-8 encoded null-terminated property name + const ValueType& value) const; + + /// Sets a named property. + template + MaybeOrValue Set( + const std::string& utf8name, ///< UTF-8 encoded property name + const ValueType& value ///< Property value primitive + ) const; + + /// Delete property. + MaybeOrValue Delete(napi_value key ///< Property key primitive + ) const; + + /// Delete property. + MaybeOrValue Delete(Value key ///< Property key + ) const; + + /// Delete property. + MaybeOrValue Delete( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Delete property. 
+ MaybeOrValue Delete( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Checks whether an indexed property is present. + MaybeOrValue Has(uint32_t index ///< Property / element index + ) const; + + /// Gets an indexed property or array element. + MaybeOrValue Get(uint32_t index ///< Property / element index + ) const; + + /// Sets an indexed property or array element. + template + MaybeOrValue Set(uint32_t index, ///< Property / element index + const ValueType& value ///< Property value primitive + ) const; + + /// Deletes an indexed property or array element. + MaybeOrValue Delete(uint32_t index ///< Property / element index + ) const; + + /// This operation can fail in case of Proxy.[[OwnPropertyKeys]] and + /// Proxy.[[GetOwnProperty]] calling into JavaScript. See: + /// - + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-ownpropertykeys + /// - + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getownproperty-p + MaybeOrValue GetPropertyNames() const; ///< Get all property names + + /// Defines a property on the object. + /// + /// This operation can fail in case of Proxy.[[DefineOwnProperty]] calling + /// into JavaScript. See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-defineownproperty-p-desc + MaybeOrValue DefineProperty( + const PropertyDescriptor& + property ///< Descriptor for the property to be defined + ) const; + + /// Defines properties on the object. + /// + /// This operation can fail in case of Proxy.[[DefineOwnProperty]] calling + /// into JavaScript. See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-defineownproperty-p-desc + MaybeOrValue DefineProperties( + const std::initializer_list& properties + ///< List of descriptors for the properties to be defined + ) const; + + /// Defines properties on the object. + /// + /// This operation can fail in case of Proxy.[[DefineOwnProperty]] calling + /// into JavaScript. See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-defineownproperty-p-desc + MaybeOrValue DefineProperties( + const std::vector& properties + ///< Vector of descriptors for the properties to be defined + ) const; + + /// Checks if an object is an instance created by a constructor function. + /// + /// This is equivalent to the JavaScript `instanceof` operator. + /// + /// This operation can fail in case of Proxy.[[GetPrototypeOf]] calling into + /// JavaScript. + /// See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getprototypeof + MaybeOrValue InstanceOf( + const Function& constructor ///< Constructor function + ) const; + + template + inline void AddFinalizer(Finalizer finalizeCallback, T* data) const; + + template + inline void AddFinalizer(Finalizer finalizeCallback, + T* data, + Hint* finalizeHint) const; + +#ifdef NAPI_CPP_EXCEPTIONS + class const_iterator; + + inline const_iterator begin() const; + + inline const_iterator end() const; + + class iterator; + + inline iterator begin(); + + inline iterator end(); +#endif // NAPI_CPP_EXCEPTIONS + +#if NAPI_VERSION >= 8 + /// This operation can fail in case of Proxy.[[GetPrototypeOf]] calling into + /// JavaScript. + /// See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getprototypeof + MaybeOrValue Freeze() const; + /// This operation can fail in case of Proxy.[[GetPrototypeOf]] calling into + /// JavaScript. 
+ /// See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getprototypeof + MaybeOrValue Seal() const; +#endif // NAPI_VERSION >= 8 +}; + +template +class External : public TypeTaggable { + public: + static External New(napi_env env, T* data); + + // Finalizer must implement `void operator()(Env env, T* data)`. + template + static External New(napi_env env, T* data, Finalizer finalizeCallback); + // Finalizer must implement `void operator()(Env env, T* data, Hint* hint)`. + template + static External New(napi_env env, + T* data, + Finalizer finalizeCallback, + Hint* finalizeHint); + + static void CheckCast(napi_env env, napi_value value); + + External(); + External(napi_env env, napi_value value); + + T* Data() const; +}; + +class Array : public Object { + public: + static Array New(napi_env env); + static Array New(napi_env env, size_t length); + + static void CheckCast(napi_env env, napi_value value); + + Array(); + Array(napi_env env, napi_value value); + + uint32_t Length() const; +}; + +#ifdef NAPI_CPP_EXCEPTIONS +class Object::const_iterator { + private: + enum class Type { BEGIN, END }; + + inline const_iterator(const Object* object, const Type type); + + public: + inline const_iterator& operator++(); + + inline bool operator==(const const_iterator& other) const; + + inline bool operator!=(const const_iterator& other) const; + + inline const std::pair> operator*() + const; + + private: + const Napi::Object* _object; + Array _keys; + uint32_t _index; + + friend class Object; +}; + +class Object::iterator { + private: + enum class Type { BEGIN, END }; + + inline iterator(Object* object, const Type type); + + public: + inline iterator& operator++(); + + inline bool operator==(const iterator& other) const; + + inline bool operator!=(const iterator& other) const; + + inline std::pair> operator*(); + + private: + Napi::Object* _object; + Array _keys; + uint32_t _index; + + friend class Object; +}; +#endif // NAPI_CPP_EXCEPTIONS + +/// A JavaScript array buffer value. +class ArrayBuffer : public Object { + public: + /// Creates a new ArrayBuffer instance over a new automatically-allocated + /// buffer. + static ArrayBuffer New( + napi_env env, ///< Node-API environment + size_t byteLength ///< Length of the buffer to be allocated, in bytes + ); + +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + /// Creates a new ArrayBuffer instance, using an external buffer with + /// specified byte length. + static ArrayBuffer New( + napi_env env, ///< Node-API environment + void* externalData, ///< Pointer to the external buffer to be used by + ///< the array + size_t byteLength ///< Length of the external buffer to be used by the + ///< array, in bytes + ); + + /// Creates a new ArrayBuffer instance, using an external buffer with + /// specified byte length. + template + static ArrayBuffer New( + napi_env env, ///< Node-API environment + void* externalData, ///< Pointer to the external buffer to be used by + ///< the array + size_t byteLength, ///< Length of the external buffer to be used by the + ///< array, + /// in bytes + Finalizer finalizeCallback ///< Function to be called when the array + ///< buffer is destroyed; + /// must implement `void operator()(Env env, + /// void* externalData)` + ); + + /// Creates a new ArrayBuffer instance, using an external buffer with + /// specified byte length. 
+ template + static ArrayBuffer New( + napi_env env, ///< Node-API environment + void* externalData, ///< Pointer to the external buffer to be used by + ///< the array + size_t byteLength, ///< Length of the external buffer to be used by the + ///< array, + /// in bytes + Finalizer finalizeCallback, ///< Function to be called when the array + ///< buffer is destroyed; + /// must implement `void operator()(Env + /// env, void* externalData, Hint* hint)` + Hint* finalizeHint ///< Hint (second parameter) to be passed to the + ///< finalize callback + ); +#endif // NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + + static void CheckCast(napi_env env, napi_value value); + + ArrayBuffer(); ///< Creates a new _empty_ ArrayBuffer instance. + ArrayBuffer(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + void* Data(); ///< Gets a pointer to the data buffer. + size_t ByteLength(); ///< Gets the length of the array buffer in bytes. + +#if NAPI_VERSION >= 7 + bool IsDetached() const; + void Detach(); +#endif // NAPI_VERSION >= 7 +}; + +/// A JavaScript typed-array value with unknown array type. +/// +/// For type-specific operations, cast to a `TypedArrayOf` instance using the +/// `As()` method: +/// +/// Napi::TypedArray array = ... +/// if (t.TypedArrayType() == napi_int32_array) { +/// Napi::Int32Array int32Array = t.As(); +/// } +class TypedArray : public Object { + public: + static void CheckCast(napi_env env, napi_value value); + + TypedArray(); ///< Creates a new _empty_ TypedArray instance. + TypedArray(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + napi_typedarray_type TypedArrayType() + const; ///< Gets the type of this typed-array. + Napi::ArrayBuffer ArrayBuffer() const; ///< Gets the backing array buffer. + + uint8_t ElementSize() + const; ///< Gets the size in bytes of one element in the array. + size_t ElementLength() const; ///< Gets the number of elements in the array. + size_t ByteOffset() + const; ///< Gets the offset into the buffer where the array starts. + size_t ByteLength() const; ///< Gets the length of the array in bytes. + + protected: + /// !cond INTERNAL + napi_typedarray_type _type; + size_t _length; + + TypedArray(napi_env env, + napi_value value, + napi_typedarray_type type, + size_t length); + + template + static +#if defined(NAPI_HAS_CONSTEXPR) + constexpr +#endif + napi_typedarray_type + TypedArrayTypeForPrimitiveType() { + return std::is_same::value ? napi_int8_array + : std::is_same::value ? napi_uint8_array + : std::is_same::value ? napi_int16_array + : std::is_same::value ? napi_uint16_array + : std::is_same::value ? napi_int32_array + : std::is_same::value ? napi_uint32_array + : std::is_same::value ? napi_float32_array + : std::is_same::value ? napi_float64_array +#if NAPI_VERSION > 5 + : std::is_same::value ? napi_bigint64_array + : std::is_same::value ? napi_biguint64_array +#endif // NAPI_VERSION > 5 + : napi_int8_array; + } + /// !endcond +}; + +/// A JavaScript typed-array value with known array type. +/// +/// Note while it is possible to create and access Uint8 "clamped" arrays using +/// this class, the _clamping_ behavior is only applied in JavaScript. +template +class TypedArrayOf : public TypedArray { + public: + /// Creates a new TypedArray instance over a new automatically-allocated array + /// buffer. 
+ /// + /// The array type parameter can normally be omitted (because it is inferred + /// from the template parameter T), except when creating a "clamped" array: + /// + /// Uint8Array::New(env, length, napi_uint8_clamped_array) + static TypedArrayOf New( + napi_env env, ///< Node-API environment + size_t elementLength, ///< Length of the created array, as a number of + ///< elements +#if defined(NAPI_HAS_CONSTEXPR) + napi_typedarray_type type = + TypedArray::TypedArrayTypeForPrimitiveType() +#else + napi_typedarray_type type +#endif + ///< Type of array, if different from the default array type for the + ///< template parameter T. + ); + + /// Creates a new TypedArray instance over a provided array buffer. + /// + /// The array type parameter can normally be omitted (because it is inferred + /// from the template parameter T), except when creating a "clamped" array: + /// + /// Uint8Array::New(env, length, buffer, 0, napi_uint8_clamped_array) + static TypedArrayOf New( + napi_env env, ///< Node-API environment + size_t elementLength, ///< Length of the created array, as a number of + ///< elements + Napi::ArrayBuffer arrayBuffer, ///< Backing array buffer instance to use + size_t bufferOffset, ///< Offset into the array buffer where the + ///< typed-array starts +#if defined(NAPI_HAS_CONSTEXPR) + napi_typedarray_type type = + TypedArray::TypedArrayTypeForPrimitiveType() +#else + napi_typedarray_type type +#endif + ///< Type of array, if different from the default array type for the + ///< template parameter T. + ); + + static void CheckCast(napi_env env, napi_value value); + + TypedArrayOf(); ///< Creates a new _empty_ TypedArrayOf instance. + TypedArrayOf(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + T& operator[](size_t index); ///< Gets or sets an element in the array. + const T& operator[](size_t index) const; ///< Gets an element in the array. + + /// Gets a pointer to the array's backing buffer. + /// + /// This is not necessarily the same as the `ArrayBuffer::Data()` pointer, + /// because the typed-array may have a non-zero `ByteOffset()` into the + /// `ArrayBuffer`. + T* Data(); + + /// Gets a pointer to the array's backing buffer. + /// + /// This is not necessarily the same as the `ArrayBuffer::Data()` pointer, + /// because the typed-array may have a non-zero `ByteOffset()` into the + /// `ArrayBuffer`. + const T* Data() const; + + private: + T* _data; + + TypedArrayOf(napi_env env, + napi_value value, + napi_typedarray_type type, + size_t length, + T* data); +}; + +/// The DataView provides a low-level interface for reading/writing multiple +/// number types in an ArrayBuffer irrespective of the platform's endianness. +class DataView : public Object { + public: + static DataView New(napi_env env, Napi::ArrayBuffer arrayBuffer); + static DataView New(napi_env env, + Napi::ArrayBuffer arrayBuffer, + size_t byteOffset); + static DataView New(napi_env env, + Napi::ArrayBuffer arrayBuffer, + size_t byteOffset, + size_t byteLength); + + static void CheckCast(napi_env env, napi_value value); + + DataView(); ///< Creates a new _empty_ DataView instance. + DataView(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + Napi::ArrayBuffer ArrayBuffer() const; ///< Gets the backing array buffer. + size_t ByteOffset() + const; ///< Gets the offset into the buffer where the array starts. + size_t ByteLength() const; ///< Gets the length of the array in bytes. 
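+  // Example (illustrative): low-level write and read on an ArrayBuffer
+  // through a DataView; assumes a valid `env`.
+  //
+  //   Napi::ArrayBuffer buf = Napi::ArrayBuffer::New(env, 8);
+  //   Napi::DataView view = Napi::DataView::New(env, buf);
+  //   view.SetUint32(0, 0xDEADBEEFu);
+  //   uint32_t read_back = view.GetUint32(0);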
+ + void* Data() const; + + float GetFloat32(size_t byteOffset) const; + double GetFloat64(size_t byteOffset) const; + int8_t GetInt8(size_t byteOffset) const; + int16_t GetInt16(size_t byteOffset) const; + int32_t GetInt32(size_t byteOffset) const; + uint8_t GetUint8(size_t byteOffset) const; + uint16_t GetUint16(size_t byteOffset) const; + uint32_t GetUint32(size_t byteOffset) const; + + void SetFloat32(size_t byteOffset, float value) const; + void SetFloat64(size_t byteOffset, double value) const; + void SetInt8(size_t byteOffset, int8_t value) const; + void SetInt16(size_t byteOffset, int16_t value) const; + void SetInt32(size_t byteOffset, int32_t value) const; + void SetUint8(size_t byteOffset, uint8_t value) const; + void SetUint16(size_t byteOffset, uint16_t value) const; + void SetUint32(size_t byteOffset, uint32_t value) const; + + private: + template + T ReadData(size_t byteOffset) const; + + template + void WriteData(size_t byteOffset, T value) const; + + void* _data; + size_t _length; +}; + +class Function : public Object { + public: + using VoidCallback = void (*)(const CallbackInfo& info); + using Callback = Value (*)(const CallbackInfo& info); + + template + static Function New(napi_env env, + const char* utf8name = nullptr, + void* data = nullptr); + + template + static Function New(napi_env env, + const char* utf8name = nullptr, + void* data = nullptr); + + template + static Function New(napi_env env, + const std::string& utf8name, + void* data = nullptr); + + template + static Function New(napi_env env, + const std::string& utf8name, + void* data = nullptr); + + /// Callable must implement operator() accepting a const CallbackInfo& + /// and return either void or Value. + template + static Function New(napi_env env, + Callable cb, + const char* utf8name = nullptr, + void* data = nullptr); + /// Callable must implement operator() accepting a const CallbackInfo& + /// and return either void or Value. 
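+  ///
+  /// A minimal sketch (illustrative only; an existing `Napi::Env env` is
+  /// assumed):
+  ///
+  ///     Napi::Function fn = Napi::Function::New(
+  ///         env, [](const Napi::CallbackInfo& info) { return info.Env().Undefined(); });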
+ template + static Function New(napi_env env, + Callable cb, + const std::string& utf8name, + void* data = nullptr); + + static void CheckCast(napi_env env, napi_value value); + + Function(); + Function(napi_env env, napi_value value); + + MaybeOrValue operator()( + const std::initializer_list& args) const; + + MaybeOrValue Call(const std::initializer_list& args) const; + MaybeOrValue Call(const std::vector& args) const; + MaybeOrValue Call(const std::vector& args) const; + MaybeOrValue Call(size_t argc, const napi_value* args) const; + MaybeOrValue Call(napi_value recv, + const std::initializer_list& args) const; + MaybeOrValue Call(napi_value recv, + const std::vector& args) const; + MaybeOrValue Call(napi_value recv, + const std::vector& args) const; + MaybeOrValue Call(napi_value recv, + size_t argc, + const napi_value* args) const; + + MaybeOrValue MakeCallback( + napi_value recv, + const std::initializer_list& args, + napi_async_context context = nullptr) const; + MaybeOrValue MakeCallback(napi_value recv, + const std::vector& args, + napi_async_context context = nullptr) const; + MaybeOrValue MakeCallback(napi_value recv, + size_t argc, + const napi_value* args, + napi_async_context context = nullptr) const; + + MaybeOrValue New(const std::initializer_list& args) const; + MaybeOrValue New(const std::vector& args) const; + MaybeOrValue New(size_t argc, const napi_value* args) const; +}; + +class Promise : public Object { + public: + class Deferred { + public: + static Deferred New(napi_env env); + Deferred(napi_env env); + + Napi::Promise Promise() const; + Napi::Env Env() const; + + void Resolve(napi_value value) const; + void Reject(napi_value value) const; + + private: + napi_env _env; + napi_deferred _deferred; + napi_value _promise; + }; + + static void CheckCast(napi_env env, napi_value value); + + Promise(napi_env env, napi_value value); +}; + +template +class Buffer : public Uint8Array { + public: + static Buffer New(napi_env env, size_t length); +#ifndef NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + static Buffer New(napi_env env, T* data, size_t length); + + // Finalizer must implement `void operator()(Env env, T* data)`. + template + static Buffer New(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback); + // Finalizer must implement `void operator()(Env env, T* data, Hint* hint)`. + template + static Buffer New(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback, + Hint* finalizeHint); +#endif // NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED + + static Buffer NewOrCopy(napi_env env, T* data, size_t length); + // Finalizer must implement `void operator()(Env env, T* data)`. + template + static Buffer NewOrCopy(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback); + // Finalizer must implement `void operator()(Env env, T* data, Hint* hint)`. + template + static Buffer NewOrCopy(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback, + Hint* finalizeHint); + + static Buffer Copy(napi_env env, const T* data, size_t length); + + static void CheckCast(napi_env env, napi_value value); + + Buffer(); + Buffer(napi_env env, napi_value value); + size_t Length() const; + T* Data() const; + + private: +}; + +/// Holds a counted reference to a value; initially a weak reference unless +/// otherwise specified, may be changed to/from a strong reference by adjusting +/// the refcount. 
+/// +/// The referenced value is not immediately destroyed when the reference count +/// is zero; it is merely then eligible for garbage-collection if there are no +/// other references to the value. +template +class Reference { + public: + static Reference New(const T& value, uint32_t initialRefcount = 0); + + Reference(); + Reference(napi_env env, napi_ref ref); + ~Reference(); + + // A reference can be moved but cannot be copied. + Reference(Reference&& other); + Reference& operator=(Reference&& other); + NAPI_DISALLOW_ASSIGN(Reference) + + operator napi_ref() const; + bool operator==(const Reference& other) const; + bool operator!=(const Reference& other) const; + + Napi::Env Env() const; + bool IsEmpty() const; + + // Note when getting the value of a Reference it is usually correct to do so + // within a HandleScope so that the value handle gets cleaned up efficiently. + T Value() const; + + uint32_t Ref() const; + uint32_t Unref() const; + void Reset(); + void Reset(const T& value, uint32_t refcount = 0); + + // Call this on a reference that is declared as static data, to prevent its + // destructor from running at program shutdown time, which would attempt to + // reset the reference when the environment is no longer valid. Avoid using + // this if at all possible. If you do need to use static data, MAKE SURE to + // warn your users that your addon is NOT threadsafe. + void SuppressDestruct(); + + protected: + Reference(const Reference&); + + /// !cond INTERNAL + napi_env _env; + napi_ref _ref; + /// !endcond + + private: + bool _suppressDestruct; +}; + +class ObjectReference : public Reference { + public: + ObjectReference(); + ObjectReference(napi_env env, napi_ref ref); + + // A reference can be moved but cannot be copied. + ObjectReference(Reference&& other); + ObjectReference& operator=(Reference&& other); + ObjectReference(ObjectReference&& other); + ObjectReference& operator=(ObjectReference&& other); + NAPI_DISALLOW_ASSIGN(ObjectReference) + + MaybeOrValue Get(const char* utf8name) const; + MaybeOrValue Get(const std::string& utf8name) const; + MaybeOrValue Set(const char* utf8name, napi_value value) const; + MaybeOrValue Set(const char* utf8name, Napi::Value value) const; + MaybeOrValue Set(const char* utf8name, const char* utf8value) const; + MaybeOrValue Set(const char* utf8name, bool boolValue) const; + MaybeOrValue Set(const char* utf8name, double numberValue) const; + MaybeOrValue Set(const std::string& utf8name, napi_value value) const; + MaybeOrValue Set(const std::string& utf8name, Napi::Value value) const; + MaybeOrValue Set(const std::string& utf8name, + std::string& utf8value) const; + MaybeOrValue Set(const std::string& utf8name, bool boolValue) const; + MaybeOrValue Set(const std::string& utf8name, double numberValue) const; + + MaybeOrValue Get(uint32_t index) const; + MaybeOrValue Set(uint32_t index, const napi_value value) const; + MaybeOrValue Set(uint32_t index, const Napi::Value value) const; + MaybeOrValue Set(uint32_t index, const char* utf8value) const; + MaybeOrValue Set(uint32_t index, const std::string& utf8value) const; + MaybeOrValue Set(uint32_t index, bool boolValue) const; + MaybeOrValue Set(uint32_t index, double numberValue) const; + + protected: + ObjectReference(const ObjectReference&); +}; + +class FunctionReference : public Reference { + public: + FunctionReference(); + FunctionReference(napi_env env, napi_ref ref); + + // A reference can be moved but cannot be copied. 
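+  // For example (illustrative): `FunctionReference b = std::move(a);` compiles,
+  // whereas `FunctionReference b = a;` does not.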
+ FunctionReference(Reference&& other); + FunctionReference& operator=(Reference&& other); + FunctionReference(FunctionReference&& other); + FunctionReference& operator=(FunctionReference&& other); + NAPI_DISALLOW_ASSIGN_COPY(FunctionReference) + + MaybeOrValue operator()( + const std::initializer_list& args) const; + + MaybeOrValue Call( + const std::initializer_list& args) const; + MaybeOrValue Call(const std::vector& args) const; + MaybeOrValue Call( + napi_value recv, const std::initializer_list& args) const; + MaybeOrValue Call(napi_value recv, + const std::vector& args) const; + MaybeOrValue Call(napi_value recv, + size_t argc, + const napi_value* args) const; + + MaybeOrValue MakeCallback( + napi_value recv, + const std::initializer_list& args, + napi_async_context context = nullptr) const; + MaybeOrValue MakeCallback( + napi_value recv, + const std::vector& args, + napi_async_context context = nullptr) const; + MaybeOrValue MakeCallback( + napi_value recv, + size_t argc, + const napi_value* args, + napi_async_context context = nullptr) const; + + MaybeOrValue New(const std::initializer_list& args) const; + MaybeOrValue New(const std::vector& args) const; +}; + +// Shortcuts to creating a new reference with inferred type and refcount = 0. +template +Reference Weak(T value); +ObjectReference Weak(Object value); +FunctionReference Weak(Function value); + +// Shortcuts to creating a new reference with inferred type and refcount = 1. +template +Reference Persistent(T value); +ObjectReference Persistent(Object value); +FunctionReference Persistent(Function value); + +/// A persistent reference to a JavaScript error object. Use of this class +/// depends somewhat on whether C++ exceptions are enabled at compile time. +/// +/// ### Handling Errors With C++ Exceptions +/// +/// If C++ exceptions are enabled, then the `Error` class extends +/// `std::exception` and enables integrated error-handling for C++ exceptions +/// and JavaScript exceptions. +/// +/// If a Node-API call fails without executing any JavaScript code (for +/// example due to an invalid argument), then the Node-API wrapper +/// automatically converts and throws the error as a C++ exception of type +/// `Napi::Error`. Or if a JavaScript function called by C++ code via Node-API +/// throws a JavaScript exception, then the Node-API wrapper automatically +/// converts and throws it as a C++ exception of type `Napi::Error`. +/// +/// If a C++ exception of type `Napi::Error` escapes from a Node-API C++ +/// callback, then the Node-API wrapper automatically converts and throws it +/// as a JavaScript exception. Therefore, catching a C++ exception of type +/// `Napi::Error` prevents a JavaScript exception from being thrown. +/// +/// #### Example 1A - Throwing a C++ exception: +/// +/// Napi::Env env = ... +/// throw Napi::Error::New(env, "Example exception"); +/// +/// Following C++ statements will not be executed. The exception will bubble +/// up as a C++ exception of type `Napi::Error`, until it is either caught +/// while still in C++, or else automatically propagated as a JavaScript +/// exception when the callback returns to JavaScript. +/// +/// #### Example 2A - Propagating a Node-API C++ exception: +/// +/// Napi::Function jsFunctionThatThrows = someObj.As(); +/// Napi::Value result = jsFunctionThatThrows({ arg1, arg2 }); +/// +/// Following C++ statements will not be executed.
The exception will bubble +/// up as a C++ exception of type `Napi::Error`, until it is either caught +/// while still in C++, or else automatically propagated as a JavaScript +/// exception when the callback returns to JavaScript. +/// +/// #### Example 3A - Handling a Node-API C++ exception: +/// +/// Napi::Function jsFunctionThatThrows = someObj.As(); +/// Napi::Value result; +/// try { +/// result = jsFunctionThatThrows({ arg1, arg2 }); +/// } catch (const Napi::Error& e) { +/// cerr << "Caught JavaScript exception: " + e.what(); +/// } +/// +/// Since the exception was caught here, it will not be propagated as a +/// JavaScript exception. +/// +/// ### Handling Errors Without C++ Exceptions +/// +/// If C++ exceptions are disabled (by defining `NAPI_DISABLE_CPP_EXCEPTIONS`) +/// then this class does not extend `std::exception`, and APIs in the `Napi` +/// namespace do not throw C++ exceptions when they fail. Instead, they raise +/// _pending_ JavaScript exceptions and return _empty_ `Value`s. Calling code +/// should check `Value::IsEmpty()` before attempting to use a returned value, +/// and may use methods on the `Env` class to check for, get, and clear a +/// pending JavaScript exception. If the pending exception is not cleared, it +/// will be thrown when the native callback returns to JavaScript. +/// +/// #### Example 1B - Throwing a JS exception +/// +/// Napi::Env env = ... +/// Napi::Error::New(env, "Example +/// exception").ThrowAsJavaScriptException(); return; +/// +/// After throwing a JS exception, the code should generally return +/// immediately from the native callback, after performing any necessary +/// cleanup. +/// +/// #### Example 2B - Propagating a Node-API JS exception: +/// +/// Napi::Function jsFunctionThatThrows = someObj.As(); +/// Napi::Value result = jsFunctionThatThrows({ arg1, arg2 }); +/// if (result.IsEmpty()) return; +/// +/// An empty value result from a Node-API call indicates an error occurred, +/// and a JavaScript exception is pending. To let the exception propagate, the +/// code should generally return immediately from the native callback, after +/// performing any necessary cleanup. +/// +/// #### Example 3B - Handling a Node-API JS exception: +/// +/// Napi::Function jsFunctionThatThrows = someObj.As(); +/// Napi::Value result = jsFunctionThatThrows({ arg1, arg2 }); +/// if (result.IsEmpty()) { +/// Napi::Error e = env.GetAndClearPendingException(); +/// cerr << "Caught JavaScript exception: " + e.Message(); +/// } +/// +/// Since the exception was cleared here, it will not be propagated as a +/// JavaScript exception after the native callback returns. +class Error : public ObjectReference +#ifdef NAPI_CPP_EXCEPTIONS + , + public std::exception +#endif // NAPI_CPP_EXCEPTIONS +{ + public: + static Error New(napi_env env); + static Error New(napi_env env, const char* message); + static Error New(napi_env env, const std::string& message); + + static NAPI_NO_RETURN void Fatal(const char* location, const char* message); + + Error(); + Error(napi_env env, napi_value value); + + // An error can be moved or copied. 
+ Error(Error&& other); + Error& operator=(Error&& other); + Error(const Error&); + Error& operator=(const Error&); + + const std::string& Message() const NAPI_NOEXCEPT; + void ThrowAsJavaScriptException() const; + + Object Value() const; + +#ifdef NAPI_CPP_EXCEPTIONS + const char* what() const NAPI_NOEXCEPT override; +#endif // NAPI_CPP_EXCEPTIONS + + protected: + /// !cond INTERNAL + using create_error_fn = napi_status (*)(napi_env envb, + napi_value code, + napi_value msg, + napi_value* result); + + template + static TError New(napi_env env, + const char* message, + size_t length, + create_error_fn create_error); + /// !endcond + + private: + static inline const char* ERROR_WRAP_VALUE() NAPI_NOEXCEPT; + mutable std::string _message; +}; + +class TypeError : public Error { + public: + static TypeError New(napi_env env, const char* message); + static TypeError New(napi_env env, const std::string& message); + + TypeError(); + TypeError(napi_env env, napi_value value); +}; + +class RangeError : public Error { + public: + static RangeError New(napi_env env, const char* message); + static RangeError New(napi_env env, const std::string& message); + + RangeError(); + RangeError(napi_env env, napi_value value); +}; + +#if NAPI_VERSION > 8 +class SyntaxError : public Error { + public: + static SyntaxError New(napi_env env, const char* message); + static SyntaxError New(napi_env env, const std::string& message); + + SyntaxError(); + SyntaxError(napi_env env, napi_value value); +}; +#endif // NAPI_VERSION > 8 + +class CallbackInfo { + public: + CallbackInfo(napi_env env, napi_callback_info info); + ~CallbackInfo(); + + // Disallow copying to prevent multiple free of _dynamicArgs + NAPI_DISALLOW_ASSIGN_COPY(CallbackInfo) + + Napi::Env Env() const; + Value NewTarget() const; + bool IsConstructCall() const; + size_t Length() const; + const Value operator[](size_t index) const; + Value This() const; + void* Data() const; + void SetData(void* data); + explicit operator napi_callback_info() const; + + private: + const size_t _staticArgCount = 6; + napi_env _env; + napi_callback_info _info; + napi_value _this; + size_t _argc; + napi_value* _argv; + napi_value _staticArgs[6]; + napi_value* _dynamicArgs; + void* _data; +}; + +class PropertyDescriptor { + public: + using GetterCallback = Napi::Value (*)(const Napi::CallbackInfo& info); + using SetterCallback = void (*)(const Napi::CallbackInfo& info); + +#ifndef NODE_ADDON_API_DISABLE_DEPRECATED + template + static PropertyDescriptor Accessor( + const char* utf8name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + const std::string& utf8name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + napi_value name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Name name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + const char* utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + const std::string& utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static 
PropertyDescriptor Accessor( + napi_value name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Name name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + const char* utf8name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + const std::string& utf8name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + napi_value name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + Name name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); +#endif // !NODE_ADDON_API_DISABLE_DEPRECATED + + template + static PropertyDescriptor Accessor( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + const std::string& utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + Name name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + const std::string& utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + Name name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + const char* utf8name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + Name name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + const char* utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + Name name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + Napi::Env env, + Napi::Object object, + const char* utf8name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + Napi::Env env, + Napi::Object object, + 
const std::string& utf8name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + Napi::Env env, + Napi::Object object, + Name name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor Value( + const char* utf8name, + napi_value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor Value( + const std::string& utf8name, + napi_value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor Value( + napi_value name, + napi_value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor Value( + Name name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + + PropertyDescriptor(napi_property_descriptor desc); + + operator napi_property_descriptor&(); + operator const napi_property_descriptor&() const; + + private: + napi_property_descriptor _desc; +}; + +/// Property descriptor for use with `ObjectWrap::DefineClass()`. +/// +/// This is different from the standalone `PropertyDescriptor` because it is +/// specific to each `ObjectWrap` subclass. This prevents using descriptors +/// from a different class when defining a new class (preventing the callbacks +/// from having incorrect `this` pointers). +template +class ClassPropertyDescriptor { + public: + ClassPropertyDescriptor(napi_property_descriptor desc) : _desc(desc) {} + + operator napi_property_descriptor&() { return _desc; } + operator const napi_property_descriptor&() const { return _desc; } + + private: + napi_property_descriptor _desc; +}; + +template +struct MethodCallbackData { + TCallback callback; + void* data; +}; + +template +struct AccessorCallbackData { + TGetterCallback getterCallback; + TSetterCallback setterCallback; + void* data; +}; + +template +class InstanceWrap { + public: + using InstanceVoidMethodCallback = void (T::*)(const CallbackInfo& info); + using InstanceMethodCallback = Napi::Value (T::*)(const CallbackInfo& info); + using InstanceGetterCallback = Napi::Value (T::*)(const CallbackInfo& info); + using InstanceSetterCallback = void (T::*)(const CallbackInfo& info, + const Napi::Value& value); + + using PropertyDescriptor = ClassPropertyDescriptor; + + static PropertyDescriptor InstanceMethod( + const char* utf8name, + InstanceVoidMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceMethod( + const char* utf8name, + InstanceMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceMethod( + Symbol name, + InstanceVoidMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceMethod( + Symbol name, + InstanceMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceMethod( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceMethod( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceMethod( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static 
PropertyDescriptor InstanceMethod( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceAccessor( + const char* utf8name, + InstanceGetterCallback getter, + InstanceSetterCallback setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceAccessor( + Symbol name, + InstanceGetterCallback getter, + InstanceSetterCallback setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceAccessor( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceAccessor( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceValue( + const char* utf8name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor InstanceValue( + Symbol name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + + protected: + static void AttachPropData(napi_env env, + napi_value value, + const napi_property_descriptor* prop); + + private: + using This = InstanceWrap; + + using InstanceVoidMethodCallbackData = + MethodCallbackData; + using InstanceMethodCallbackData = + MethodCallbackData; + using InstanceAccessorCallbackData = + AccessorCallbackData; + + static napi_value InstanceVoidMethodCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value InstanceMethodCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value InstanceGetterCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value InstanceSetterCallbackWrapper(napi_env env, + napi_callback_info info); + + template + static napi_value WrappedMethod(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT; + + template + struct SetterTag {}; + + template + static napi_callback WrapSetter(SetterTag) NAPI_NOEXCEPT { + return &This::WrappedMethod; + } + static napi_callback WrapSetter(SetterTag) NAPI_NOEXCEPT { + return nullptr; + } +}; + +/// Base class to be extended by C++ classes exposed to JavaScript; each C++ +/// class instance gets "wrapped" by a JavaScript object that is managed by this +/// class. +/// +/// At initialization time, the `DefineClass()` method must be used to +/// hook up the accessor and method callbacks. It takes a list of +/// property descriptors, which can be constructed via the various +/// static methods on the base class. 
+/// +/// #### Example: +/// +/// class Example: public Napi::ObjectWrap { +/// public: +/// static void Initialize(Napi::Env& env, Napi::Object& target) { +/// Napi::Function constructor = DefineClass(env, "Example", { +/// InstanceAccessor<&Example::GetSomething, +/// &Example::SetSomething>("value"), +/// InstanceMethod<&Example::DoSomething>("doSomething"), +/// }); +/// target.Set("Example", constructor); +/// } +/// +/// Example(const Napi::CallbackInfo& info); // Constructor +/// Napi::Value GetSomething(const Napi::CallbackInfo& info); +/// void SetSomething(const Napi::CallbackInfo& info, const Napi::Value& +/// value); Napi::Value DoSomething(const Napi::CallbackInfo& info); +/// } +template +class ObjectWrap : public InstanceWrap, public Reference { + public: + ObjectWrap(const CallbackInfo& callbackInfo); + virtual ~ObjectWrap(); + + static T* Unwrap(Object wrapper); + + // Methods exposed to JavaScript must conform to one of these callback + // signatures. + using StaticVoidMethodCallback = void (*)(const CallbackInfo& info); + using StaticMethodCallback = Napi::Value (*)(const CallbackInfo& info); + using StaticGetterCallback = Napi::Value (*)(const CallbackInfo& info); + using StaticSetterCallback = void (*)(const CallbackInfo& info, + const Napi::Value& value); + + using PropertyDescriptor = ClassPropertyDescriptor; + + static Function DefineClass( + Napi::Env env, + const char* utf8name, + const std::initializer_list& properties, + void* data = nullptr); + static Function DefineClass(Napi::Env env, + const char* utf8name, + const std::vector& properties, + void* data = nullptr); + static PropertyDescriptor StaticMethod( + const char* utf8name, + StaticVoidMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticMethod( + const char* utf8name, + StaticMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticMethod( + Symbol name, + StaticVoidMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticMethod( + Symbol name, + StaticMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticMethod( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticMethod( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticMethod( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticMethod( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticAccessor( + const char* utf8name, + StaticGetterCallback getter, + StaticSetterCallback setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticAccessor( + Symbol name, + StaticGetterCallback getter, + StaticSetterCallback setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticAccessor( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticAccessor( + Symbol 
name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticValue( + const char* utf8name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor StaticValue( + Symbol name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + static Napi::Value OnCalledAsFunction(const Napi::CallbackInfo& callbackInfo); + virtual void Finalize(Napi::Env env); + + private: + using This = ObjectWrap; + + static napi_value ConstructorCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value StaticVoidMethodCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value StaticMethodCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value StaticGetterCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value StaticSetterCallbackWrapper(napi_env env, + napi_callback_info info); + static void FinalizeCallback(napi_env env, void* data, void* hint); + static Function DefineClass(Napi::Env env, + const char* utf8name, + const size_t props_count, + const napi_property_descriptor* props, + void* data = nullptr); + + using StaticVoidMethodCallbackData = + MethodCallbackData; + using StaticMethodCallbackData = MethodCallbackData; + + using StaticAccessorCallbackData = + AccessorCallbackData; + + template + static napi_value WrappedMethod(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT; + + template + struct StaticSetterTag {}; + + template + static napi_callback WrapStaticSetter(StaticSetterTag) NAPI_NOEXCEPT { + return &This::WrappedMethod; + } + static napi_callback WrapStaticSetter(StaticSetterTag) + NAPI_NOEXCEPT { + return nullptr; + } + + bool _construction_failed = true; +}; + +class HandleScope { + public: + HandleScope(napi_env env, napi_handle_scope scope); + explicit HandleScope(Napi::Env env); + ~HandleScope(); + + // Disallow copying to prevent double close of napi_handle_scope + NAPI_DISALLOW_ASSIGN_COPY(HandleScope) + + operator napi_handle_scope() const; + + Napi::Env Env() const; + + private: + napi_env _env; + napi_handle_scope _scope; +}; + +class EscapableHandleScope { + public: + EscapableHandleScope(napi_env env, napi_escapable_handle_scope scope); + explicit EscapableHandleScope(Napi::Env env); + ~EscapableHandleScope(); + + // Disallow copying to prevent double close of napi_escapable_handle_scope + NAPI_DISALLOW_ASSIGN_COPY(EscapableHandleScope) + + operator napi_escapable_handle_scope() const; + + Napi::Env Env() const; + Value Escape(napi_value escapee); + + private: + napi_env _env; + napi_escapable_handle_scope _scope; +}; + +#if (NAPI_VERSION > 2) +class CallbackScope { + public: + CallbackScope(napi_env env, napi_callback_scope scope); + CallbackScope(napi_env env, napi_async_context context); + virtual ~CallbackScope(); + + // Disallow copying to prevent double close of napi_callback_scope + NAPI_DISALLOW_ASSIGN_COPY(CallbackScope) + + operator napi_callback_scope() const; + + Napi::Env Env() const; + + private: + napi_env _env; + napi_callback_scope _scope; +}; +#endif + +class AsyncContext { + public: + explicit AsyncContext(napi_env env, const char* resource_name); + explicit AsyncContext(napi_env env, + const char* resource_name, + const Object& resource); + virtual ~AsyncContext(); + + AsyncContext(AsyncContext&& other); + AsyncContext& operator=(AsyncContext&& other); + NAPI_DISALLOW_ASSIGN_COPY(AsyncContext) + + operator napi_async_context() const; + + Napi::Env Env() 
const; + + private: + napi_env _env; + napi_async_context _context; +}; + +#if NAPI_HAS_THREADS +class AsyncWorker { + public: + virtual ~AsyncWorker(); + + NAPI_DISALLOW_ASSIGN_COPY(AsyncWorker) + + operator napi_async_work() const; + + Napi::Env Env() const; + + void Queue(); + void Cancel(); + void SuppressDestruct(); + + ObjectReference& Receiver(); + FunctionReference& Callback(); + + virtual void OnExecute(Napi::Env env); + virtual void OnWorkComplete(Napi::Env env, napi_status status); + + protected: + explicit AsyncWorker(const Function& callback); + explicit AsyncWorker(const Function& callback, const char* resource_name); + explicit AsyncWorker(const Function& callback, + const char* resource_name, + const Object& resource); + explicit AsyncWorker(const Object& receiver, const Function& callback); + explicit AsyncWorker(const Object& receiver, + const Function& callback, + const char* resource_name); + explicit AsyncWorker(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource); + + explicit AsyncWorker(Napi::Env env); + explicit AsyncWorker(Napi::Env env, const char* resource_name); + explicit AsyncWorker(Napi::Env env, + const char* resource_name, + const Object& resource); + + virtual void Execute() = 0; + virtual void OnOK(); + virtual void OnError(const Error& e); + virtual void Destroy(); + virtual std::vector GetResult(Napi::Env env); + + void SetError(const std::string& error); + + private: + static inline void OnAsyncWorkExecute(napi_env env, void* asyncworker); + static inline void OnAsyncWorkComplete(napi_env env, + napi_status status, + void* asyncworker); + + napi_env _env; + napi_async_work _work; + ObjectReference _receiver; + FunctionReference _callback; + std::string _error; + bool _suppress_destruct; +}; +#endif // NAPI_HAS_THREADS + +#if (NAPI_VERSION > 3 && NAPI_HAS_THREADS) +class ThreadSafeFunction { + public: + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback, + FinalizerDataType* data); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback); + + // This API may only be called from the main thread. 
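+  // Illustrative sketch (hypothetical names; `env` and a `Napi::Function`
+  // named `jsCallback` are assumed), using the simplest overload above:
+  //
+  //   Napi::ThreadSafeFunction tsfn = Napi::ThreadSafeFunction::New(
+  //       env, jsCallback, "Example Resource", 0 /* unlimited queue */,
+  //       1 /* initial thread count */);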
+ template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback, + FinalizerDataType* data); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data); + + ThreadSafeFunction(); + ThreadSafeFunction(napi_threadsafe_function tsFunctionValue); + + operator napi_threadsafe_function() const; + + // This API may be called from any thread. + napi_status BlockingCall() const; + + // This API may be called from any thread. + template + napi_status BlockingCall(Callback callback) const; + + // This API may be called from any thread. + template + napi_status BlockingCall(DataType* data, Callback callback) const; + + // This API may be called from any thread. + napi_status NonBlockingCall() const; + + // This API may be called from any thread. + template + napi_status NonBlockingCall(Callback callback) const; + + // This API may be called from any thread. + template + napi_status NonBlockingCall(DataType* data, Callback callback) const; + + // This API may only be called from the main thread. + void Ref(napi_env env) const; + + // This API may only be called from the main thread. + void Unref(napi_env env) const; + + // This API may be called from any thread. + napi_status Acquire() const; + + // This API may be called from any thread. + napi_status Release() const; + + // This API may be called from any thread. + napi_status Abort() const; + + struct ConvertibleContext { + template + operator T*() { + return static_cast(context); + } + void* context; + }; + + // This API may be called from any thread. 
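+  // The returned ConvertibleContext converts implicitly to the pointer type of
+  // the context supplied when the function was created, e.g. (illustrative;
+  // `MyContext` is hypothetical):
+  //
+  //   MyContext* ctx = tsfn.GetContext();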
+ ConvertibleContext GetContext() const; + + private: + using CallbackWrapper = std::function; + + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data, + napi_finalize wrapper); + + napi_status CallInternal(CallbackWrapper* callbackWrapper, + napi_threadsafe_function_call_mode mode) const; + + static void CallJS(napi_env env, + napi_value jsCallback, + void* context, + void* data); + + napi_threadsafe_function _tsfn; +}; + +// A TypedThreadSafeFunction by default has no context (nullptr) and can +// accept any type (void) to its CallJs. +template +class TypedThreadSafeFunction { + public: + // This API may only be called from the main thread. + // Helper function that returns nullptr if running Node-API 5+, otherwise a + // non-empty, no-op Function. This provides the ability to specify at + // compile-time a callback parameter to `New` that safely does no action + // when targeting _any_ Node-API version. +#if NAPI_VERSION > 4 + static std::nullptr_t EmptyFunctionFactory(Napi::Env env); +#else + static Napi::Function EmptyFunctionFactory(Napi::Env env); +#endif + static Napi::Function FunctionOrEmpty(Napi::Env env, + Napi::Function& callback); + +#if NAPI_VERSION > 4 + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [missing] Resource [missing] Finalizer [missing] + template + static TypedThreadSafeFunction New( + napi_env env, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [missing] Resource [passed] Finalizer [missing] + template + static TypedThreadSafeFunction New( + napi_env env, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [missing] Resource [missing] Finalizer [passed] + template + static TypedThreadSafeFunction New( + napi_env env, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [missing] Resource [passed] Finalizer [passed] + template + static TypedThreadSafeFunction New( + napi_env env, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data = nullptr); +#endif + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [passed] Resource [missing] Finalizer [missing] + template + static TypedThreadSafeFunction New( + napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context = nullptr); + + // This API may only be called from the main thread. 
+ // Creates a new threadsafe function with: + // Callback [passed] Resource [passed] Finalizer [missing] + template + static TypedThreadSafeFunction New( + napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [passed] Resource [missing] Finalizer [passed] + template + static TypedThreadSafeFunction New( + napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [passed] Resource [passed] Finalizer [passed] + template + static TypedThreadSafeFunction New( + napi_env env, + CallbackType callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data = nullptr); + + TypedThreadSafeFunction(); + TypedThreadSafeFunction(napi_threadsafe_function tsFunctionValue); + + operator napi_threadsafe_function() const; + + // This API may be called from any thread. + napi_status BlockingCall(DataType* data = nullptr) const; + + // This API may be called from any thread. + napi_status NonBlockingCall(DataType* data = nullptr) const; + + // This API may only be called from the main thread. + void Ref(napi_env env) const; + + // This API may only be called from the main thread. + void Unref(napi_env env) const; + + // This API may be called from any thread. + napi_status Acquire() const; + + // This API may be called from any thread. + napi_status Release() const; + + // This API may be called from any thread. + napi_status Abort() const; + + // This API may be called from any thread. + ContextType* GetContext() const; + + private: + template + static TypedThreadSafeFunction New( + napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data, + napi_finalize wrapper); + + static void CallJsInternal(napi_env env, + napi_value jsCallback, + void* context, + void* data); + + protected: + napi_threadsafe_function _tsfn; +}; +template +class AsyncProgressWorkerBase : public AsyncWorker { + public: + virtual void OnWorkProgress(DataType* data) = 0; + class ThreadSafeData { + public: + ThreadSafeData(AsyncProgressWorkerBase* asyncprogressworker, DataType* data) + : _asyncprogressworker(asyncprogressworker), _data(data) {} + + AsyncProgressWorkerBase* asyncprogressworker() { + return _asyncprogressworker; + }; + DataType* data() { return _data; }; + + private: + AsyncProgressWorkerBase* _asyncprogressworker; + DataType* _data; + }; + void OnWorkComplete(Napi::Env env, napi_status status) override; + + protected: + explicit AsyncProgressWorkerBase(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource, + size_t queue_size = 1); + virtual ~AsyncProgressWorkerBase(); + +// Optional callback of Napi::ThreadSafeFunction only available after +// NAPI_VERSION 4. 
Refs: https://github.com/nodejs/node/pull/27791 +#if NAPI_VERSION > 4 + explicit AsyncProgressWorkerBase(Napi::Env env, + const char* resource_name, + const Object& resource, + size_t queue_size = 1); +#endif + + static inline void OnAsyncWorkProgress(Napi::Env env, + Napi::Function jsCallback, + void* data); + + napi_status NonBlockingCall(DataType* data); + + private: + ThreadSafeFunction _tsfn; + bool _work_completed = false; + napi_status _complete_status; + static inline void OnThreadSafeFunctionFinalize( + Napi::Env env, void* data, AsyncProgressWorkerBase* context); +}; + +template +class AsyncProgressWorker : public AsyncProgressWorkerBase { + public: + virtual ~AsyncProgressWorker(); + + class ExecutionProgress { + friend class AsyncProgressWorker; + + public: + void Signal() const; + void Send(const T* data, size_t count) const; + + private: + explicit ExecutionProgress(AsyncProgressWorker* worker) : _worker(worker) {} + AsyncProgressWorker* const _worker; + }; + + void OnWorkProgress(void*) override; + + protected: + explicit AsyncProgressWorker(const Function& callback); + explicit AsyncProgressWorker(const Function& callback, + const char* resource_name); + explicit AsyncProgressWorker(const Function& callback, + const char* resource_name, + const Object& resource); + explicit AsyncProgressWorker(const Object& receiver, + const Function& callback); + explicit AsyncProgressWorker(const Object& receiver, + const Function& callback, + const char* resource_name); + explicit AsyncProgressWorker(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource); + +// Optional callback of Napi::ThreadSafeFunction only available after +// NAPI_VERSION 4. Refs: https://github.com/nodejs/node/pull/27791 +#if NAPI_VERSION > 4 + explicit AsyncProgressWorker(Napi::Env env); + explicit AsyncProgressWorker(Napi::Env env, const char* resource_name); + explicit AsyncProgressWorker(Napi::Env env, + const char* resource_name, + const Object& resource); +#endif + virtual void Execute(const ExecutionProgress& progress) = 0; + virtual void OnProgress(const T* data, size_t count) = 0; + + private: + void Execute() override; + void Signal(); + void SendProgress_(const T* data, size_t count); + + std::mutex _mutex; + T* _asyncdata; + size_t _asyncsize; + bool _signaled; +}; + +template +class AsyncProgressQueueWorker + : public AsyncProgressWorkerBase> { + public: + virtual ~AsyncProgressQueueWorker(){}; + + class ExecutionProgress { + friend class AsyncProgressQueueWorker; + + public: + void Signal() const; + void Send(const T* data, size_t count) const; + + private: + explicit ExecutionProgress(AsyncProgressQueueWorker* worker) + : _worker(worker) {} + AsyncProgressQueueWorker* const _worker; + }; + + void OnWorkComplete(Napi::Env env, napi_status status) override; + void OnWorkProgress(std::pair*) override; + + protected: + explicit AsyncProgressQueueWorker(const Function& callback); + explicit AsyncProgressQueueWorker(const Function& callback, + const char* resource_name); + explicit AsyncProgressQueueWorker(const Function& callback, + const char* resource_name, + const Object& resource); + explicit AsyncProgressQueueWorker(const Object& receiver, + const Function& callback); + explicit AsyncProgressQueueWorker(const Object& receiver, + const Function& callback, + const char* resource_name); + explicit AsyncProgressQueueWorker(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource); + +// Optional callback 
of Napi::ThreadSafeFunction only available after +// NAPI_VERSION 4. Refs: https://github.com/nodejs/node/pull/27791 +#if NAPI_VERSION > 4 + explicit AsyncProgressQueueWorker(Napi::Env env); + explicit AsyncProgressQueueWorker(Napi::Env env, const char* resource_name); + explicit AsyncProgressQueueWorker(Napi::Env env, + const char* resource_name, + const Object& resource); +#endif + virtual void Execute(const ExecutionProgress& progress) = 0; + virtual void OnProgress(const T* data, size_t count) = 0; + + private: + void Execute() override; + void Signal() const; + void SendProgress_(const T* data, size_t count); +}; +#endif // NAPI_VERSION > 3 && NAPI_HAS_THREADS + +// Memory management. +class MemoryManagement { + public: + static int64_t AdjustExternalMemory(Env env, int64_t change_in_bytes); +}; + +// Version management +class VersionManagement { + public: + static uint32_t GetNapiVersion(Env env); + static const napi_node_version* GetNodeVersion(Env env); +}; + +#if NAPI_VERSION > 5 +template +class Addon : public InstanceWrap { + public: + static inline Object Init(Env env, Object exports); + static T* Unwrap(Object wrapper); + + protected: + using AddonProp = ClassPropertyDescriptor; + void DefineAddon(Object exports, + const std::initializer_list& props); + Napi::Object DefineProperties(Object object, + const std::initializer_list& props); + + private: + Object entry_point_; +}; +#endif // NAPI_VERSION > 5 + +#ifdef NAPI_CPP_CUSTOM_NAMESPACE +} // namespace NAPI_CPP_CUSTOM_NAMESPACE +#endif + +} // namespace Napi + +// Inline implementations of all the above class methods are included here. +#include "napi-inl.h" + +#endif // SRC_NAPI_H_ diff --git a/miniprogram/node_modules/node-addon-api/node_addon_api.gyp b/miniprogram/node_modules/node-addon-api/node_addon_api.gyp new file mode 100644 index 00000000..29905ed4 --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/node_addon_api.gyp @@ -0,0 +1,32 @@ +{ + 'targets': [ + { + 'target_name': 'node_addon_api', + 'type': 'none', + 'sources': [ 'napi.h', 'napi-inl.h' ], + 'direct_dependent_settings': { + 'include_dirs': [ '.' ], + 'includes': ['noexcept.gypi'], + } + }, + { + 'target_name': 'node_addon_api_except', + 'type': 'none', + 'sources': [ 'napi.h', 'napi-inl.h' ], + 'direct_dependent_settings': { + 'include_dirs': [ '.' ], + 'includes': ['except.gypi'], + } + }, + { + 'target_name': 'node_addon_api_maybe', + 'type': 'none', + 'sources': [ 'napi.h', 'napi-inl.h' ], + 'direct_dependent_settings': { + 'include_dirs': [ '.' 
], + 'includes': ['noexcept.gypi'], + 'defines': ['NODE_ADDON_API_ENABLE_MAYBE'] + } + }, + ] +} diff --git a/miniprogram/node_modules/node-addon-api/node_api.gyp b/miniprogram/node_modules/node-addon-api/node_api.gyp new file mode 100644 index 00000000..4ff0ae7d --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/node_api.gyp @@ -0,0 +1,9 @@ +{ + 'targets': [ + { + 'target_name': 'nothing', + 'type': 'static_library', + 'sources': [ 'nothing.c' ] + } + ] +} diff --git a/miniprogram/node_modules/node-addon-api/noexcept.gypi b/miniprogram/node_modules/node-addon-api/noexcept.gypi new file mode 100644 index 00000000..404a05f3 --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/noexcept.gypi @@ -0,0 +1,26 @@ +{ + 'defines': [ 'NAPI_DISABLE_CPP_EXCEPTIONS' ], + 'cflags': [ '-fno-exceptions' ], + 'cflags_cc': [ '-fno-exceptions' ], + 'conditions': [ + ["OS=='win'", { + # _HAS_EXCEPTIONS is already defined and set to 0 in common.gypi + #"defines": [ + # "_HAS_EXCEPTIONS=0" + #], + "msvs_settings": { + "VCCLCompilerTool": { + 'ExceptionHandling': 0, + 'EnablePREfast': 'true', + }, + }, + }], + ["OS=='mac'", { + 'xcode_settings': { + 'CLANG_CXX_LIBRARY': 'libc++', + 'MACOSX_DEPLOYMENT_TARGET': '10.7', + 'GCC_ENABLE_CPP_EXCEPTIONS': 'NO', + }, + }], + ], +} diff --git a/miniprogram/node_modules/node-addon-api/nothing.c b/miniprogram/node_modules/node-addon-api/nothing.c new file mode 100644 index 00000000..e69de29b diff --git a/miniprogram/node_modules/node-addon-api/package-support.json b/miniprogram/node_modules/node-addon-api/package-support.json new file mode 100644 index 00000000..10d3607a --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/package-support.json @@ -0,0 +1,21 @@ +{ + "versions": [ + { + "version": "*", + "target": { + "node": "active" + }, + "response": { + "type": "time-permitting", + "paid": false, + "contact": { + "name": "node-addon-api team", + "url": "https://github.com/nodejs/node-addon-api/issues" + } + }, + "backing": [ { "project": "https://github.com/nodejs" }, + { "foundation": "https://openjsf.org/" } + ] + } + ] +} diff --git a/miniprogram/node_modules/node-addon-api/package.json b/miniprogram/node_modules/node-addon-api/package.json new file mode 100644 index 00000000..d772ddc9 --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/package.json @@ -0,0 +1,480 @@ +{ + "bugs": { + "url": "https://github.com/nodejs/node-addon-api/issues" + }, + "contributors": [ + { + "name": "Abhishek Kumar Singh", + "url": "https://github.com/abhi11210646" + }, + { + "name": "Alba Mendez", + "url": "https://github.com/jmendeth" + }, + { + "name": "Alexander Floh", + "url": "https://github.com/alexanderfloh" + }, + { + "name": "Ammar Faizi", + "url": "https://github.com/ammarfaizi2" + }, + { + "name": "András Timár, Dr", + "url": "https://github.com/timarandras" + }, + { + "name": "Andrew Petersen", + "url": "https://github.com/kirbysayshi" + }, + { + "name": "Anisha Rohra", + "url": "https://github.com/anisha-rohra" + }, + { + "name": "Anna Henningsen", + "url": "https://github.com/addaleax" + }, + { + "name": "Arnaud Botella", + "url": "https://github.com/BotellaA" + }, + { + "name": "Arunesh Chandra", + "url": "https://github.com/aruneshchandra" + }, + { + "name": "Azlan Mukhtar", + "url": "https://github.com/azlan" + }, + { + "name": "Ben Berman", + "url": "https://github.com/rivertam" + }, + { + "name": "Benjamin Byholm", + "url": "https://github.com/kkoopa" + }, + { + "name": "Bill Gallafent", + "url": "https://github.com/gallafent" + }, + { + 
"name": "blagoev", + "url": "https://github.com/blagoev" + }, + { + "name": "Bruce A. MacNaughton", + "url": "https://github.com/bmacnaughton" + }, + { + "name": "Cory Mickelson", + "url": "https://github.com/corymickelson" + }, + { + "name": "Daniel Bevenius", + "url": "https://github.com/danbev" + }, + { + "name": "Dante Calderón", + "url": "https://github.com/dantehemerson" + }, + { + "name": "Darshan Sen", + "url": "https://github.com/RaisinTen" + }, + { + "name": "David Halls", + "url": "https://github.com/davedoesdev" + }, + { + "name": "Deepak Rajamohan", + "url": "https://github.com/deepakrkris" + }, + { + "name": "Dmitry Ashkadov", + "url": "https://github.com/dmitryash" + }, + { + "name": "Dongjin Na", + "url": "https://github.com/nadongguri" + }, + { + "name": "Doni Rubiagatra", + "url": "https://github.com/rubiagatra" + }, + { + "name": "Eric Bickle", + "url": "https://github.com/ebickle" + }, + { + "name": "extremeheat", + "url": "https://github.com/extremeheat" + }, + { + "name": "Feng Yu", + "url": "https://github.com/F3n67u" + }, + { + "name": "Ferdinand Holzer", + "url": "https://github.com/fholzer" + }, + { + "name": "Gabriel Schulhof", + "url": "https://github.com/gabrielschulhof" + }, + { + "name": "Guenter Sandner", + "url": "https://github.com/gms1" + }, + { + "name": "Gus Caplan", + "url": "https://github.com/devsnek" + }, + { + "name": "Helio Frota", + "url": "https://github.com/helio-frota" + }, + { + "name": "Hitesh Kanwathirtha", + "url": "https://github.com/digitalinfinity" + }, + { + "name": "ikokostya", + "url": "https://github.com/ikokostya" + }, + { + "name": "Jack Xia", + "url": "https://github.com/JckXia" + }, + { + "name": "Jake Barnes", + "url": "https://github.com/DuBistKomisch" + }, + { + "name": "Jake Yoon", + "url": "https://github.com/yjaeseok" + }, + { + "name": "Jason Ginchereau", + "url": "https://github.com/jasongin" + }, + { + "name": "Jenny", + "url": "https://github.com/egg-bread" + }, + { + "name": "Jeroen Janssen", + "url": "https://github.com/japj" + }, + { + "name": "Jim Schlight", + "url": "https://github.com/jschlight" + }, + { + "name": "Jinho Bang", + "url": "https://github.com/romandev" + }, + { + "name": "José Expósito", + "url": "https://github.com/JoseExposito" + }, + { + "name": "joshgarde", + "url": "https://github.com/joshgarde" + }, + { + "name": "Julian Mesa", + "url": "https://github.com/julianmesa-gitkraken" + }, + { + "name": "Kasumi Hanazuki", + "url": "https://github.com/hanazuki" + }, + { + "name": "Kelvin", + "url": "https://github.com/kelvinhammond" + }, + { + "name": "Kevin Eady", + "url": "https://github.com/KevinEady" + }, + { + "name": "Kévin VOYER", + "url": "https://github.com/kecsou" + }, + { + "name": "kidneysolo", + "url": "https://github.com/kidneysolo" + }, + { + "name": "Koki Nishihara", + "url": "https://github.com/Nishikoh" + }, + { + "name": "Konstantin Tarkus", + "url": "https://github.com/koistya" + }, + { + "name": "Kyle Farnung", + "url": "https://github.com/kfarnung" + }, + { + "name": "Kyle Kovacs", + "url": "https://github.com/nullromo" + }, + { + "name": "legendecas", + "url": "https://github.com/legendecas" + }, + { + "name": "LongYinan", + "url": "https://github.com/Brooooooklyn" + }, + { + "name": "Lovell Fuller", + "url": "https://github.com/lovell" + }, + { + "name": "Luciano Martorella", + "url": "https://github.com/lmartorella" + }, + { + "name": "mastergberry", + "url": "https://github.com/mastergberry" + }, + { + "name": "Mathias Küsel", + "url": "https://github.com/mathiask88" + }, + { 
+ "name": "Mathias Stearn", + "url": "https://github.com/RedBeard0531" + }, + { + "name": "Matteo Collina", + "url": "https://github.com/mcollina" + }, + { + "name": "Michael Dawson", + "url": "https://github.com/mhdawson" + }, + { + "name": "Michael Price", + "url": "https://github.com/mikepricedev" + }, + { + "name": "Michele Campus", + "url": "https://github.com/kYroL01" + }, + { + "name": "Mikhail Cheshkov", + "url": "https://github.com/mcheshkov" + }, + { + "name": "nempoBu4", + "url": "https://github.com/nempoBu4" + }, + { + "name": "Nicola Del Gobbo", + "url": "https://github.com/NickNaso" + }, + { + "name": "Nick Soggin", + "url": "https://github.com/iSkore" + }, + { + "name": "Nikolai Vavilov", + "url": "https://github.com/seishun" + }, + { + "name": "Nurbol Alpysbayev", + "url": "https://github.com/anurbol" + }, + { + "name": "pacop", + "url": "https://github.com/pacop" + }, + { + "name": "Peter Šándor", + "url": "https://github.com/petersandor" + }, + { + "name": "Philipp Renoth", + "url": "https://github.com/DaAitch" + }, + { + "name": "rgerd", + "url": "https://github.com/rgerd" + }, + { + "name": "Richard Lau", + "url": "https://github.com/richardlau" + }, + { + "name": "Rolf Timmermans", + "url": "https://github.com/rolftimmermans" + }, + { + "name": "Ross Weir", + "url": "https://github.com/ross-weir" + }, + { + "name": "Ryuichi Okumura", + "url": "https://github.com/okuryu" + }, + { + "name": "Saint Gabriel", + "url": "https://github.com/chineduG" + }, + { + "name": "Sampson Gao", + "url": "https://github.com/sampsongao" + }, + { + "name": "Sam Roberts", + "url": "https://github.com/sam-github" + }, + { + "name": "strager", + "url": "https://github.com/strager" + }, + { + "name": "Taylor Woll", + "url": "https://github.com/boingoing" + }, + { + "name": "Thomas Gentilhomme", + "url": "https://github.com/fraxken" + }, + { + "name": "Tim Rach", + "url": "https://github.com/timrach" + }, + { + "name": "Tobias Nießen", + "url": "https://github.com/tniessen" + }, + { + "name": "todoroff", + "url": "https://github.com/todoroff" + }, + { + "name": "Toyo Li", + "url": "https://github.com/toyobayashi" + }, + { + "name": "Tux3", + "url": "https://github.com/tux3" + }, + { + "name": "Vlad Velmisov", + "url": "https://github.com/Velmisov" + }, + { + "name": "Vladimir Morozov", + "url": "https://github.com/vmoroz" + + }, + { + "name": "WenheLI", + "url": "https://github.com/WenheLI" + }, + { + "name": "Xuguang Mei", + "url": "https://github.com/meixg" + }, + { + "name": "Yohei Kishimoto", + "url": "https://github.com/morokosi" + }, + { + "name": "Yulong Wang", + "url": "https://github.com/fs-eire" + }, + { + "name": "Ziqiu Zhao", + "url": "https://github.com/ZzqiZQute" + }, + { + "name": "Feng Yu", + "url": "https://github.com/F3n67u" + }, + { + "name": "wanlu wang", + "url": "https://github.com/wanlu" + }, + { + "name": "Caleb Hearon", + "url": "https://github.com/chearon" + }, + { + "name": "Marx", + "url": "https://github.com/MarxJiao" + }, + { + "name": "Ömer AKGÜL", + "url": "https://github.com/tuhalf" + } + ], + "description": "Node.js API (Node-API)", + "devDependencies": { + "benchmark": "^2.1.4", + "bindings": "^1.5.0", + "clang-format": "^1.4.0", + "eslint": "^7.32.0", + "eslint-config-semistandard": "^16.0.0", + "eslint-config-standard": "^16.0.3", + "eslint-plugin-import": "^2.24.2", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "fs-extra": "^11.1.1", + "path": "^0.12.7", + "pre-commit": "^1.2.2", + "safe-buffer": "^5.1.1" + }, + "directories": 
{}, + "gypfile": false, + "homepage": "https://github.com/nodejs/node-addon-api", + "keywords": [ + "n-api", + "napi", + "addon", + "native", + "bindings", + "c", + "c++", + "nan", + "node-addon-api" + ], + "license": "MIT", + "main": "index.js", + "name": "node-addon-api", + "readme": "README.md", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/node-addon-api.git" + }, + "files": [ + "*.{c,h,gyp,gypi}", + "package-support.json", + "tools/" + ], + "scripts": { + "prebenchmark": "node-gyp rebuild -C benchmark", + "benchmark": "node benchmark", + "pretest": "node-gyp rebuild -C test", + "test": "node test", + "test:debug": "node-gyp rebuild -C test --debug && NODE_API_BUILD_CONFIG=Debug node ./test/index.js", + "predev": "node-gyp rebuild -C test --debug", + "dev": "node test", + "predev:incremental": "node-gyp configure build -C test --debug", + "dev:incremental": "node test", + "doc": "doxygen doc/Doxyfile", + "lint": "node tools/eslint-format && node tools/clang-format", + "lint:fix": "node tools/clang-format --fix && node tools/eslint-format --fix" + }, + "pre-commit": "lint", + "version": "7.1.1", + "support": true +} diff --git a/miniprogram/node_modules/node-addon-api/tools/README.md b/miniprogram/node_modules/node-addon-api/tools/README.md new file mode 100644 index 00000000..6b80e94f --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/tools/README.md @@ -0,0 +1,73 @@ +# Tools + +## clang-format + +The clang-format checking tools is designed to check changed lines of code compared to given git-refs. + +## Migration Script + +The migration tool is designed to reduce repetitive work in the migration process. However, the script is not aiming to convert every thing for you. There are usually some small fixes and major reconstruction required. + +### How To Use + +To run the conversion script, first make sure you have the latest `node-addon-api` in your `node_modules` directory. +``` +npm install node-addon-api +``` + +Then run the script passing your project directory +``` +node ./node_modules/node-addon-api/tools/conversion.js ./ +``` + +After finish, recompile and debug things that are missed by the script. + + +### Quick Fixes +Here is the list of things that can be fixed easily. + 1. Change your methods' return value to void if it doesn't return value to JavaScript. + 2. Use `.` to access attribute or to invoke member function in Napi::Object instead of `->`. + 3. `Napi::New(env, value);` to `Napi::[Type]::New(env, value); + + +### Major Reconstructions +The implementation of `Napi::ObjectWrap` is significantly different from NAN's. `Napi::ObjectWrap` takes a pointer to the wrapped object and creates a reference to the wrapped object inside ObjectWrap constructor. `Napi::ObjectWrap` also associates wrapped object's instance methods to Javascript module instead of static methods like NAN. + +So if you use Nan::ObjectWrap in your module, you will need to execute the following steps. + + 1. Convert your [ClassName]::New function to a constructor function that takes a `Napi::CallbackInfo`. Declare it as +``` +[ClassName](const Napi::CallbackInfo& info); +``` +and define it as +``` +[ClassName]::[ClassName](const Napi::CallbackInfo& info) : Napi::ObjectWrap<[ClassName]>(info){ + ... +} +``` +This way, the `Napi::ObjectWrap` constructor will be invoked after the object has been instantiated and `Napi::ObjectWrap` can use the `this` pointer to create a reference to the wrapped object. + + 2. Move your original constructor code into the new constructor. 
Delete your original constructor. + 3. In your class initialization function, associate native methods in the following way. +``` +Napi::FunctionReference constructor; + +void [ClassName]::Init(Napi::Env env, Napi::Object exports, Napi::Object module) { + Napi::HandleScope scope(env); + Napi::Function ctor = DefineClass(env, "[ClassName]", { + InstanceMethod<&[ClassName]::Func1>("Func1"), + InstanceMethod<&[ClassName]::Func2>("Func2"), + InstanceAccessor<&[ClassName]::ValueGetter>("Value"), + StaticMethod<&[ClassName]::StaticMethod>("MethodName"), + InstanceValue("Value", Napi::[Type]::New(env, value)), + }); + + constructor = Napi::Persistent(ctor); + constructor.SuppressDestruct(); + exports.Set("[ClassName]", ctor); +} +``` + 4. In functions where you would have unwrapped the ObjectWrap in NAN, e.g. `[ClassName]* native = Nan::ObjectWrap::Unwrap<[ClassName]>(info.This());`, use the `this` pointer directly as the unwrapped object, since each ObjectWrap instance is associated with a unique object instance. + + +If you still find issues after following this guide, please leave us an issue describing your problem and we will try to resolve it. diff --git a/miniprogram/node_modules/node-addon-api/tools/check-napi.js b/miniprogram/node_modules/node-addon-api/tools/check-napi.js new file mode 100644 index 00000000..9199af33 --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/tools/check-napi.js @@ -0,0 +1,99 @@ +'use strict'; +// Descend into a directory structure and, for each file matching *.node, output +// based on the imports found in the file whether it's an N-API module or not. + +const fs = require('fs'); +const path = require('path'); + +// Read the output of the command, break it into lines, and use the reducer to +// decide whether the file is an N-API module or not. +function checkFile (file, command, argv, reducer) { + const child = require('child_process').spawn(command, argv, { + stdio: ['inherit', 'pipe', 'inherit'] + }); + let leftover = ''; + let isNapi; + child.stdout.on('data', (chunk) => { + if (isNapi === undefined) { + chunk = (leftover + chunk.toString()).split(/[\r\n]+/); + leftover = chunk.pop(); + isNapi = chunk.reduce(reducer, isNapi); + if (isNapi !== undefined) { + child.kill(); + } + } + }); + child.on('close', (code, signal) => { + if ((code === null && signal !== null) || (code !== 0)) { + console.log( + command + ' exited with code: ' + code + ' and signal: ' + signal); + } else { + // Green if it's a N-API module, red otherwise. + console.log( + '\x1b[' + (isNapi ? '42' : '41') + 'm' + + (isNapi ? ' N-API' : 'Not N-API') + + '\x1b[0m: ' + file); + } + }); +} + +// Use nm -a to list symbols. +function checkFileUNIX (file) { + checkFile(file, 'nm', ['-a', file], (soFar, line) => { + if (soFar === undefined) { + line = line.match(/([0-9a-f]*)? ([a-zA-Z]) (.*$)/); + if (line[2] === 'U') { + if (/^napi/.test(line[3])) { + soFar = true; + } + } + } + return soFar; + }); +} + +// Use dumpbin /imports to list symbols. +function checkFileWin32 (file) { + checkFile(file, 'dumpbin', ['/imports', file], (soFar, line) => { + if (soFar === undefined) { + line = line.match(/([0-9a-f]*)? +([a-zA-Z0-9]) (.*$)/); + if (line && /^napi/.test(line[line.length - 1])) { + soFar = true; + } + } + return soFar; + }); +} + +// Descend into a directory structure and pass each file ending in '.node' to +// one of the above checks, depending on the OS.
+function recurse (top) { + fs.readdir(top, (error, items) => { + if (error) { + throw new Error('error reading directory ' + top + ': ' + error); + } + items.forEach((item) => { + item = path.join(top, item); + fs.stat(item, ((item) => (error, stats) => { + if (error) { + throw new Error('error about ' + item + ': ' + error); + } + if (stats.isDirectory()) { + recurse(item); + } else if (/[.]node$/.test(item) && + // Explicitly ignore files called 'nothing.node' because they are + // artefacts of node-addon-api having identified a version of + // Node.js that ships with a correct implementation of N-API. + path.basename(item) !== 'nothing.node') { + process.platform === 'win32' + ? checkFileWin32(item) + : checkFileUNIX(item); + } + })(item)); + }); + }); +} + +// Start with the directory given on the command line or the current directory +// if nothing was given. +recurse(process.argv.length > 3 ? process.argv[2] : '.'); diff --git a/miniprogram/node_modules/node-addon-api/tools/clang-format.js b/miniprogram/node_modules/node-addon-api/tools/clang-format.js new file mode 100644 index 00000000..e4bb4f52 --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/tools/clang-format.js @@ -0,0 +1,71 @@ +#!/usr/bin/env node + +const spawn = require('child_process').spawnSync; +const path = require('path'); + +const filesToCheck = ['*.h', '*.cc']; +const FORMAT_START = process.env.FORMAT_START || 'main'; + +function main (args) { + let fix = false; + while (args.length > 0) { + switch (args[0]) { + case '-f': + case '--fix': + fix = true; + break; + default: + } + args.shift(); + } + + const clangFormatPath = path.dirname(require.resolve('clang-format')); + const binary = process.platform === 'win32' + ? 'node_modules\\.bin\\clang-format.cmd' + : 'node_modules/.bin/clang-format'; + const options = ['--binary=' + binary, '--style=file']; + if (fix) { + options.push(FORMAT_START); + } else { + options.push('--diff', FORMAT_START); + } + + const gitClangFormatPath = path.join(clangFormatPath, 'bin/git-clang-format'); + const result = spawn( + 'python', + [gitClangFormatPath, ...options, '--', ...filesToCheck], + { encoding: 'utf-8' } + ); + + if (result.stderr) { + console.error('Error running git-clang-format:', result.stderr); + return 2; + } + + const clangFormatOutput = result.stdout.trim(); + // Bail fast if in fix mode. + if (fix) { + console.log(clangFormatOutput); + return 0; + } + // Detect if there is any complains from clang-format + if ( + clangFormatOutput !== '' && + clangFormatOutput !== 'no modified files to format' && + clangFormatOutput !== 'clang-format did not modify any files' + ) { + console.error(clangFormatOutput); + const fixCmd = 'npm run lint:fix'; + console.error(` + ERROR: please run "${fixCmd}" to format changes in your commit + Note that when running the command locally, please keep your local + main branch and working branch up to date with nodejs/node-addon-api + to exclude un-related complains. + Or you can run "env FORMAT_START=upstream/main ${fixCmd}".`); + return 1; + } +} + +if (require.main === module) { + process.exitCode = main(process.argv.slice(2)); +} diff --git a/miniprogram/node_modules/node-addon-api/tools/conversion.js b/miniprogram/node_modules/node-addon-api/tools/conversion.js new file mode 100644 index 00000000..f89245ac --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/tools/conversion.js @@ -0,0 +1,301 @@ +#! 
/usr/bin/env node + +'use strict'; + +const fs = require('fs'); +const path = require('path'); + +const args = process.argv.slice(2); +const dir = args[0]; +if (!dir) { + console.log('Usage: node ' + path.basename(__filename) + ' '); + process.exit(1); +} + +const NodeApiVersion = require('../package.json').version; + +const disable = args[1]; +let ConfigFileOperations; +if (disable !== '--disable' && dir !== '--disable') { + ConfigFileOperations = { + 'package.json': [ + [/([ ]*)"dependencies": {/g, '$1"dependencies": {\n$1 "node-addon-api": "' + NodeApiVersion + '",'], + [/[ ]*"nan": *"[^"]+"(,|)[\n\r]/g, ''] + ], + 'binding.gyp': [ + [/([ ]*)'include_dirs': \[/g, '$1\'include_dirs\': [\n$1 \'\s+(\w+)\s*=\s*Nan::New\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'], + [/Local\s+(\w+)\s*=\s*Nan::New\([\w\d:]+\);\s+(\w+)\.Reset\((\1)\);\s+\1->SetClassName\((Nan::String::New|Nan::New<(v8::)*String>)\("(.+?)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$6", {'], + [/Local\s+(\w+)\s*=\s*Nan::New\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'], + [/Nan::New\(([\w\d:]+)\)->GetFunction\(\)/g, 'Napi::Function::New(env, $1)'], + [/Nan::New\(([\w\d:]+)\)->GetFunction()/g, 'Napi::Function::New(env, $1);'], + [/Nan::New\(([\w\d:]+)\)/g, 'Napi::Function::New(env, $1)'], + [/Nan::New\(([\w\d:]+)\)/g, 'Napi::Function::New(env, $1)'], + + // FunctionTemplate to FunctionReference + [/Nan::Persistent<(v8::)*FunctionTemplate>/g, 'Napi::FunctionReference'], + [/Nan::Persistent<(v8::)*Function>/g, 'Napi::FunctionReference'], + [/v8::Local/g, 'Napi::FunctionReference'], + [/Local/g, 'Napi::FunctionReference'], + [/v8::FunctionTemplate/g, 'Napi::FunctionReference'], + [/FunctionTemplate/g, 'Napi::FunctionReference'], + + [/([ ]*)Nan::SetPrototypeMethod\(\w+, "(\w+)", (\w+)\);/g, '$1InstanceMethod("$2", &$3),'], + [/([ ]*)(?:\w+\.Reset\(\w+\);\s+)?\(target\)\.Set\("(\w+)",\s*Nan::GetFunction\((\w+)\)\);/gm, + '});\n\n' + + '$1constructor = Napi::Persistent($3);\n' + + '$1constructor.SuppressDestruct();\n' + + '$1target.Set("$2", $3);'], + + // TODO: Other attribute combinations + [/static_cast\(ReadOnly\s*\|\s*DontDelete\)/gm, + 'static_cast(napi_enumerable | napi_configurable)'], + + [/([\w\d:<>]+?)::Cast\((.+?)\)/g, '$2.As<$1>()'], + + [/\*Nan::Utf8String\(([^)]+)\)/g, '$1->As().Utf8Value().c_str()'], + [/Nan::Utf8String +(\w+)\(([^)]+)\)/g, 'std::string $1 = $2.As()'], + [/Nan::Utf8String/g, 'std::string'], + + [/v8::String::Utf8Value (.+?)\((.+?)\)/g, 'Napi::String $1(env, $2)'], + [/String::Utf8Value (.+?)\((.+?)\)/g, 'Napi::String $1(env, $2)'], + [/\.length\(\)/g, '.Length()'], + + [/Nan::MakeCallback\(([^,]+),[\s\\]+([^,]+),/gm, '$2.MakeCallback($1,'], + + [/class\s+(\w+)\s*:\s*public\s+Nan::ObjectWrap/g, 'class $1 : public Napi::ObjectWrap<$1>'], + [/(\w+)\(([^)]*)\)\s*:\s*Nan::ObjectWrap\(\)\s*(,)?/gm, '$1($2) : Napi::ObjectWrap<$1>()$3'], + + // HandleOKCallback to OnOK + [/HandleOKCallback/g, 'OnOK'], + // HandleErrorCallback to OnError + [/HandleErrorCallback/g, 'OnError'], + + // ex. .As() to .As() + [/\.As\(\)/g, '.As()'], + [/\.As<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>\(\)/g, '.As()'], + + // ex. 
Nan::New(info[0]) to Napi::Number::New(info[0]) + [/Nan::New<(v8::)*Integer>\((.+?)\)/g, 'Napi::Number::New(env, $2)'], + [/Nan::New\(([0-9.]+)\)/g, 'Napi::Number::New(env, $1)'], + [/Nan::New<(v8::)*String>\("(.+?)"\)/g, 'Napi::String::New(env, "$2")'], + [/Nan::New\("(.+?)"\)/g, 'Napi::String::New(env, "$1")'], + [/Nan::New<(v8::)*(.+?)>\(\)/g, 'Napi::$2::New(env)'], + [/Nan::New<(.+?)>\(\)/g, 'Napi::$1::New(env)'], + [/Nan::New<(v8::)*(.+?)>\(/g, 'Napi::$2::New(env, '], + [/Nan::New<(.+?)>\(/g, 'Napi::$1::New(env, '], + [/Nan::NewBuffer\(/g, 'Napi::Buffer::New(env, '], + // TODO: Properly handle this + [/Nan::New\(/g, 'Napi::New(env, '], + + [/\.IsInt32\(\)/g, '.IsNumber()'], + [/->IsInt32\(\)/g, '.IsNumber()'], + + [/(.+?)->BooleanValue\(\)/g, '$1.As().Value()'], + [/(.+?)->Int32Value\(\)/g, '$1.As().Int32Value()'], + [/(.+?)->Uint32Value\(\)/g, '$1.As().Uint32Value()'], + [/(.+?)->IntegerValue\(\)/g, '$1.As().Int64Value()'], + [/(.+?)->NumberValue\(\)/g, '$1.As().DoubleValue()'], + + // ex. Nan::To(info[0]) to info[0].Value() + [/Nan::To\((.+?)\)/g, '$2.To()'], + [/Nan::To<(Boolean|String|Number|Object|Array|Symbol|Function)>\((.+?)\)/g, '$2.To()'], + // ex. Nan::To(info[0]) to info[0].As().Value() + [/Nan::To\((.+?)\)/g, '$1.As().Value()'], + // ex. Nan::To(info[0]) to info[0].As().Int32Value() + [/Nan::To\((.+?)\)/g, '$1.As().Int32Value()'], + // ex. Nan::To(info[0]) to info[0].As().Int32Value() + [/Nan::To\((.+?)\)/g, '$1.As().Int32Value()'], + // ex. Nan::To(info[0]) to info[0].As().Uint32Value() + [/Nan::To\((.+?)\)/g, '$1.As().Uint32Value()'], + // ex. Nan::To(info[0]) to info[0].As().Int64Value() + [/Nan::To\((.+?)\)/g, '$1.As().Int64Value()'], + // ex. Nan::To(info[0]) to info[0].As().FloatValue() + [/Nan::To\((.+?)\)/g, '$1.As().FloatValue()'], + // ex. Nan::To(info[0]) to info[0].As().DoubleValue() + [/Nan::To\((.+?)\)/g, '$1.As().DoubleValue()'], + + [/Nan::New\((\w+)\)->HasInstance\((\w+)\)/g, '$2.InstanceOf($1.Value())'], + + [/Nan::Has\(([^,]+),\s*/gm, '($1).Has('], + [/\.Has\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\)/gm, '.Has($1)'], + [/\.Has\([\s|\\]*Nan::New\(([^)]+)\)\)/gm, '.Has($1)'], + + [/Nan::Get\(([^,]+),\s*/gm, '($1).Get('], + [/\.Get\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\)/gm, '.Get($1)'], + [/\.Get\([\s|\\]*Nan::New\(([^)]+)\)\)/gm, '.Get($1)'], + + [/Nan::Set\(([^,]+),\s*/gm, '($1).Set('], + [/\.Set\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\s*,/gm, '.Set($1,'], + [/\.Set\([\s|\\]*Nan::New\(([^)]+)\)\s*,/gm, '.Set($1,'], + + // ex. node::Buffer::HasInstance(info[0]) to info[0].IsBuffer() + [/node::Buffer::HasInstance\((.+?)\)/g, '$1.IsBuffer()'], + // ex. node::Buffer::Length(info[0]) to info[0].Length() + [/node::Buffer::Length\((.+?)\)/g, '$1.As>().Length()'], + // ex. 
node::Buffer::Data(info[0]) to info[0].Data() + [/node::Buffer::Data\((.+?)\)/g, '$1.As>().Data()'], + [/Nan::CopyBuffer\(/g, 'Napi::Buffer::Copy(env, '], + + // Nan::AsyncQueueWorker(worker) + [/Nan::AsyncQueueWorker\((.+)\);/g, '$1.Queue();'], + [/Nan::(Undefined|Null|True|False)\(\)/g, 'env.$1()'], + + // Nan::ThrowError(error) to Napi::Error::New(env, error).ThrowAsJavaScriptException() + [/([ ]*)return Nan::Throw(\w*?)Error\((.+?)\);/g, '$1Napi::$2Error::New(env, $3).ThrowAsJavaScriptException();\n$1return env.Null();'], + [/Nan::Throw(\w*?)Error\((.+?)\);\n(\s*)return;/g, 'Napi::$1Error::New(env, $2).ThrowAsJavaScriptException();\n$3return env.Null();'], + [/Nan::Throw(\w*?)Error\((.+?)\);/g, 'Napi::$1Error::New(env, $2).ThrowAsJavaScriptException();\n'], + // Nan::RangeError(error) to Napi::RangeError::New(env, error) + [/Nan::(\w*?)Error\((.+)\)/g, 'Napi::$1Error::New(env, $2)'], + + [/Nan::Set\((.+?),\n* *(.+?),\n* *(.+?),\n* *(.+?)\)/g, '$1.Set($2, $3, $4)'], + + [/Nan::(Escapable)?HandleScope\s+(\w+)\s*;/g, 'Napi::$1HandleScope $2(env);'], + [/Nan::(Escapable)?HandleScope/g, 'Napi::$1HandleScope'], + [/Nan::ForceSet\(([^,]+), ?/g, '$1->DefineProperty('], + [/\.ForceSet\(Napi::String::New\(env, "(\w+)"\),\s*?/g, '.DefineProperty("$1", '], + // [ /Nan::GetPropertyNames\(([^,]+)\)/, '$1->GetPropertyNames()' ], + [/Nan::Equals\(([^,]+),/g, '$1.StrictEquals('], + + [/(.+)->Set\(/g, '$1.Set('], + + [/Nan::Callback/g, 'Napi::FunctionReference'], + + [/Nan::Persistent/g, 'Napi::ObjectReference'], + [/Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE target/g, 'Napi::Env& env, Napi::Object& target'], + + [/(\w+)\*\s+(\w+)\s*=\s*Nan::ObjectWrap::Unwrap<\w+>\(info\.This\(\)\);/g, '$1* $2 = this;'], + [/Nan::ObjectWrap::Unwrap<(\w+)>\((.*)\);/g, '$2.Unwrap<$1>();'], + + [/Nan::NAN_METHOD_RETURN_TYPE/g, 'void'], + [/NAN_INLINE/g, 'inline'], + + [/Nan::NAN_METHOD_ARGS_TYPE/g, 'const Napi::CallbackInfo&'], + [/NAN_METHOD\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'], + [/static\s*NAN_GETTER\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'], + [/NAN_GETTER\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'], + [/static\s*NAN_SETTER\(([\w\d:]+?)\)/g, 'void $1(const Napi::CallbackInfo& info, const Napi::Value& value)'], + [/NAN_SETTER\(([\w\d:]+?)\)/g, 'void $1(const Napi::CallbackInfo& info, const Napi::Value& value)'], + [/void Init\((v8::)*Local<(v8::)*Object> exports\)/g, 'Napi::Object Init(Napi::Env env, Napi::Object exports)'], + [/NAN_MODULE_INIT\(([\w\d:]+?)\);/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports);'], + [/NAN_MODULE_INIT\(([\w\d:]+?)\)/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports)'], + + [/::(Init(?:ialize)?)\(target\)/g, '::$1(env, target, module)'], + [/constructor_template/g, 'constructor'], + + [/Nan::FunctionCallbackInfo<(v8::)?Value>[ ]*& [ ]*info\)[ ]*{\n*([ ]*)/gm, 'Napi::CallbackInfo& info) {\n$2Napi::Env env = info.Env();\n$2'], + [/Nan::FunctionCallbackInfo<(v8::)*Value>\s*&\s*info\);/g, 'Napi::CallbackInfo& info);'], + [/Nan::FunctionCallbackInfo<(v8::)*Value>\s*&/g, 'Napi::CallbackInfo&'], + + [/Buffer::HasInstance\(([^)]+)\)/g, '$1.IsBuffer()'], + + [/info\[(\d+)\]->/g, 'info[$1].'], + [/info\[([\w\d]+)\]->/g, 'info[$1].'], + [/info\.This\(\)->/g, 'info.This().'], + [/->Is(Object|String|Int32|Number)\(\)/g, '.Is$1()'], + [/info.GetReturnValue\(\).SetUndefined\(\)/g, 'return env.Undefined()'], + [/info\.GetReturnValue\(\)\.Set\(((\n|.)+?)\);/g, 'return $1;'], + + // ex. 
Local to Napi::Value + [/v8::Local/g, 'Napi::$1'], + [/Local<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>/g, 'Napi::$1'], + + // Declare an env in helper functions that take a Napi::Value + [/(\w+)\(Napi::Value (\w+)(,\s*[^()]+)?\)\s*{\n*([ ]*)/gm, '$1(Napi::Value $2$3) {\n$4Napi::Env env = $2.Env();\n$4'], + + // delete #include and/or + [/#include +(<|")(?:node|nan).h("|>)/g, '#include $1napi.h$2\n#include $1uv.h$2'], + // NODE_MODULE to NODE_API_MODULE + [/NODE_MODULE/g, 'NODE_API_MODULE'], + [/Nan::/g, 'Napi::'], + [/nan.h/g, 'napi.h'], + + // delete .FromJust() + [/\.FromJust\(\)/g, ''], + // delete .ToLocalCheck() + [/\.ToLocalChecked\(\)/g, ''], + [/^.*->SetInternalFieldCount\(.*$/gm, ''], + + // replace using node; and/or using v8; to using Napi; + [/using (node|v8);/g, 'using Napi;'], + [/using namespace (node|Nan|v8);/g, 'using namespace Napi;'], + // delete using v8::Local; + [/using v8::Local;\n/g, ''], + // replace using v8::XXX; with using Napi::XXX + [/using v8::([A-Za-z]+);/g, 'using Napi::$1;'] + +]; + +const paths = listFiles(dir); +paths.forEach(function (dirEntry) { + const filename = dirEntry.split('\\').pop().split('/').pop(); + + // Check whether the file is a source file or a config file + // then execute function accordingly + const sourcePattern = /.+\.h|.+\.cc|.+\.cpp/; + if (sourcePattern.test(filename)) { + convertFile(dirEntry, SourceFileOperations); + } else if (ConfigFileOperations[filename] != null) { + convertFile(dirEntry, ConfigFileOperations[filename]); + } +}); + +function listFiles (dir, filelist) { + const files = fs.readdirSync(dir); + filelist = filelist || []; + files.forEach(function (file) { + if (file === 'node_modules') { + return; + } + + if (fs.statSync(path.join(dir, file)).isDirectory()) { + filelist = listFiles(path.join(dir, file), filelist); + } else { + filelist.push(path.join(dir, file)); + } + }); + return filelist; +} + +function convert (content, operations) { + for (let i = 0; i < operations.length; i++) { + const operation = operations[i]; + content = content.replace(operation[0], operation[1]); + } + return content; +} + +function convertFile (fileName, operations) { + fs.readFile(fileName, 'utf-8', function (err, file) { + if (err) throw err; + + file = convert(file, operations); + + fs.writeFile(fileName, file, function (err) { + if (err) throw err; + }); + }); +} diff --git a/miniprogram/node_modules/node-addon-api/tools/eslint-format.js b/miniprogram/node_modules/node-addon-api/tools/eslint-format.js new file mode 100644 index 00000000..6923ab7b --- /dev/null +++ b/miniprogram/node_modules/node-addon-api/tools/eslint-format.js @@ -0,0 +1,79 @@ +#!/usr/bin/env node + +const spawn = require('child_process').spawnSync; + +const filesToCheck = '*.js'; +const FORMAT_START = process.env.FORMAT_START || 'main'; +const IS_WIN = process.platform === 'win32'; +const ESLINT_PATH = IS_WIN ? 
'node_modules\\.bin\\eslint.cmd' : 'node_modules/.bin/eslint'; + +function main (args) { + let fix = false; + while (args.length > 0) { + switch (args[0]) { + case '-f': + case '--fix': + fix = true; + break; + default: + } + args.shift(); + } + + // Check js files that change on unstaged file + const fileUnStaged = spawn( + 'git', + ['diff', '--name-only', '--diff-filter=d', FORMAT_START, filesToCheck], + { + encoding: 'utf-8' + } + ); + + // Check js files that change on staged file + const fileStaged = spawn( + 'git', + ['diff', '--name-only', '--cached', '--diff-filter=d', FORMAT_START, filesToCheck], + { + encoding: 'utf-8' + } + ); + + const options = [ + ...fileStaged.stdout.split('\n').filter((f) => f !== ''), + ...fileUnStaged.stdout.split('\n').filter((f) => f !== '') + ]; + + if (fix) { + options.push('--fix'); + } + + const result = spawn(ESLINT_PATH, [...options], { + encoding: 'utf-8' + }); + + if (result.error && result.error.errno === 'ENOENT') { + console.error('Eslint not found! Eslint is supposed to be found at ', ESLINT_PATH); + return 2; + } + + if (result.status === 1) { + console.error('Eslint error:', result.stdout); + const fixCmd = 'npm run lint:fix'; + console.error(`ERROR: please run "${fixCmd}" to format changes in your commit + Note that when running the command locally, please keep your local + main branch and working branch up to date with nodejs/node-addon-api + to exclude un-related complains. + Or you can run "env FORMAT_START=upstream/main ${fixCmd}". + Also fix JS files by yourself if necessary.`); + return 1; + } + + if (result.stderr) { + console.error('Error running eslint:', result.stderr); + return 2; + } +} + +if (require.main === module) { + process.exitCode = main(process.argv.slice(2)); +} diff --git a/miniprogram/node_modules/once/LICENSE b/miniprogram/node_modules/once/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/miniprogram/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/miniprogram/node_modules/once/README.md b/miniprogram/node_modules/once/README.md new file mode 100644 index 00000000..1f1ffca9 --- /dev/null +++ b/miniprogram/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. + +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. 
+ +To check whether your function has been called, use `fn.called`. Once the +function is called for the first time, the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. + +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors. + +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(log)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback is called a second time +greet(null, once.strict(log)) +``` diff --git a/miniprogram/node_modules/once/once.js b/miniprogram/node_modules/once/once.js new file mode 100644 index 00000000..23540673 --- /dev/null +++ b/miniprogram/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/miniprogram/node_modules/once/package.json b/miniprogram/node_modules/once/package.json new file mode 100644 index 00000000..16815b2f --- /dev/null +++ b/miniprogram/node_modules/once/package.json @@ -0,0 +1,33 @@ +{ + "name": "once", + "version": "1.4.0", + "description": "Run a function exactly one time", + "main": "once.js", + "directories": { + "test": "test" + }, + "dependencies": { + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.0.1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "files": [ + "once.js" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once" + }, + "keywords": [ + "once", + "function", + "one", + "single" + ], + "author": "Isaac Z.
Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/miniprogram/node_modules/prebuild-install/CHANGELOG.md b/miniprogram/node_modules/prebuild-install/CHANGELOG.md new file mode 100644 index 00000000..03cd97aa --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/CHANGELOG.md @@ -0,0 +1,131 @@ +# Changelog + +## [7.1.3] - 2025-01-22 + +### Fixed + +- Bump napi-build-utils from 1 to 2 ([#204](https://github.com/prebuild/prebuild-install/issues/204)) ([`1bf4a15`](https://github.com/prebuild/prebuild-install/commit/1bf4a15)) (Bailey Pearson) + +## [7.1.2] - 2024-02-29 + +### Fixed + +- Support environments where MD5 is prohibited ([#191](https://github.com/prebuild/prebuild-install/issues/191)) ([`9140468`](https://github.com/prebuild/prebuild-install/commit/9140468)) (Tomasz Szuba) + +## [7.1.1] - 2022-06-07 + +### Fixed + +- Replace use of npmlog dependency with console.error ([#182](https://github.com/prebuild/prebuild-install/issues/182)) ([`4e2284c`](https://github.com/prebuild/prebuild-install/commit/4e2284c)) (Lovell Fuller) +- Ensure script output can be captured by tests ([#181](https://github.com/prebuild/prebuild-install/issues/181)) ([`d1853cb`](https://github.com/prebuild/prebuild-install/commit/d1853cb)) (Lovell Fuller) + +## [7.1.0] - 2022-04-20 + +### Changed + +- Allow setting libc to glibc on non-glibc platform ([#176](https://github.com/prebuild/prebuild-install/issues/176)) ([`f729abb`](https://github.com/prebuild/prebuild-install/commit/f729abb)) (Joona Heinikoski) + +## [7.0.1] - 2022-01-28 + +### Changed + +- Upgrade to the latest version of `detect-libc` ([#166](https://github.com/prebuild/prebuild-install/issues/166)) ([`f71c6b9`](https://github.com/prebuild/prebuild-install/commit/f71c6b9)) (Lovell Fuller) + +## [7.0.0] - 2021-11-12 + +### Changed + +- **Breaking:** bump `node-abi` so that Electron 14+ gets correct ABI ([#161](https://github.com/prebuild/prebuild-install/issues/161)) ([`477f347`](https://github.com/prebuild/prebuild-install/commit/477f347)) (csett86). Drops support of Node.js < 10. +- Bump `simple-get` ([`7468c14`](https://github.com/prebuild/prebuild-install/commit/7468c14)) (Vincent Weevers). + +## [6.1.4] - 2021-08-11 + +### Fixed + +- Move auth token to header instead of query param ([#160](https://github.com/prebuild/prebuild-install/issues/160)) ([`b3fad76`](https://github.com/prebuild/prebuild-install/commit/b3fad76)) (nicolai-nordic) +- Remove `_` prefix as it isn't allowed by npm config ([#153](https://github.com/prebuild/prebuild-install/issues/153)) ([`a964e5b`](https://github.com/prebuild/prebuild-install/commit/a964e5b)) (Tom Boothman) +- Make `rc.path` absolute ([#158](https://github.com/prebuild/prebuild-install/issues/158)) ([`57bcc06`](https://github.com/prebuild/prebuild-install/commit/57bcc06)) (George Waters). 
+ +## [6.1.3] - 2021-06-03 + +### Changed + +- Inline no longer maintained `noop-logger` ([#155](https://github.com/prebuild/prebuild-install/issues/155)) ([`e08d75a`](https://github.com/prebuild/prebuild-install/commit/e08d75a)) (Alexandru Dima) +- Point users towards `prebuildify` in README ([#150](https://github.com/prebuild/prebuild-install/issues/150)) ([`5ee1a2f`](https://github.com/prebuild/prebuild-install/commit/5ee1a2f)) (Vincent Weevers) + +## [6.1.2] - 2021-04-24 + +### Fixed + +- Support URL-safe strings in scoped packages ([#148](https://github.com/prebuild/prebuild-install/issues/148)) ([`db36c7a`](https://github.com/prebuild/prebuild-install/commit/db36c7a)) (Marco) + +## [6.1.1] - 2021-04-04 + +### Fixed + +- Support `force` & `buildFromSource` options in yarn ([#140](https://github.com/prebuild/prebuild-install/issues/140)) ([`8cb1ced`](https://github.com/prebuild/prebuild-install/commit/8cb1ced)) (João Moreno) +- Bump `node-abi` to prevent dedupe (closes [#135](https://github.com/prebuild/prebuild-install/issues/135)) ([`2950fb2`](https://github.com/prebuild/prebuild-install/commit/2950fb2)) (Vincent Weevers) + +## [6.1.0] - 2021-04-03 + +### Added + +- Restore local prebuilds feature ([#137](https://github.com/prebuild/prebuild-install/issues/137)) ([`dc4e5ea`](https://github.com/prebuild/prebuild-install/commit/dc4e5ea)) (Wes Roberts). Previously removed in [#81](https://github.com/prebuild/prebuild-install/issues/81) / [`a069253`](https://github.com/prebuild/prebuild-install/commit/a06925378d38ca821bfa93aa4c1fdedc253b2420). + +## [6.0.1] - 2021-02-14 + +### Fixed + +- Fixes empty `--tag-prefix` ([#143](https://github.com/prebuild/prebuild-install/issues/143)) ([**@mathiask88**](https://github.com/mathiask88)) + +## [6.0.0] - 2020-10-23 + +### Changed + +- **Breaking:** don't skip downloads in standalone mode ([`b6f3b36`](https://github.com/prebuild/prebuild-install/commit/b6f3b36)) ([**@vweevers**](https://github.com/vweevers)) + +### Added + +- Document cross platform options ([`e5c9a5a`](https://github.com/prebuild/prebuild-install/commit/e5c9a5a)) ([**@fishbone1**](https://github.com/fishbone1)) + +### Removed + +- **Breaking:** remove `--compile` and `--prebuild` options ([`94f2492`](https://github.com/prebuild/prebuild-install/commit/94f2492)) ([**@vweevers**](https://github.com/vweevers)) + +### Fixed + +- Support npm 7 ([`8acccac`](https://github.com/prebuild/prebuild-install/commit/8acccac), [`08eaf6d`](https://github.com/prebuild/prebuild-install/commit/08eaf6d), [`22175b8`](https://github.com/prebuild/prebuild-install/commit/22175b8)) ([**@vweevers**](https://github.com/vweevers)) + +## [5.3.6] - 2020-10-20 + +### Changed + +- Replace `mkdirp` dependency with `mkdirp-classic` ([**@ralphtheninja**](https://github.com/ralphtheninja)) + +[7.1.3]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.3 + +[7.1.2]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.2 + +[7.1.1]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.1 + +[7.1.0]: https://github.com/prebuild/prebuild-install/releases/tag/v7.1.0 + +[7.0.1]: https://github.com/prebuild/prebuild-install/releases/tag/v7.0.1 + +[7.0.0]: https://github.com/prebuild/prebuild-install/releases/tag/v7.0.0 + +[6.1.4]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.4 + +[6.1.3]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.3 + +[6.1.2]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.2 + +[6.1.1]: 
https://github.com/prebuild/prebuild-install/releases/tag/v6.1.1 + +[6.1.0]: https://github.com/prebuild/prebuild-install/releases/tag/v6.1.0 + +[6.0.1]: https://github.com/prebuild/prebuild-install/releases/tag/v6.0.1 + +[6.0.0]: https://github.com/prebuild/prebuild-install/releases/tag/v6.0.0 + +[5.3.6]: https://github.com/prebuild/prebuild-install/releases/tag/v5.3.6 diff --git a/miniprogram/node_modules/prebuild-install/CONTRIBUTING.md b/miniprogram/node_modules/prebuild-install/CONTRIBUTING.md new file mode 100644 index 00000000..07860da8 --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/CONTRIBUTING.md @@ -0,0 +1,6 @@ +# Contributing to prebuild + +- no commits direct to master +- all commits as pull requests (one or several per PR) +- each commit solves one identifiable problem +- never merge one's own PRs, another contributor does this diff --git a/miniprogram/node_modules/prebuild-install/LICENSE b/miniprogram/node_modules/prebuild-install/LICENSE new file mode 100644 index 00000000..66a4d2a1 --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/miniprogram/node_modules/prebuild-install/README.md b/miniprogram/node_modules/prebuild-install/README.md new file mode 100644 index 00000000..d5aafa9b --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/README.md @@ -0,0 +1,163 @@ +# prebuild-install + +> **A command line tool to easily install prebuilt binaries for multiple versions of Node.js & Electron on a specific platform.** +> By default it downloads prebuilt binaries from a GitHub release. 
+ +[![npm](https://img.shields.io/npm/v/prebuild-install.svg)](https://www.npmjs.com/package/prebuild-install) +![Node version](https://img.shields.io/node/v/prebuild-install.svg) +[![Test](https://img.shields.io/github/workflow/status/prebuild/prebuild-install/Test?label=test)](https://github.com/prebuild/prebuild-install/actions/workflows/test.yml) +[![Standard](https://img.shields.io/badge/standard-informational?logo=javascript\&logoColor=fff)](https://standardjs.com) +[![Common Changelog](https://common-changelog.org/badge.svg)](https://common-changelog.org) + +## Note + +**Instead of [`prebuild`](https://github.com/prebuild/prebuild) paired with [`prebuild-install`](https://github.com/prebuild/prebuild-install), we recommend [`prebuildify`](https://github.com/prebuild/prebuildify) paired with [`node-gyp-build`](https://github.com/prebuild/node-gyp-build).** + +With `prebuildify`, all prebuilt binaries are shipped inside the package that is published to npm, which means there's no need for a separate download step like you find in `prebuild`. The irony of this approach is that it is faster to download all prebuilt binaries for every platform when they are bundled than it is to download a single prebuilt binary as an install script. + +Upsides: + +1. No extra download step, making it more reliable and faster to install. +2. Supports changing runtime versions locally and using the same install between Node.js and Electron. Reinstalling or rebuilding is not necessary, as all prebuilt binaries are in the npm tarball and the correct one is simply picked on runtime. +3. The `node-gyp-build` runtime dependency is dependency-free and will remain so out of principle, because introducing dependencies would negate the shorter install time. +4. Prebuilt binaries work even if npm install scripts are disabled. +5. The npm package checksum covers prebuilt binaries too. + +Downsides: + +1. The installed npm package is larger on disk. Using [Node-API](https://nodejs.org/api/n-api.html) alleviates this because Node-API binaries are runtime-agnostic and forward-compatible. +2. Publishing is mildly more complicated, because `npm publish` must be done after compiling and fetching prebuilt binaries (typically in CI). + +## Usage + +Use [`prebuild`](https://github.com/prebuild/prebuild) to create and upload prebuilt binaries. Then change your package.json install script to: + +```json +{ + "scripts": { + "install": "prebuild-install || node-gyp rebuild" + } +} +``` + +When a consumer then installs your package with npm thus triggering the above install script, `prebuild-install` will download a suitable prebuilt binary, or exit with a non-zero exit code if there is none, which triggers `node-gyp rebuild` in order to build from source. 
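To see both paths of this fallback locally, assuming a package whose install script is wired up as shown above, you can toggle the source build with the `npm_config_build_from_source` environment variable (described further below):

```sh
# Normal install: prebuild-install tries to download a matching prebuilt binary;
# node-gyp rebuild only runs if no prebuilt binary is found.
npm install

# Force the fallback: skip the prebuilt download and compile from source.
npm_config_build_from_source=true npm install
```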
+ +Options (see below) can be passed to `prebuild-install` like so: + +```json +{ + "scripts": { + "install": "prebuild-install -r napi || node-gyp rebuild" + } +} +``` + +### Help + +``` +prebuild-install [options] + + --download -d [url] (download prebuilds, no url means github) + --target -t version (version to install for) + --runtime -r runtime (Node runtime [node, napi or electron] to build or install for, default is node) + --path -p path (make a prebuild-install here) + --token -T gh-token (github token for private repos) + --arch arch (target CPU architecture, see Node OS module docs, default is current arch) + --platform platform (target platform, see Node OS module docs, default is current platform) + --tag-prefix (github tag prefix, default is "v") + --build-from-source (skip prebuild download) + --verbose (log verbosely) + --libc (use provided libc rather than system default) + --debug (set Debug or Release configuration) + --version (print prebuild-install version and exit) +``` + +When `prebuild-install` is run via an `npm` script, the options `--build-from-source`, `--debug`, `--download`, `--target`, `--runtime`, `--arch`, `--platform` and `--libc` may be passed through via arguments given to the `npm` command. + +Alternatively you can set the environment variables `npm_config_build_from_source=true`, `npm_config_platform`, `npm_config_arch`, `npm_config_target`, `npm_config_runtime` and `npm_config_libc`. + +### Libc + +On non-glibc Linux platforms, the libc name is appended to the platform name; for example, musl-based environments are reported as `linuxmusl`. If `--libc=glibc` is passed as an option, the libc suffix is dropped and the platform is reported as plain `linux`. This can be used, for example, to build cross-platform packages on Alpine Linux. + +### Private Repositories + +`prebuild-install` supports downloading prebuilds from private GitHub repositories using the `-T <gh-token>` option: + +``` +$ prebuild-install -T <gh-token> +``` + +If you don't want to pass the token on the command line, you can put it in `~/.prebuild-installrc`: + +``` +token=<gh-token> +``` + +Alternatively you can specify it in the `prebuild-install_token` environment variable. + +Note that using a GitHub token means the API is used to resolve the correct release, so you are subject to the [GitHub Rate Limit](https://developer.github.com/v3/rate_limit/). + +### Create GitHub Token + +To create a token: + +- Go to [this page](https://github.com/settings/tokens) +- Click the `Generate new token` button +- Give the token a name and click the `Generate token` button, see below + +![prebuild-token](https://cloud.githubusercontent.com/assets/13285808/20844584/d0b85268-b8c0-11e6-8b08-2b19522165a9.png) + +The default scopes should be fine. + +### Custom binaries + +The end user can override the binary download location through environment variables in their .npmrc file. +The variable needs to match the mask `% your package name %_binary_host` or `% your package name %_binary_host_mirror`. For example: + +``` +leveldown_binary_host=http://overriden-host.com/overriden-path +``` + +Note that the package version subpath and file name will still be appended. +So if you are installing `leveldown@1.2.3` the resulting URL will be: + +``` +http://overriden-host.com/overriden-path/v1.2.3/leveldown-v1.2.3-node-v57-win32-x64.tar.gz +``` + +#### Local prebuilds + +If you want to use prebuilds from your local filesystem, you can use the `% your package name %_local_prebuilds` .npmrc variable to set a path to the folder containing prebuilds.
For example: + +``` +leveldown_local_prebuilds=/path/to/prebuilds +``` + +This option will look directly in that folder for bundles created with `prebuild`, for example: + +``` +/path/to/prebuilds/leveldown-v1.2.3-node-v57-win32-x64.tar.gz +``` + +Non-absolute paths resolve relative to the directory of the package invoking prebuild-install, e.g. for nested dependencies. + +### Cache + +All prebuilt binaries are cached to minimize traffic. So first `prebuild-install` picks binaries from the cache and if no binary could be found, it will be downloaded. Depending on the environment, the cache folder is determined in the following order: + +- `${npm_config_cache}/_prebuilds` +- `${APP_DATA}/npm-cache/_prebuilds` +- `${HOME}/.npm/_prebuilds` + +## Install + +With [npm](https://npmjs.org) do: + +``` +npm install prebuild-install +``` + +## License + +[MIT](./LICENSE) diff --git a/miniprogram/node_modules/prebuild-install/asset.js b/miniprogram/node_modules/prebuild-install/asset.js new file mode 100644 index 00000000..7a58e8b2 --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/asset.js @@ -0,0 +1,44 @@ +const get = require('simple-get') +const util = require('./util') +const proxy = require('./proxy') + +function findAssetId (opts, cb) { + const downloadUrl = util.getDownloadUrl(opts) + const apiUrl = util.getApiUrl(opts) + const log = opts.log || util.noopLogger + + log.http('request', 'GET ' + apiUrl) + const reqOpts = proxy({ + url: apiUrl, + json: true, + headers: { + 'User-Agent': 'simple-get', + Authorization: 'token ' + opts.token + } + }, opts) + + const req = get.concat(reqOpts, function (err, res, data) { + if (err) return cb(err) + log.http(res.statusCode, apiUrl) + if (res.statusCode !== 200) return cb(err) + + // Find asset id in release + for (const release of data) { + if (release.tag_name === opts['tag-prefix'] + opts.pkg.version) { + for (const asset of release.assets) { + if (asset.browser_download_url === downloadUrl) { + return cb(null, asset.id) + } + } + } + } + + cb(new Error('Could not find GitHub release for version')) + }) + + req.setTimeout(30 * 1000, function () { + req.abort() + }) +} + +module.exports = findAssetId diff --git a/miniprogram/node_modules/prebuild-install/bin.js b/miniprogram/node_modules/prebuild-install/bin.js new file mode 100644 index 00000000..e5260cce --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/bin.js @@ -0,0 +1,78 @@ +#!/usr/bin/env node + +const path = require('path') +const fs = require('fs') +const napi = require('napi-build-utils') + +const pkg = require(path.resolve('package.json')) +const rc = require('./rc')(pkg) +const log = require('./log')(rc, process.env) +const download = require('./download') +const asset = require('./asset') +const util = require('./util') + +const prebuildClientVersion = require('./package.json').version +if (rc.version) { + console.log(prebuildClientVersion) + process.exit(0) +} + +if (rc.path) process.chdir(rc.path) + +if (rc.runtime === 'electron' && rc.target[0] === '4' && rc.abi === '64') { + log.error(`Electron version ${rc.target} found - skipping prebuild-install work due to known ABI issue`) + log.error('More information about this issue can be found at https://github.com/lgeiger/node-abi/issues/54') + process.exit(1) +} + +if (!fs.existsSync('package.json')) { + log.error('setup', 'No package.json found. 
Aborting...') + process.exit(1) +} + +if (rc.help) { + console.error(fs.readFileSync(path.join(__dirname, 'help.txt'), 'utf-8')) + process.exit(0) +} + +log.info('begin', 'Prebuild-install version', prebuildClientVersion) + +const opts = Object.assign({}, rc, { pkg: pkg, log: log }) + +if (napi.isNapiRuntime(rc.runtime)) napi.logUnsupportedVersion(rc.target, log) + +const origin = util.packageOrigin(process.env, pkg) + +if (opts.force) { + log.warn('install', 'prebuilt binaries enforced with --force!') + log.warn('install', 'prebuilt binaries may be out of date!') +} else if (origin && origin.length > 4 && origin.substr(0, 4) === 'git+') { + log.info('install', 'installing from git repository, skipping download.') + process.exit(1) +} else if (opts.buildFromSource) { + log.info('install', '--build-from-source specified, not attempting download.') + process.exit(1) +} + +const startDownload = function (downloadUrl) { + download(downloadUrl, opts, function (err) { + if (err) { + log.warn('install', err.message) + return process.exit(1) + } + log.info('install', 'Successfully installed prebuilt binary!') + }) +} + +if (opts.token) { + asset(opts, function (err, assetId) { + if (err) { + log.warn('install', err.message) + return process.exit(1) + } + + startDownload(util.getAssetUrl(opts, assetId)) + }) +} else { + startDownload(util.getDownloadUrl(opts)) +} diff --git a/miniprogram/node_modules/prebuild-install/download.js b/miniprogram/node_modules/prebuild-install/download.js new file mode 100644 index 00000000..26f04b05 --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/download.js @@ -0,0 +1,142 @@ +const path = require('path') +const fs = require('fs') +const get = require('simple-get') +const pump = require('pump') +const tfs = require('tar-fs') +const zlib = require('zlib') +const util = require('./util') +const error = require('./error') +const proxy = require('./proxy') +const mkdirp = require('mkdirp-classic') + +function downloadPrebuild (downloadUrl, opts, cb) { + let cachedPrebuild = util.cachedPrebuild(downloadUrl) + const localPrebuild = util.localPrebuild(downloadUrl, opts) + const tempFile = util.tempFile(cachedPrebuild) + const log = opts.log || util.noopLogger + + if (opts.nolocal) return download() + + log.info('looking for local prebuild @', localPrebuild) + fs.access(localPrebuild, fs.R_OK | fs.W_OK, function (err) { + if (err && err.code === 'ENOENT') { + return download() + } + + log.info('found local prebuild') + cachedPrebuild = localPrebuild + unpack() + }) + + function download () { + ensureNpmCacheDir(function (err) { + if (err) return onerror(err) + + log.info('looking for cached prebuild @', cachedPrebuild) + fs.access(cachedPrebuild, fs.R_OK | fs.W_OK, function (err) { + if (!(err && err.code === 'ENOENT')) { + log.info('found cached prebuild') + return unpack() + } + + log.http('request', 'GET ' + downloadUrl) + const reqOpts = proxy({ url: downloadUrl }, opts) + + if (opts.token) { + reqOpts.headers = { + 'User-Agent': 'simple-get', + Accept: 'application/octet-stream', + Authorization: 'token ' + opts.token + } + } + + const req = get(reqOpts, function (err, res) { + if (err) return onerror(err) + log.http(res.statusCode, downloadUrl) + if (res.statusCode !== 200) return onerror() + mkdirp(util.prebuildCache(), function () { + log.info('downloading to @', tempFile) + pump(res, fs.createWriteStream(tempFile), function (err) { + if (err) return onerror(err) + fs.rename(tempFile, cachedPrebuild, function (err) { + if (err) return cb(err) + 
log.info('renaming to @', cachedPrebuild) + unpack() + }) + }) + }) + }) + + req.setTimeout(30 * 1000, function () { + req.abort() + }) + }) + + function onerror (err) { + fs.unlink(tempFile, function () { + cb(err || error.noPrebuilts(opts)) + }) + } + }) + } + + function unpack () { + let binaryName + + const updateName = opts.updateName || function (entry) { + if (/\.node$/i.test(entry.name)) binaryName = entry.name + } + + log.info('unpacking @', cachedPrebuild) + + const options = { + readable: true, + writable: true, + hardlinkAsFilesFallback: true + } + const extract = tfs.extract(opts.path, options).on('entry', updateName) + + pump(fs.createReadStream(cachedPrebuild), zlib.createGunzip(), extract, + function (err) { + if (err) return cb(err) + + let resolved + if (binaryName) { + try { + resolved = path.resolve(opts.path || '.', binaryName) + } catch (err) { + return cb(err) + } + log.info('unpack', 'resolved to ' + resolved) + + if (opts.runtime === 'node' && opts.platform === process.platform && opts.abi === process.versions.modules && opts.arch === process.arch) { + try { + require(resolved) + } catch (err) { + return cb(err) + } + log.info('unpack', 'required ' + resolved + ' successfully') + } + } + + cb(null, resolved) + }) + } + + function ensureNpmCacheDir (cb) { + const cacheFolder = util.npmCache() + fs.access(cacheFolder, fs.R_OK | fs.W_OK, function (err) { + if (err && err.code === 'ENOENT') { + return makeNpmCacheDir() + } + cb(err) + }) + + function makeNpmCacheDir () { + log.info('npm cache directory missing, creating it...') + mkdirp(cacheFolder, cb) + } + } +} + +module.exports = downloadPrebuild diff --git a/miniprogram/node_modules/prebuild-install/error.js b/miniprogram/node_modules/prebuild-install/error.js new file mode 100644 index 00000000..c266c189 --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/error.js @@ -0,0 +1,14 @@ +exports.noPrebuilts = function (opts) { + return new Error([ + 'No prebuilt binaries found', + '(target=' + opts.target, + 'runtime=' + opts.runtime, + 'arch=' + opts.arch, + 'libc=' + opts.libc, + 'platform=' + opts.platform + ')' + ].join(' ')) +} + +exports.invalidArchive = function () { + return new Error('Missing .node file in archive') +} diff --git a/miniprogram/node_modules/prebuild-install/help.txt b/miniprogram/node_modules/prebuild-install/help.txt new file mode 100644 index 00000000..0dd316e3 --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/help.txt @@ -0,0 +1,16 @@ +prebuild-install [options] + + --download -d [url] (download prebuilds, no url means github) + --target -t version (version to install for) + --runtime -r runtime (Node runtime [node or electron] to build or install for, default is node) + --path -p path (make a prebuild-install here) + --token -T gh-token (github token for private repos) + --arch arch (target CPU architecture, see Node OS module docs, default is current arch) + --platform platform (target platform, see Node OS module docs, default is current platform) + --tag-prefix (github tag prefix, default is "v") + --force (always use prebuilt binaries when available) + --build-from-source (skip prebuild download) + --verbose (log verbosely) + --libc (use provided libc rather than system default) + --debug (set Debug or Release configuration) + --version (print prebuild-install version and exit) diff --git a/miniprogram/node_modules/prebuild-install/index.js b/miniprogram/node_modules/prebuild-install/index.js new file mode 100644 index 00000000..b5fc28a7 --- /dev/null +++ 
b/miniprogram/node_modules/prebuild-install/index.js @@ -0,0 +1 @@ +exports.download = require('./download') diff --git a/miniprogram/node_modules/prebuild-install/log.js b/miniprogram/node_modules/prebuild-install/log.js new file mode 100644 index 00000000..b5ecc01b --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/log.js @@ -0,0 +1,33 @@ +const levels = { + silent: 0, + error: 1, + warn: 2, + notice: 3, + http: 4, + timing: 5, + info: 6, + verbose: 7, + silly: 8 +} + +module.exports = function (rc, env) { + const level = rc.verbose + ? 'verbose' + : env.npm_config_loglevel || 'notice' + + const logAtLevel = function (messageLevel) { + return function (...args) { + if (levels[messageLevel] <= levels[level]) { + console.error(`prebuild-install ${messageLevel} ${args.join(' ')}`) + } + } + } + + return { + error: logAtLevel('error'), + warn: logAtLevel('warn'), + http: logAtLevel('http'), + info: logAtLevel('info'), + level + } +} diff --git a/miniprogram/node_modules/prebuild-install/package.json b/miniprogram/node_modules/prebuild-install/package.json new file mode 100644 index 00000000..316b8c2c --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/package.json @@ -0,0 +1,67 @@ +{ + "name": "prebuild-install", + "version": "7.1.3", + "description": "A command line tool to easily install prebuilt binaries for multiple version of node/iojs on a specific platform", + "scripts": { + "test": "standard && hallmark && tape test/*-test.js", + "hallmark": "hallmark --fix" + }, + "keywords": [ + "prebuilt", + "binaries", + "native", + "addon", + "module", + "c", + "c++", + "bindings", + "devops", + "napi" + ], + "dependencies": { + "detect-libc": "^2.0.0", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.3", + "mkdirp-classic": "^0.5.3", + "napi-build-utils": "^2.0.0", + "node-abi": "^3.3.0", + "pump": "^3.0.0", + "rc": "^1.2.7", + "simple-get": "^4.0.0", + "tar-fs": "^2.0.0", + "tunnel-agent": "^0.6.0" + }, + "devDependencies": { + "a-native-module": "^1.0.0", + "hallmark": "^4.0.0", + "nock": "^10.0.6", + "rimraf": "^2.5.2", + "standard": "^16.0.4", + "tape": "^5.3.1", + "tempy": "0.2.1" + }, + "bin": "./bin.js", + "repository": { + "type": "git", + "url": "https://github.com/prebuild/prebuild-install.git" + }, + "author": "Mathias Buus (@mafintosh)", + "contributors": [ + "Julian Gruber (https://github.com/juliangruber)", + "Brett Lawson (https://github.com/brett19)", + "Pieter Hintjens (https://github.com/hintjens)", + "Lars-Magnus Skog (https://github.com/ralphtheninja)", + "Jesús Leganés Combarro (https://github.com/piranna)", + "Mathias Küsel (https://github.com/mathiask88)", + "Lukas Geiger (https://github.com/lgeiger)" + ], + "license": "MIT", + "bugs": { + "url": "https://github.com/prebuild/prebuild-install/issues" + }, + "homepage": "https://github.com/prebuild/prebuild-install", + "engines": { + "node": ">=10" + } +} \ No newline at end of file diff --git a/miniprogram/node_modules/prebuild-install/proxy.js b/miniprogram/node_modules/prebuild-install/proxy.js new file mode 100644 index 00000000..40d3aea3 --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/proxy.js @@ -0,0 +1,35 @@ +const url = require('url') +const tunnel = require('tunnel-agent') +const util = require('./util') + +function applyProxy (reqOpts, opts) { + const log = opts.log || util.noopLogger + + const proxy = opts['https-proxy'] || opts.proxy + + if (proxy) { + // eslint-disable-next-line node/no-deprecated-api + const parsedDownloadUrl = 
url.parse(reqOpts.url) + // eslint-disable-next-line node/no-deprecated-api + const parsedProxy = url.parse(proxy) + const uriProtocol = (parsedDownloadUrl.protocol === 'https:' ? 'https' : 'http') + const proxyProtocol = (parsedProxy.protocol === 'https:' ? 'Https' : 'Http') + const tunnelFnName = [uriProtocol, proxyProtocol].join('Over') + reqOpts.agent = tunnel[tunnelFnName]({ + proxy: { + host: parsedProxy.hostname, + port: +parsedProxy.port, + proxyAuth: parsedProxy.auth + } + }) + log.http('request', 'Proxy setup detected (Host: ' + + parsedProxy.hostname + ', Port: ' + + parsedProxy.port + ', Authentication: ' + + (parsedProxy.auth ? 'Yes' : 'No') + ')' + + ' Tunneling with ' + tunnelFnName) + } + + return reqOpts +} + +module.exports = applyProxy diff --git a/miniprogram/node_modules/prebuild-install/rc.js b/miniprogram/node_modules/prebuild-install/rc.js new file mode 100644 index 00000000..de0ea7a7 --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/rc.js @@ -0,0 +1,64 @@ +const path = require('path') +const minimist = require('minimist') +const getAbi = require('node-abi').getAbi +const detectLibc = require('detect-libc') +const napi = require('napi-build-utils') + +const env = process.env + +const libc = env.LIBC || process.env.npm_config_libc || + (detectLibc.isNonGlibcLinuxSync() && detectLibc.familySync()) || '' + +// Get the configuration +module.exports = function (pkg) { + const pkgConf = pkg.config || {} + const buildFromSource = env.npm_config_build_from_source + + const rc = require('rc')('prebuild-install', { + target: pkgConf.target || env.npm_config_target || process.versions.node, + runtime: pkgConf.runtime || env.npm_config_runtime || 'node', + arch: pkgConf.arch || env.npm_config_arch || process.arch, + libc: libc, + platform: env.npm_config_platform || process.platform, + debug: env.npm_config_debug === 'true', + force: false, + verbose: env.npm_config_verbose === 'true', + buildFromSource: buildFromSource === pkg.name || buildFromSource === 'true', + path: '.', + proxy: env.npm_config_proxy || env.http_proxy || env.HTTP_PROXY, + 'https-proxy': env.npm_config_https_proxy || env.https_proxy || env.HTTPS_PROXY, + 'local-address': env.npm_config_local_address, + 'local-prebuilds': 'prebuilds', + 'tag-prefix': 'v', + download: env.npm_config_download + }, minimist(process.argv, { + alias: { + target: 't', + runtime: 'r', + help: 'h', + arch: 'a', + path: 'p', + version: 'v', + download: 'd', + buildFromSource: 'build-from-source', + token: 'T' + } + })) + + rc.path = path.resolve(rc.path === true ? '.' : rc.path || '.') + + if (napi.isNapiRuntime(rc.runtime) && rc.target === process.versions.node) { + rc.target = napi.getBestNapiBuildVersion() + } + + rc.abi = napi.isNapiRuntime(rc.runtime) ? rc.target : getAbi(rc.target, rc.runtime) + + rc.libc = rc.platform !== 'linux' || rc.libc === detectLibc.GLIBC ? 
'' : rc.libc + + return rc +} + +// Print the configuration values when executed standalone for testing purposses +if (!module.parent) { + console.log(JSON.stringify(module.exports({}), null, 2)) +} diff --git a/miniprogram/node_modules/prebuild-install/util.js b/miniprogram/node_modules/prebuild-install/util.js new file mode 100644 index 00000000..d7cc515d --- /dev/null +++ b/miniprogram/node_modules/prebuild-install/util.js @@ -0,0 +1,143 @@ +const path = require('path') +const github = require('github-from-package') +const home = require('os').homedir +const crypto = require('crypto') +const expandTemplate = require('expand-template')() + +function getDownloadUrl (opts) { + const pkgName = opts.pkg.name.replace(/^@[a-zA-Z0-9_\-.~]+\//, '') + return expandTemplate(urlTemplate(opts), { + name: pkgName, + package_name: pkgName, + version: opts.pkg.version, + major: opts.pkg.version.split('.')[0], + minor: opts.pkg.version.split('.')[1], + patch: opts.pkg.version.split('.')[2], + prerelease: opts.pkg.version.split('-')[1], + build: opts.pkg.version.split('+')[1], + abi: opts.abi || process.versions.modules, + node_abi: process.versions.modules, + runtime: opts.runtime || 'node', + platform: opts.platform, + arch: opts.arch, + libc: opts.libc || '', + configuration: (opts.debug ? 'Debug' : 'Release'), + module_name: opts.pkg.binary && opts.pkg.binary.module_name, + tag_prefix: opts['tag-prefix'] + }) +} + +function getApiUrl (opts) { + return github(opts.pkg).replace('github.com', 'api.github.com/repos') + '/releases' +} + +function getAssetUrl (opts, assetId) { + return getApiUrl(opts) + '/assets/' + assetId +} + +function urlTemplate (opts) { + if (typeof opts.download === 'string') { + return opts.download + } + + const packageName = '{name}-v{version}-{runtime}-v{abi}-{platform}{libc}-{arch}.tar.gz' + const hostMirrorUrl = getHostMirrorUrl(opts) + + if (hostMirrorUrl) { + return hostMirrorUrl + '/{tag_prefix}{version}/' + packageName + } + + if (opts.pkg.binary && opts.pkg.binary.host) { + return [ + opts.pkg.binary.host, + opts.pkg.binary.remote_path, + opts.pkg.binary.package_name || packageName + ].map(function (path) { + return trimSlashes(path) + }).filter(Boolean).join('/') + } + + return github(opts.pkg) + '/releases/download/{tag_prefix}{version}/' + packageName +} + +function getEnvPrefix (pkgName) { + return 'npm_config_' + (pkgName || '').replace(/[^a-zA-Z0-9]/g, '_').replace(/^_/, '') +} + +function getHostMirrorUrl (opts) { + const propName = getEnvPrefix(opts.pkg.name) + '_binary_host' + return process.env[propName] || process.env[propName + '_mirror'] +} + +function trimSlashes (str) { + if (str) return str.replace(/^\.\/|^\/|\/$/g, '') +} + +function cachedPrebuild (url) { + const digest = crypto.createHash('sha512').update(url).digest('hex').slice(0, 6) + return path.join(prebuildCache(), digest + '-' + path.basename(url).replace(/[^a-zA-Z0-9.]+/g, '-')) +} + +function npmCache () { + const env = process.env + return env.npm_config_cache || (env.APPDATA ? path.join(env.APPDATA, 'npm-cache') : path.join(home(), '.npm')) +} + +function prebuildCache () { + return path.join(npmCache(), '_prebuilds') +} + +function tempFile (cached) { + return cached + '.' 
+ process.pid + '-' + Math.random().toString(16).slice(2) + '.tmp' +} + +function packageOrigin (env, pkg) { + // npm <= 6: metadata is stored on disk in node_modules + if (pkg._from) { + return pkg._from + } + + // npm 7: metadata is exposed to environment by arborist + if (env.npm_package_from) { + // NOTE: seems undefined atm (npm 7.0.2) + return env.npm_package_from + } + + if (env.npm_package_resolved) { + // NOTE: not sure about the difference with _from, but it's all we have + return env.npm_package_resolved + } +} + +function localPrebuild (url, opts) { + const propName = getEnvPrefix(opts.pkg.name) + '_local_prebuilds' + const prefix = process.env[propName] || opts['local-prebuilds'] || 'prebuilds' + return path.join(prefix, path.basename(url)) +} + +const noopLogger = { + http: function () {}, + silly: function () {}, + debug: function () {}, + info: function () {}, + warn: function () {}, + error: function () {}, + critical: function () {}, + alert: function () {}, + emergency: function () {}, + notice: function () {}, + verbose: function () {}, + fatal: function () {} +} + +exports.getDownloadUrl = getDownloadUrl +exports.getApiUrl = getApiUrl +exports.getAssetUrl = getAssetUrl +exports.urlTemplate = urlTemplate +exports.cachedPrebuild = cachedPrebuild +exports.localPrebuild = localPrebuild +exports.prebuildCache = prebuildCache +exports.npmCache = npmCache +exports.tempFile = tempFile +exports.packageOrigin = packageOrigin +exports.noopLogger = noopLogger diff --git a/miniprogram/node_modules/pump/.github/FUNDING.yml b/miniprogram/node_modules/pump/.github/FUNDING.yml new file mode 100644 index 00000000..f6c9139a --- /dev/null +++ b/miniprogram/node_modules/pump/.github/FUNDING.yml @@ -0,0 +1,2 @@ +github: mafintosh +tidelift: "npm/pump" diff --git a/miniprogram/node_modules/pump/.travis.yml b/miniprogram/node_modules/pump/.travis.yml new file mode 100644 index 00000000..17f94330 --- /dev/null +++ b/miniprogram/node_modules/pump/.travis.yml @@ -0,0 +1,5 @@ +language: node_js +node_js: + - "0.10" + +script: "npm test" diff --git a/miniprogram/node_modules/pump/LICENSE b/miniprogram/node_modules/pump/LICENSE new file mode 100644 index 00000000..757562ec --- /dev/null +++ b/miniprogram/node_modules/pump/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/miniprogram/node_modules/pump/README.md b/miniprogram/node_modules/pump/README.md new file mode 100644 index 00000000..5dcd8a52 --- /dev/null +++ b/miniprogram/node_modules/pump/README.md @@ -0,0 +1,74 @@ +# pump + +pump is a small node module that pipes streams together and destroys all of them if one of them closes. + +``` +npm install pump +``` + +[![build status](http://img.shields.io/travis/mafintosh/pump.svg?style=flat)](http://travis-ci.org/mafintosh/pump) + +## What problem does it solve? + +When using standard `source.pipe(dest)` source will _not_ be destroyed if dest emits close or an error. +You are also not able to provide a callback to tell when the pipe has finished. + +pump does these two things for you. + +## Usage + +Simply pass the streams you want to pipe together to pump and add an optional callback. + +``` js +var pump = require('pump') +var fs = require('fs') + +var source = fs.createReadStream('/dev/random') +var dest = fs.createWriteStream('/dev/null') + +pump(source, dest, function(err) { + console.log('pipe finished', err) +}) + +setTimeout(function() { + dest.destroy() // when dest is closed pump will destroy source +}, 1000) ``` + +You can use pump to pipe more than two streams together as well + +``` js +var transform = someTransformStream() + +pump(source, transform, anotherTransform, dest, function(err) { + console.log('pipe finished', err) +}) +``` + +If `source`, `transform`, `anotherTransform` or `dest` closes all of them will be destroyed. + +Similarly to `stream.pipe()`, `pump()` returns the last stream passed in, so you can do: + +``` +return pump(s1, s2) // returns s2 +``` + +Note that `pump` attaches error handlers to the streams to do internal error handling, so if `s2` emits an +error in the above scenario, it will not trigger a `process.on('uncaughtException')` if you do not listen for it. + +If you want to return a stream that combines *both* s1 and s2 into a single stream use +[pumpify](https://github.com/mafintosh/pumpify) instead. + +## License + +MIT + +## Related + +`pump` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. + +## For enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of pump and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-pump?utm_source=npm-pump&utm_medium=referral&utm_campaign=enterprise) diff --git a/miniprogram/node_modules/pump/SECURITY.md b/miniprogram/node_modules/pump/SECURITY.md new file mode 100644 index 00000000..da9c516d --- /dev/null +++ b/miniprogram/node_modules/pump/SECURITY.md @@ -0,0 +1,5 @@ +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. 
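This vendored copy of pump is what the unpack step in `download.js` above relies on. A minimal standalone sketch of that gunzip-and-extract pipeline (the archive path and destination directory below are made up for illustration) could look like this:

```js
const fs = require('fs')
const zlib = require('zlib')
const tfs = require('tar-fs')
const pump = require('pump')

// Hypothetical paths, for illustration only.
const archive = '/tmp/_prebuilds/example-v1.0.0-node-v93-linux-x64.tar.gz'
const dest = './build/Release'

// Same shape as the unpack() flow in download.js: read the cached tarball,
// gunzip it, untar it, and let pump destroy every stream if any of them fails.
pump(
  fs.createReadStream(archive),
  zlib.createGunzip(),
  tfs.extract(dest),
  function (err) {
    if (err) console.error('unpack failed:', err.message)
    else console.log('unpacked to', dest)
  }
)
```

If any stream in the chain errors or closes early, the callback fires once with that error, which is the behaviour `download.js` leans on instead of wiring up per-stream error handlers.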
diff --git a/miniprogram/node_modules/pump/index.js b/miniprogram/node_modules/pump/index.js new file mode 100644 index 00000000..712c076a --- /dev/null +++ b/miniprogram/node_modules/pump/index.js @@ -0,0 +1,86 @@ +var once = require('once') +var eos = require('end-of-stream') +var fs + +try { + fs = require('fs') // we only need fs to get the ReadStream and WriteStream prototypes +} catch (e) {} + +var noop = function () {} +var ancient = typeof process === 'undefined' ? false : /^v?\.0/.test(process.version) + +var isFn = function (fn) { + return typeof fn === 'function' +} + +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +} + +var isRequest = function (stream) { + return stream.setHeader && isFn(stream.abort) +} + +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) + + var closed = false + stream.on('close', function () { + closed = true + }) + + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) + + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true + + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + + if (isFn(stream.destroy)) return stream.destroy() + + callback(err || new Error('stream was destroyed')) + } +} + +var call = function (fn) { + fn() +} + +var pipe = function (from, to) { + return from.pipe(to) +} + +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop + + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') + + var error + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) + + return streams.reduce(pipe) +} + +module.exports = pump diff --git a/miniprogram/node_modules/pump/package.json b/miniprogram/node_modules/pump/package.json new file mode 100644 index 00000000..976555cd --- /dev/null +++ b/miniprogram/node_modules/pump/package.json @@ -0,0 +1,24 @@ +{ + "name": "pump", + "version": "3.0.3", + "repository": "git://github.com/mafintosh/pump.git", + "license": "MIT", + "description": "pipe streams together and close all of them if one of them closes", + "browser": { + "fs": false + }, + "keywords": [ + "streams", + "pipe", + "destroy", + "callback" + ], + "author": "Mathias Buus Madsen ", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + }, + "scripts": { + "test": "node test-browser.js && node test-node.js" + } +} diff --git a/miniprogram/node_modules/pump/test-browser.js b/miniprogram/node_modules/pump/test-browser.js new file mode 100644 index 00000000..9a06c8a4 --- /dev/null +++ b/miniprogram/node_modules/pump/test-browser.js @@ -0,0 +1,66 @@ +var stream = require('stream') +var pump = require('./index') + +var rs = new stream.Readable() 
+var ws = new stream.Writable() + +rs._read = function (size) { + this.push(Buffer(size).fill('abc')) +} + +ws._write = function (chunk, encoding, cb) { + setTimeout(function () { + cb() + }, 100) +} + +var toHex = function () { + var reverse = new (require('stream').Transform)() + + reverse._transform = function (chunk, enc, callback) { + reverse.push(chunk.toString('hex')) + callback() + } + + return reverse +} + +var wsClosed = false +var rsClosed = false +var callbackCalled = false + +var check = function () { + if (wsClosed && rsClosed && callbackCalled) { + console.log('test-browser.js passes') + clearTimeout(timeout) + } +} + +ws.on('finish', function () { + wsClosed = true + check() +}) + +rs.on('end', function () { + rsClosed = true + check() +}) + +var res = pump(rs, toHex(), toHex(), toHex(), ws, function () { + callbackCalled = true + check() +}) + +if (res !== ws) { + throw new Error('should return last stream') +} + +setTimeout(function () { + rs.push(null) + rs.emit('close') +}, 1000) + +var timeout = setTimeout(function () { + check() + throw new Error('timeout') +}, 5000) diff --git a/miniprogram/node_modules/pump/test-node.js b/miniprogram/node_modules/pump/test-node.js new file mode 100644 index 00000000..561251a0 --- /dev/null +++ b/miniprogram/node_modules/pump/test-node.js @@ -0,0 +1,53 @@ +var pump = require('./index') + +var rs = require('fs').createReadStream('/dev/random') +var ws = require('fs').createWriteStream('/dev/null') + +var toHex = function () { + var reverse = new (require('stream').Transform)() + + reverse._transform = function (chunk, enc, callback) { + reverse.push(chunk.toString('hex')) + callback() + } + + return reverse +} + +var wsClosed = false +var rsClosed = false +var callbackCalled = false + +var check = function () { + if (wsClosed && rsClosed && callbackCalled) { + console.log('test-node.js passes') + clearTimeout(timeout) + } +} + +ws.on('close', function () { + wsClosed = true + check() +}) + +rs.on('close', function () { + rsClosed = true + check() +}) + +var res = pump(rs, toHex(), toHex(), toHex(), ws, function () { + callbackCalled = true + check() +}) + +if (res !== ws) { + throw new Error('should return last stream') +} + +setTimeout(function () { + rs.destroy() +}, 1000) + +var timeout = setTimeout(function () { + throw new Error('timeout') +}, 5000) diff --git a/miniprogram/node_modules/rc/LICENSE.APACHE2 b/miniprogram/node_modules/rc/LICENSE.APACHE2 new file mode 100644 index 00000000..6366c047 --- /dev/null +++ b/miniprogram/node_modules/rc/LICENSE.APACHE2 @@ -0,0 +1,15 @@ +Apache License, Version 2.0 + +Copyright (c) 2011 Dominic Tarr + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/miniprogram/node_modules/rc/LICENSE.BSD b/miniprogram/node_modules/rc/LICENSE.BSD new file mode 100644 index 00000000..96bb796a --- /dev/null +++ b/miniprogram/node_modules/rc/LICENSE.BSD @@ -0,0 +1,26 @@ +Copyright (c) 2013, Dominic Tarr +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The views and conclusions contained in the software and documentation are those +of the authors and should not be interpreted as representing official policies, +either expressed or implied, of the FreeBSD Project. diff --git a/miniprogram/node_modules/rc/LICENSE.MIT b/miniprogram/node_modules/rc/LICENSE.MIT new file mode 100644 index 00000000..6eafbd73 --- /dev/null +++ b/miniprogram/node_modules/rc/LICENSE.MIT @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2011 Dominic Tarr + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/miniprogram/node_modules/rc/README.md b/miniprogram/node_modules/rc/README.md new file mode 100644 index 00000000..e6522e26 --- /dev/null +++ b/miniprogram/node_modules/rc/README.md @@ -0,0 +1,227 @@ +# rc + +The non-configurable configuration loader for lazy people. + +## Usage + +The only option is to pass rc the name of your app, and your default configuration. + +```javascript +var conf = require('rc')(appname, { + //defaults go here. + port: 2468, + + //defaults which are objects will be merged, not replaced + views: { + engine: 'jade' + } +}); +``` + +`rc` will return your configuration options merged with the defaults you specify. 
+If you pass in a predefined defaults object, it will be mutated: + +```javascript +var conf = {}; +require('rc')(appname, conf); +``` + +If `rc` finds any config files for your app, the returned config object will have +a `configs` array containing their paths: + +```javascript +var appCfg = require('rc')(appname, conf); +appCfg.configs[0] // /etc/appnamerc +appCfg.configs[1] // /home/dominictarr/.config/appname +appCfg.config // same as appCfg.configs[appCfg.configs.length - 1] +``` + +## Standards + +Given your application name (`appname`), rc will look in all the obvious places for configuration. + + * command line arguments, parsed by minimist _(e.g. `--foo baz`, also nested: `--foo.bar=baz`)_ + * environment variables prefixed with `${appname}_` + * or use "\_\_" to indicate nested properties
_(e.g. `appname_foo__bar__baz` => `foo.bar.baz`)_ + * if you passed an option `--config file` then from that file + * a local `.${appname}rc` or the first found looking in `./ ../ ../../ ../../../` etc. + * `$HOME/.${appname}rc` + * `$HOME/.${appname}/config` + * `$HOME/.config/${appname}` + * `$HOME/.config/${appname}/config` + * `/etc/${appname}rc` + * `/etc/${appname}/config` + * the defaults object you passed in. + +All configuration sources that were found will be flattened into one object, +so that sources **earlier** in this list override later ones. + + +## Configuration File Formats + +Configuration files (e.g. `.appnamerc`) may be in either [json](http://json.org/example) or [ini](http://en.wikipedia.org/wiki/INI_file) format. **No** file extension (`.json` or `.ini`) should be used. The example configurations below are equivalent: + + +#### Formatted as `ini` + +``` +; You can include comments in `ini` format if you want. + +dependsOn=0.10.0 + + +; `rc` has built-in support for ini sections, see? + +[commands] + www = ./commands/www + console = ./commands/repl + + +; You can even do nested sections + +[generators.options] + engine = ejs + +[generators.modules] + new = generate-new + engine = generate-backend + +``` + +#### Formatted as `json` + +```javascript +{ + // You can even comment your JSON, if you want + "dependsOn": "0.10.0", + "commands": { + "www": "./commands/www", + "console": "./commands/repl" + }, + "generators": { + "options": { + "engine": "ejs" + }, + "modules": { + "new": "generate-new", + "backend": "generate-backend" + } + } +} +``` + +Comments are stripped from JSON config via [strip-json-comments](https://github.com/sindresorhus/strip-json-comments). + +> Since ini, and env variables do not have a standard for types, your application needs be prepared for strings. + +To ensure that string representations of booleans and numbers are always converted into their proper types (especially useful if you intend to do strict `===` comparisons), consider using a module such as [parse-strings-in-object](https://github.com/anselanza/parse-strings-in-object) to wrap the config object returned from rc. + + +## Simple example demonstrating precedence +Assume you have an application like this (notice the hard-coded defaults passed to rc): +``` +const conf = require('rc')('myapp', { + port: 12345, + mode: 'test' +}); + +console.log(JSON.stringify(conf, null, 2)); +``` +You also have a file `config.json`, with these contents: +``` +{ + "port": 9000, + "foo": "from config json", + "something": "else" +} +``` +And a file `.myapprc` in the same folder, with these contents: +``` +{ + "port": "3001", + "foo": "bar" +} +``` +Here is the expected output from various commands: + +`node .` +``` +{ + "port": "3001", + "mode": "test", + "foo": "bar", + "_": [], + "configs": [ + "/Users/stephen/repos/conftest/.myapprc" + ], + "config": "/Users/stephen/repos/conftest/.myapprc" +} +``` +*Default `mode` from hard-coded object is retained, but port is overridden by `.myapprc` file (automatically found based on appname match), and `foo` is added.* + + +`node . --foo baz` +``` +{ + "port": "3001", + "mode": "test", + "foo": "baz", + "_": [], + "configs": [ + "/Users/stephen/repos/conftest/.myapprc" + ], + "config": "/Users/stephen/repos/conftest/.myapprc" +} +``` +*Same result as above but `foo` is overridden because command-line arguments take precedence over `.myapprc` file.* + +`node . 
--foo barbar --config config.json` +``` +{ + "port": 9000, + "mode": "test", + "foo": "barbar", + "something": "else", + "_": [], + "config": "config.json", + "configs": [ + "/Users/stephen/repos/conftest/.myapprc", + "config.json" + ] +} +``` +*Now the `port` comes from the `config.json` file specified (overriding the value from `.myapprc`), and `foo` value is overriden by command-line despite also being specified in the `config.json` file.* + + + +## Advanced Usage + +#### Pass in your own `argv` + +You may pass in your own `argv` as the third argument to `rc`. This is in case you want to [use your own command-line opts parser](https://github.com/dominictarr/rc/pull/12). + +```javascript +require('rc')(appname, defaults, customArgvParser); +``` + +## Pass in your own parser + +If you have a special need to use a non-standard parser, +you can do so by passing in the parser as the 4th argument. +(leave the 3rd as null to get the default args parser) + +```javascript +require('rc')(appname, defaults, null, parser); +``` + +This may also be used to force a more strict format, +such as strict, valid JSON only. + +## Note on Performance + +`rc` is running `fs.statSync`-- so make sure you don't use it in a hot code path (e.g. a request handler) + + +## License + +Multi-licensed under the two-clause BSD License, MIT License, or Apache License, version 2.0 diff --git a/miniprogram/node_modules/rc/browser.js b/miniprogram/node_modules/rc/browser.js new file mode 100644 index 00000000..8c230c5c --- /dev/null +++ b/miniprogram/node_modules/rc/browser.js @@ -0,0 +1,7 @@ + +// when this is loaded into the browser, +// just use the defaults... + +module.exports = function (name, defaults) { + return defaults +} diff --git a/miniprogram/node_modules/rc/cli.js b/miniprogram/node_modules/rc/cli.js new file mode 100644 index 00000000..ab05b607 --- /dev/null +++ b/miniprogram/node_modules/rc/cli.js @@ -0,0 +1,4 @@ +#! /usr/bin/env node +var rc = require('./index') + +console.log(JSON.stringify(rc(process.argv[2]), false, 2)) diff --git a/miniprogram/node_modules/rc/index.js b/miniprogram/node_modules/rc/index.js new file mode 100644 index 00000000..65eb47af --- /dev/null +++ b/miniprogram/node_modules/rc/index.js @@ -0,0 +1,53 @@ +var cc = require('./lib/utils') +var join = require('path').join +var deepExtend = require('deep-extend') +var etc = '/etc' +var win = process.platform === "win32" +var home = win + ? process.env.USERPROFILE + : process.env.HOME + +module.exports = function (name, defaults, argv, parse) { + if('string' !== typeof name) + throw new Error('rc(name): name *must* be string') + if(!argv) + argv = require('minimist')(process.argv.slice(2)) + defaults = ( + 'string' === typeof defaults + ? cc.json(defaults) : defaults + ) || {} + + parse = parse || cc.parse + + var env = cc.env(name + '_') + + var configs = [defaults] + var configFiles = [] + function addConfigFile (file) { + if (configFiles.indexOf(file) >= 0) return + var fileConfig = cc.file(file) + if (fileConfig) { + configs.push(parse(fileConfig)) + configFiles.push(file) + } + } + + // which files do we look at? + if (!win) + [join(etc, name, 'config'), + join(etc, name + 'rc')].forEach(addConfigFile) + if (home) + [join(home, '.config', name, 'config'), + join(home, '.config', name), + join(home, '.' + name, 'config'), + join(home, '.' 
+ name + 'rc')].forEach(addConfigFile) + addConfigFile(cc.find('.'+name+'rc')) + if (env.config) addConfigFile(env.config) + if (argv.config) addConfigFile(argv.config) + + return deepExtend.apply(null, configs.concat([ + env, + argv, + configFiles.length ? {configs: configFiles, config: configFiles[configFiles.length - 1]} : undefined, + ])) +} diff --git a/miniprogram/node_modules/rc/lib/utils.js b/miniprogram/node_modules/rc/lib/utils.js new file mode 100644 index 00000000..8b3beffa --- /dev/null +++ b/miniprogram/node_modules/rc/lib/utils.js @@ -0,0 +1,104 @@ +'use strict'; +var fs = require('fs') +var ini = require('ini') +var path = require('path') +var stripJsonComments = require('strip-json-comments') + +var parse = exports.parse = function (content) { + + //if it ends in .json or starts with { then it must be json. + //must be done this way, because ini accepts everything. + //can't just try and parse it and let it throw if it's not ini. + //everything is ini. even json with a syntax error. + + if(/^\s*{/.test(content)) + return JSON.parse(stripJsonComments(content)) + return ini.parse(content) + +} + +var file = exports.file = function () { + var args = [].slice.call(arguments).filter(function (arg) { return arg != null }) + + //path.join breaks if it's a not a string, so just skip this. + for(var i in args) + if('string' !== typeof args[i]) + return + + var file = path.join.apply(null, args) + var content + try { + return fs.readFileSync(file,'utf-8') + } catch (err) { + return + } +} + +var json = exports.json = function () { + var content = file.apply(null, arguments) + return content ? parse(content) : null +} + +var env = exports.env = function (prefix, env) { + env = env || process.env + var obj = {} + var l = prefix.length + for(var k in env) { + if(k.toLowerCase().indexOf(prefix.toLowerCase()) === 0) { + + var keypath = k.substring(l).split('__') + + // Trim empty strings from keypath array + var _emptyStringIndex + while ((_emptyStringIndex=keypath.indexOf('')) > -1) { + keypath.splice(_emptyStringIndex, 1) + } + + var cursor = obj + keypath.forEach(function _buildSubObj(_subkey,i){ + + // (check for _subkey first so we ignore empty strings) + // (check for cursor to avoid assignment to primitive objects) + if (!_subkey || typeof cursor !== 'object') + return + + // If this is the last key, just stuff the value in there + // Assigns actual value from env variable to final key + // (unless it's just an empty string- in that case use the last valid key) + if (i === keypath.length-1) + cursor[_subkey] = env[k] + + + // Build sub-object if nothing already exists at the keypath + if (cursor[_subkey] === undefined) + cursor[_subkey] = {} + + // Increment cursor used to track the object at the current depth + cursor = cursor[_subkey] + + }) + + } + + } + + return obj +} + +var find = exports.find = function () { + var rel = path.join.apply(null, [].slice.call(arguments)) + + function find(start, rel) { + var file = path.join(start, rel) + try { + fs.statSync(file) + return file + } catch (err) { + if(path.dirname(start) !== start) // root + return find(path.dirname(start), rel) + } + } + return find(process.cwd(), rel) +} + + diff --git a/miniprogram/node_modules/rc/package.json b/miniprogram/node_modules/rc/package.json new file mode 100644 index 00000000..887238fa --- /dev/null +++ b/miniprogram/node_modules/rc/package.json @@ -0,0 +1,29 @@ +{ + "name": "rc", + "version": "1.2.8", + "description": "hardwired configuration loader", + "main": "index.js", + "browser": 
"browser.js", + "scripts": { + "test": "set -e; node test/test.js; node test/ini.js; node test/nested-env-vars.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/dominictarr/rc.git" + }, + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "keywords": [ + "config", + "rc", + "unix", + "defaults" + ], + "bin": "./cli.js", + "author": "Dominic Tarr (dominictarr.com)", + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + } +} diff --git a/miniprogram/node_modules/rc/test/ini.js b/miniprogram/node_modules/rc/test/ini.js new file mode 100644 index 00000000..e6857f8b --- /dev/null +++ b/miniprogram/node_modules/rc/test/ini.js @@ -0,0 +1,16 @@ +var cc =require('../lib/utils') +var INI = require('ini') +var assert = require('assert') + +function test(obj) { + + var _json, _ini + var json = cc.parse (_json = JSON.stringify(obj)) + var ini = cc.parse (_ini = INI.stringify(obj)) + console.log(_ini, _json) + assert.deepEqual(json, ini) +} + + +test({hello: true}) + diff --git a/miniprogram/node_modules/rc/test/nested-env-vars.js b/miniprogram/node_modules/rc/test/nested-env-vars.js new file mode 100644 index 00000000..0ecd1763 --- /dev/null +++ b/miniprogram/node_modules/rc/test/nested-env-vars.js @@ -0,0 +1,50 @@ + +var seed = Math.random(); +var n = 'rc'+ seed; +var N = 'RC'+ seed; +var assert = require('assert') + + +// Basic usage +process.env[n+'_someOpt__a'] = 42 +process.env[n+'_someOpt__x__'] = 99 +process.env[n+'_someOpt__a__b'] = 186 +process.env[n+'_someOpt__a__b__c'] = 243 +process.env[n+'_someOpt__x__y'] = 1862 +process.env[n+'_someOpt__z'] = 186577 + +// Should ignore empty strings from orphaned '__' +process.env[n+'_someOpt__z__x__'] = 18629 +process.env[n+'_someOpt__w__w__'] = 18629 + +// Leading '__' should ignore everything up to 'z' +process.env[n+'___z__i__'] = 9999 + +// should ignore case for config name section. 
+process.env[N+'_test_upperCase'] = 187 + +function testPrefix(prefix) { + var config = require('../')(prefix, { + option: true + }) + + console.log('\n\n------ nested-env-vars ------\n',{prefix: prefix}, '\n', config); + + assert.equal(config.option, true) + assert.equal(config.someOpt.a, 42) + assert.equal(config.someOpt.x, 99) + // Should not override `a` once it's been set + assert.equal(config.someOpt.a/*.b*/, 42) + // Should not override `x` once it's been set + assert.equal(config.someOpt.x/*.y*/, 99) + assert.equal(config.someOpt.z, 186577) + // Should not override `z` once it's been set + assert.equal(config.someOpt.z/*.x*/, 186577) + assert.equal(config.someOpt.w.w, 18629) + assert.equal(config.z.i, 9999) + + assert.equal(config.test_upperCase, 187) +} + +testPrefix(n); +testPrefix(N); diff --git a/miniprogram/node_modules/rc/test/test.js b/miniprogram/node_modules/rc/test/test.js new file mode 100644 index 00000000..4f633518 --- /dev/null +++ b/miniprogram/node_modules/rc/test/test.js @@ -0,0 +1,59 @@ + +var n = 'rc'+Math.random() +var assert = require('assert') + +process.env[n+'_envOption'] = 42 + +var config = require('../')(n, { + option: true +}) + +console.log(config) + +assert.equal(config.option, true) +assert.equal(config.envOption, 42) + +var customArgv = require('../')(n, { + option: true +}, { // nopt-like argv + option: false, + envOption: 24, + argv: { + remain: [], + cooked: ['--no-option', '--envOption', '24'], + original: ['--no-option', '--envOption=24'] + } +}) + +console.log(customArgv) + +assert.equal(customArgv.option, false) +assert.equal(customArgv.envOption, 24) + +var fs = require('fs') +var path = require('path') +var jsonrc = path.resolve('.' + n + 'rc'); + +fs.writeFileSync(jsonrc, [ + '{', + '// json overrides default', + '"option": false,', + '/* env overrides json */', + '"envOption": 24', + '}' +].join('\n')); + +var commentedJSON = require('../')(n, { + option: true +}) + +fs.unlinkSync(jsonrc); + +console.log(commentedJSON) + +assert.equal(commentedJSON.option, false) +assert.equal(commentedJSON.envOption, 42) + +assert.equal(commentedJSON.config, jsonrc) +assert.equal(commentedJSON.configs.length, 1) +assert.equal(commentedJSON.configs[0], jsonrc) diff --git a/miniprogram/node_modules/readable-stream/CONTRIBUTING.md b/miniprogram/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 00000000..f478d58d --- /dev/null +++ b/miniprogram/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. 
+ +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/miniprogram/node_modules/readable-stream/GOVERNANCE.md b/miniprogram/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 00000000..16ffb93f --- /dev/null +++ b/miniprogram/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. 
+ +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/miniprogram/node_modules/readable-stream/LICENSE b/miniprogram/node_modules/readable-stream/LICENSE new file mode 100644 index 00000000..2873b3b2 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/miniprogram/node_modules/readable-stream/README.md b/miniprogram/node_modules/readable-stream/README.md new file mode 100644 index 00000000..19117c1a --- /dev/null +++ b/miniprogram/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html). 
+
+If you want to guarantee a stable streams base, regardless of what version of
+Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core; for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
+
+As of version 2.0.0 **readable-stream** uses semantic versioning.
+
+## Version 3.x.x
+
+v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11, and the latest Safari. The breaking changes introduced by v3 are the combined breaking changes of [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows:
+
+1. Error codes: https://github.com/nodejs/node/pull/13310,
+   https://github.com/nodejs/node/pull/13291,
+   https://github.com/nodejs/node/pull/16589,
+   https://github.com/nodejs/node/pull/15042,
+   https://github.com/nodejs/node/pull/15665,
+   https://github.com/nodejs/readable-stream/pull/344
+2. 'readable' has precedence over flowing
+   https://github.com/nodejs/node/pull/18994
+3. make virtual methods errors consistent
+   https://github.com/nodejs/node/pull/18813
+4. updated streams error handling
+   https://github.com/nodejs/node/pull/18438
+5. writable.end should return this.
+   https://github.com/nodejs/node/pull/18780
+6. readable continues to read when push('')
+   https://github.com/nodejs/node/pull/18211
+7. add custom inspect to BufferList
+   https://github.com/nodejs/node/pull/17907
+8. always defer 'readable' with nextTick
+   https://github.com/nodejs/node/pull/17979
+
+## Version 2.x.x
+
+v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11.
+
+### Big Thanks
+
+Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce]
+
+# Usage
+
+You can swap your `require('stream')` with `require('readable-stream')`
+without any changes, if you are just using one of the main classes and
+functions.
+
+```js
+const {
+  Readable,
+  Writable,
+  Transform,
+  Duplex,
+  pipeline,
+  finished
+} = require('readable-stream')
+```
+
+Note that `require('stream')` will return `Stream`, while
+`require('readable-stream')` will return `Readable`. We discourage using
+whatever is exported directly; instead, use one of the properties as
+shown in the example above. A short illustrative `Transform` sketch
+follows the Working Group section below.
+
+# Streams Working Group
+
+`readable-stream` is maintained by the Streams Working Group, which
+oversees the development and maintenance of the Streams API within
+Node.js. The responsibilities of the Streams Working Group include:
+
+* Addressing stream issues on the Node.js issue tracker.
+* Authoring and editing stream documentation within the Node.js project.
+* Reviewing changes to stream subclasses within the Node.js project.
+* Redirecting changes to streams from the Node.js project to this
+  project.
+* Assisting in the implementation of stream providers within Node.js.
+* Recommending versions of `readable-stream` to be included in Node.js.
+* Messaging about the future of streams to give the community advance
+  notice of changes.
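+
+As referenced in the Usage section above, the following is a short
+illustrative `Transform` sketch. It is not part of the upstream README;
+the `Upcase` class and the sample data are invented for the example, and
+it assumes the v3 exports (`Transform`, `Readable`, `Writable`,
+`pipeline`):
+
+```js
+// Illustration only: an uppercasing Transform wired up with pipeline().
+const { Transform, Readable, Writable, pipeline } = require('readable-stream')
+
+class Upcase extends Transform {
+  _transform (chunk, encoding, callback) {
+    // push() hands transformed data to the readable side;
+    // callback() marks the written chunk as consumed.
+    this.push(chunk.toString().toUpperCase())
+    callback()
+  }
+}
+
+pipeline(
+  Readable.from(['hello ', 'streams']),
+  new Upcase(),
+  new Writable({
+    write (chunk, encoding, callback) {
+      process.stdout.write(chunk)
+      callback()
+    }
+  }),
+  (err) => { if (err) console.error('pipeline failed', err) }
+)
+```
+
+The same sketch should also run unchanged against Node's built-in
+`stream` module on any Node version that ships `pipeline()` and
+`Readable.from()`.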
+
+
+## Team Members
+
+* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com>
+  - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
+* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
+* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
+  - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
+* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com>
+* **Yoshua Wuyts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com>
+
+[sauce]: https://saucelabs.com
diff --git a/miniprogram/node_modules/readable-stream/errors-browser.js b/miniprogram/node_modules/readable-stream/errors-browser.js
new file mode 100644
index 00000000..fb8e73e1
--- /dev/null
+++ b/miniprogram/node_modules/readable-stream/errors-browser.js
@@ -0,0 +1,127 @@
+'use strict';
+
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+
+var codes = {};
+
+function createErrorType(code, message, Base) {
+  if (!Base) {
+    Base = Error;
+  }
+
+  function getMessage(arg1, arg2, arg3) {
+    if (typeof message === 'string') {
+      return message;
+    } else {
+      return message(arg1, arg2, arg3);
+    }
+  }
+
+  var NodeError =
+  /*#__PURE__*/
+  function (_Base) {
+    _inheritsLoose(NodeError, _Base);
+
+    function NodeError(arg1, arg2, arg3) {
+      return _Base.call(this, getMessage(arg1, arg2, arg3)) || this;
+    }
+
+    return NodeError;
+  }(Base);
+
+  NodeError.prototype.name = Base.name;
+  NodeError.prototype.code = code;
+  codes[code] = NodeError;
+} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
+
+
+function oneOf(expected, thing) {
+  if (Array.isArray(expected)) {
+    var len = expected.length;
+    expected = expected.map(function (i) {
+      return String(i);
+    });
+
+    if (len > 2) {
+      return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1];
+    } else if (len === 2) {
+      return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]);
+    } else {
+      return "of ".concat(thing, " ").concat(expected[0]);
+    }
+  } else {
+    return "of ".concat(thing, " ").concat(String(expected));
+  }
+} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
+
+
+function startsWith(str, search, pos) {
+  return str.substr(!pos || pos < 0 ?
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/miniprogram/node_modules/readable-stream/errors.js b/miniprogram/node_modules/readable-stream/errors.js new file mode 100644 index 00000000..8471526d --- /dev/null +++ b/miniprogram/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of 
${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/miniprogram/node_modules/readable-stream/experimentalWarning.js b/miniprogram/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 00000000..78e84149 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. 
This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/miniprogram/node_modules/readable-stream/lib/_stream_duplex.js b/miniprogram/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 00000000..19abfa60 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,126 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); +require('inherits')(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. 
+ var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/_stream_passthrough.js b/miniprogram/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 00000000..24a6bdde --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,37 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; +var Transform = require('./_stream_transform'); +require('inherits')(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/_stream_readable.js b/miniprogram/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 00000000..df1f608d --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1027 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? 
global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = require('util'); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = require('./internal/streams/buffer_list'); +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/_stream_transform.js b/miniprogram/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 00000000..1ccb7157 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,190 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = require('./_stream_duplex'); +require('inherits')(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/_stream_writable.js b/miniprogram/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 00000000..292415e2 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,641 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +require('inherits')(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. 
options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. 
+ this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. 
+ state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 00000000..742c5a46 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,180 @@ +'use strict'; + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var finished = require('./end-of-stream'); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/buffer_list.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file 
mode 100644 index 00000000..69bda497 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,183 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = require('buffer'), + Buffer = _require.Buffer; +var _require2 = require('util'), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/destroy.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 00000000..31a17c4d --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,96 @@ +'use strict'; + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. 
+ // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 00000000..59c671b5 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + 
stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/from-browser.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 00000000..a4ce56f3 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/from.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 00000000..0a34ee92 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,52 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/pipeline.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 00000000..e6f39241 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = 
streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/state.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 00000000..3fbf8927 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,22 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 00000000..9332a3fd --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/miniprogram/node_modules/readable-stream/lib/internal/streams/stream.js b/miniprogram/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 00000000..ce2ad5b6 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/miniprogram/node_modules/readable-stream/package.json b/miniprogram/node_modules/readable-stream/package.json new file mode 100644 index 00000000..ade59e71 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.2", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee 
test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/miniprogram/node_modules/readable-stream/readable-browser.js b/miniprogram/node_modules/readable-stream/readable-browser.js new file mode 100644 index 00000000..adbf60de --- /dev/null +++ b/miniprogram/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/miniprogram/node_modules/readable-stream/readable.js b/miniprogram/node_modules/readable-stream/readable.js new file mode 100644 index 00000000..9e0ca120 --- /dev/null +++ b/miniprogram/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/miniprogram/node_modules/safe-buffer/LICENSE b/miniprogram/node_modules/safe-buffer/LICENSE new file mode 100644 index 00000000..0c068cee --- /dev/null +++ b/miniprogram/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/miniprogram/node_modules/safe-buffer/README.md b/miniprogram/node_modules/safe-buffer/README.md new file mode 100644 index 00000000..e9a81afd --- /dev/null +++ b/miniprogram/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. 
+ +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: <Buffer 88 13 a0 0f> + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: <Buffer 88 13 70 17> +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // <Buffer 00 00 00 00 00> +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // <Buffer 61 61 61 61 61> +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64> +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. 
+ +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. + +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). 
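As a quick recap of the drop-in usage documented in the sections above, here is a minimal sketch (the variable names are illustrative only; the `Buffer.alloc`, `Buffer.from`, `Buffer.allocUnsafe`, and `buf.fill`/`buf.toString` calls are the APIs described earlier):

```js
// Minimal sketch: safe-buffer's Buffer supports the same API as the built-in Buffer.
var Buffer = require('safe-buffer').Buffer

var zeroed = Buffer.alloc(8)                 // zero-filled allocation (safe)
var greeting = Buffer.from('hey', 'utf8')    // explicit conversion from a string
var scratch = Buffer.allocUnsafe(8).fill(0)  // uninitialized, so overwrite it before use

console.log(zeroed.length, greeting.toString('hex'), scratch.length)
```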
+ + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. + +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? 
+ +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. 
Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/miniprogram/node_modules/safe-buffer/index.d.ts b/miniprogram/node_modules/safe-buffer/index.d.ts new file mode 100644 index 00000000..e9fed809 --- /dev/null +++ b/miniprogram/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: 
number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. 
+ * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/miniprogram/node_modules/safe-buffer/index.js b/miniprogram/node_modules/safe-buffer/index.js new file mode 100644 index 00000000..f8d3ec98 --- /dev/null +++ b/miniprogram/node_modules/safe-buffer/index.js @@ -0,0 +1,65 @@ +/*! safe-buffer. MIT License. 
Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/miniprogram/node_modules/safe-buffer/package.json b/miniprogram/node_modules/safe-buffer/package.json new file mode 100644 index 00000000..f2869e25 --- /dev/null +++ b/miniprogram/node_modules/safe-buffer/package.json @@ -0,0 +1,51 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^5.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/miniprogram/node_modules/semver/LICENSE b/miniprogram/node_modules/semver/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/miniprogram/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/miniprogram/node_modules/semver/README.md b/miniprogram/node_modules/semver/README.md new file mode 100644 index 00000000..e9522153 --- /dev/null +++ b/miniprogram/node_modules/semver/README.md @@ -0,0 +1,664 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Install + +```bash +npm install semver +```` + +## Usage + +As a node module: + +```js +const semver = require('semver') + +semver.valid('1.2.3') // '1.2.3' +semver.valid('a.b.c') // null +semver.clean(' =v1.2.3 ') // '1.2.3' +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +You can also just load the module for the function that you care about if +you'd like to minimize your footprint. + +```js +// load the whole API at once in a single object +const semver = require('semver') + +// or just load the bits you need +// all of them listed here, just pick and choose what you want + +// classes +const SemVer = require('semver/classes/semver') +const Comparator = require('semver/classes/comparator') +const Range = require('semver/classes/range') + +// functions for working with versions +const semverParse = require('semver/functions/parse') +const semverValid = require('semver/functions/valid') +const semverClean = require('semver/functions/clean') +const semverInc = require('semver/functions/inc') +const semverDiff = require('semver/functions/diff') +const semverMajor = require('semver/functions/major') +const semverMinor = require('semver/functions/minor') +const semverPatch = require('semver/functions/patch') +const semverPrerelease = require('semver/functions/prerelease') +const semverCompare = require('semver/functions/compare') +const semverRcompare = require('semver/functions/rcompare') +const semverCompareLoose = require('semver/functions/compare-loose') +const semverCompareBuild = require('semver/functions/compare-build') +const semverSort = require('semver/functions/sort') +const semverRsort = require('semver/functions/rsort') + +// low-level comparators between versions +const semverGt = require('semver/functions/gt') +const semverLt = require('semver/functions/lt') +const semverEq = require('semver/functions/eq') +const semverNeq = require('semver/functions/neq') +const semverGte = require('semver/functions/gte') +const semverLte = require('semver/functions/lte') +const semverCmp = require('semver/functions/cmp') +const semverCoerce = require('semver/functions/coerce') + +// working with ranges +const semverSatisfies = require('semver/functions/satisfies') +const semverMaxSatisfying = require('semver/ranges/max-satisfying') +const semverMinSatisfying = require('semver/ranges/min-satisfying') +const semverToComparators = require('semver/ranges/to-comparators') +const semverMinVersion = require('semver/ranges/min-version') +const semverValidRange = require('semver/ranges/valid') +const semverOutside = require('semver/ranges/outside') +const semverGtr = require('semver/ranges/gtr') +const semverLtr = 
require('semver/ranges/ltr') +const semverIntersects = require('semver/ranges/intersects') +const semverSimplifyRange = require('semver/ranges/simplify') +const semverRangeSubset = require('semver/ranges/subset') +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] <version> [<version> [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range <range> + Print versions that match the specified range. + +-i --increment [<level>] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, prerelease, or release. Default level is 'patch'. + Only one version may be specified. + +--preid <identifier> + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-n <0|1> + This is the base to be used for the prerelease identifier. + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +<https://semver.org/>. + +A leading `"="` or `"v"` character is stripped off and ignored. +Support for stripping a leading "v" is kept for compatibility with `v1.0.0` of the SemVer +specification but should not be used anymore. + +## Ranges + +A `version range` is a set of `comparators` that specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. The comparator `>1` is equivalent to `>=2.0.0` and +would match the versions `2.0.0` and `3.1.0`, but not the versions +`1.0.1` or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. 
+ +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. +Version `3.4.5` *would* satisfy the range because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose of this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range-matching +semantics. + +Second, a user who has opted into using a prerelease version has +indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. + +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for range-matching) +by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. + +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +To get out of the prerelease phase, use the `release` option: + +```bash +$ semver 1.2.4-beta.1 -i release +1.2.4 +``` + +#### Prerelease Identifier Base + +The method `.inc` takes an optional parameter 'identifierBase' string +that will let you let your prerelease number as zero-based or one-based. +Set to `false` to omit the prerelease number altogether. +If you do not specify this parameter, it will default to zero-based. + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', '1') +// '1.2.4-beta.1' +``` + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', false) +// '1.2.4-beta' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n 1 +1.2.4-beta.1 +``` + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n false +1.2.4-beta +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. 
+ +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any non-prerelease version satisfies, unless + `includePrerelease` is specified, in which case any version at all + satisfies) +* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. + +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. + +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero element in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0-0` +* `^0.2.3` := `>=0.2.3 <0.3.0-0` +* `^0.0.3` := `>=0.0.3 <0.0.4-0` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0-0` +* `^0.0.x` := `>=0.0.0 <0.1.0-0` +* `^0.0` := `>=0.0.0 <0.1.0-0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. 
+ +* `^1.x` := `>=1.0.0 <2.0.0-0` +* `^0.x` := `>=0.0.0 <1.0.0-0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose`: Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. +- `includePrerelease`: Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, releaseType, options, identifier, identifierBase)`: + Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, `prerelease`, or `release`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, `prerelease` will work the + same as `prepatch`. It increments the patch version and then makes a + prerelease. If the input version is already a prerelease it simply + increments it. + * `release` will remove any prerelease part of the version. + * `identifier` can be used to prefix `premajor`, `preminor`, + `prepatch`, or `prerelease` version increments. `identifierBase` + is the base to be used for the `prerelease` identifier. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. 
Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of `compare`. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions + are equal. Sorts in ascending order if passed to `Array.sort()`. +* `compareLoose(v1, v2)`: Short for `compare(v1, v2, { loose: true })`. +* `diff(v1, v2)`: Returns the difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + +### Sorting + +* `sort(versions)`: Returns a sorted array of versions based on the `compareBuild` + function. +* `rsort(versions)`: The reverse of `sort`. Returns an array of versions based on + the `compareBuild` function in descending order. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid. +* `satisfies(version, range)`: Return true if the version satisfies the + range. +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can match + the given range. +* `gtr(version, range)`: Return `true` if the version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if the version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) +* `intersects(range)`: Return true if any of the range comparators intersect. +* `simplifyRange(versions, range)`: Return a "simplified" range that + matches the same items in the `versions` list as the range specified. Note + that it does *not* guarantee that it would match the same versions in all + cases, only for the set of versions provided. This is useful when + generating ranges by joining together multiple versions with `||` + programmatically, to provide the user with something a bit more + ergonomic. If the provided range is shorter in string-length than the + generated range, then that is returned. +* `subset(subRange, superRange)`: Return `true` if the `subRange` range is + entirely contained by the `superRange` range. + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. 
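To make the non-contiguous range caveat above concrete, here is a small sketch using the `satisfies`, `gtr`, and `ltr` functions listed earlier, with the same range and version the preceding paragraph discusses:

```js
const semver = require('semver')
const range = '1.2 <1.2.9 || >2.0.0' // has a "hole" between 1.2.9 and 2.0.0

console.log(semver.satisfies('1.2.10', range)) // false: 1.2.10 falls in the hole
console.log(semver.gtr('1.2.10', range))       // false: the range allows 2.0.1, which is higher
console.log(semver.ltr('1.2.10', range))       // false: the range allows 1.2.8, which is lower
```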
+ +### Coercion + +* `coerce(version, options)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). + +If the `options.rtl` flag is set, then `coerce` will return the right-most +coercible tuple that does not share an ending index with a longer coercible +tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not +`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of +any other overlapping SemVer tuple. + +If the `options.includePrerelease` flag is set, then the `coerce` result will contain +prerelease and build parts of a version. For example, `1.2.3.4-rc.1+rev.2` +will preserve prerelease `rc.1` and build `rev.2` in the result. + +### Clean + +* `clean(version)`: Clean a string to be a valid semver if possible + +This will return a cleaned and trimmed semver version. If the provided +version is not valid a null will be returned. This does not work for +ranges. + +ex. +* `s.clean(' = v 2.1.5foo')`: `null` +* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean(' = v 2.1.5-foo')`: `null` +* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean('=v2.1.5')`: `'2.1.5'` +* `s.clean(' =v2.1.5')`: `'2.1.5'` +* `s.clean(' 2.1.5 ')`: `'2.1.5'` +* `s.clean('~1.0.0')`: `null` + +## Constants + +As a convenience, helper constants are exported to provide information about what `node-semver` supports: + +### `RELEASE_TYPES` + +- major +- premajor +- minor +- preminor +- patch +- prepatch +- prerelease + +``` +const semver = require('semver'); + +if (semver.RELEASE_TYPES.includes(arbitraryUserInput)) { + console.log('This is a valid release type!'); +} else { + console.warn('This is NOT a valid release type!'); +} +``` + +### `SEMVER_SPEC_VERSION` + +2.0.0 + +``` +const semver = require('semver'); + +console.log('We are currently using the semver specification version:', semver.SEMVER_SPEC_VERSION); +``` + +## Exported Modules + + + +You may pull in just the part of this semver utility that you need if you +are sensitive to packing and tree-shaking concerns. The main +`require('semver')` export uses getter functions to lazily load the parts +of the API that are used. 
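+
+For example, if all you need is range matching, a minimal sketch would pull in
+just that function rather than the full index (the subpath appears in the list
+below):
+
+```
+// Loads only the satisfies() implementation and its own dependencies.
+const satisfies = require('semver/functions/satisfies');
+
+satisfies('1.2.3', '^1.0.0'); // true
+```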
+ +The following modules are available: + +* `require('semver')` +* `require('semver/classes')` +* `require('semver/classes/comparator')` +* `require('semver/classes/range')` +* `require('semver/classes/semver')` +* `require('semver/functions/clean')` +* `require('semver/functions/cmp')` +* `require('semver/functions/coerce')` +* `require('semver/functions/compare')` +* `require('semver/functions/compare-build')` +* `require('semver/functions/compare-loose')` +* `require('semver/functions/diff')` +* `require('semver/functions/eq')` +* `require('semver/functions/gt')` +* `require('semver/functions/gte')` +* `require('semver/functions/inc')` +* `require('semver/functions/lt')` +* `require('semver/functions/lte')` +* `require('semver/functions/major')` +* `require('semver/functions/minor')` +* `require('semver/functions/neq')` +* `require('semver/functions/parse')` +* `require('semver/functions/patch')` +* `require('semver/functions/prerelease')` +* `require('semver/functions/rcompare')` +* `require('semver/functions/rsort')` +* `require('semver/functions/satisfies')` +* `require('semver/functions/sort')` +* `require('semver/functions/valid')` +* `require('semver/ranges/gtr')` +* `require('semver/ranges/intersects')` +* `require('semver/ranges/ltr')` +* `require('semver/ranges/max-satisfying')` +* `require('semver/ranges/min-satisfying')` +* `require('semver/ranges/min-version')` +* `require('semver/ranges/outside')` +* `require('semver/ranges/simplify')` +* `require('semver/ranges/subset')` +* `require('semver/ranges/to-comparators')` +* `require('semver/ranges/valid')` + diff --git a/miniprogram/node_modules/semver/bin/semver.js b/miniprogram/node_modules/semver/bin/semver.js new file mode 100644 index 00000000..dbb1bf53 --- /dev/null +++ b/miniprogram/node_modules/semver/bin/semver.js @@ -0,0 +1,191 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
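+//
+// Illustrative invocations (behavior follows the help text below):
+//   $ semver 1.2.3 0.9.1              -> prints "0.9.1" then "1.2.3" (ascending)
+//   $ semver -r ">=1.2.0" 1.2.3 0.9.1 -> prints only "1.2.3"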
+ +'use strict' + +const argv = process.argv.slice(2) + +let versions = [] + +const range = [] + +let inc = null + +const version = require('../package.json').version + +let loose = false + +let includePrerelease = false + +let coerce = false + +let rtl = false + +let identifier + +let identifierBase + +const semver = require('../') +const parseOptions = require('../internal/parse-options') + +let reverse = false + +let options = {} + +const main = () => { + if (!argv.length) { + return help() + } + while (argv.length) { + let a = argv.shift() + const indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + const value = a.slice(indexOfEqualSign + 1) + a = a.slice(0, indexOfEqualSign) + argv.unshift(value) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + case 'release': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-n': + identifierBase = argv.shift() + if (identifierBase === 'false') { + identifierBase = false + } + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + options = parseOptions({ loose, includePrerelease, rtl }) + + versions = versions.map((v) => { + return coerce ? (semver.coerce(v, options) || { version: v }).version : v + }).filter((v) => { + return semver.valid(v) + }) + if (!versions.length) { + return fail() + } + if (inc && (versions.length !== 1 || range.length)) { + return failInc() + } + + for (let i = 0, l = range.length; i < l; i++) { + versions = versions.filter((v) => { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) { + return fail() + } + } + versions + .sort((a, b) => semver[reverse ? 'rcompare' : 'compare'](a, b, options)) + .map(v => semver.clean(v, options)) + .map(v => inc ? semver.inc(v, inc, options, identifier, identifierBase) : v) + .forEach(v => console.log(v)) +} + +const failInc = () => { + console.error('--inc can only be used on a single version with no range') + fail() +} + +const fail = () => process.exit(1) + +const help = () => console.log( +`SemVer ${version} + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, prerelease, or release. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. 
+ +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +-n + Base number to be used for the prerelease identifier. + Can be either 0 or 1, or false to omit the number altogether. + Defaults to 0. + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them.`) + +main() diff --git a/miniprogram/node_modules/semver/classes/comparator.js b/miniprogram/node_modules/semver/classes/comparator.js new file mode 100644 index 00000000..647c1f09 --- /dev/null +++ b/miniprogram/node_modules/semver/classes/comparator.js @@ -0,0 +1,143 @@ +'use strict' + +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options) + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && comp.operator.startsWith('>')) { + return true + } + return false + } +} + +module.exports = Comparator + +const parseOptions = require('../internal/parse-options') +const { safeRe: re, t } = require('../internal/re') +const cmp = require('../functions/cmp') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const Range = require('./range') diff --git a/miniprogram/node_modules/semver/classes/index.js b/miniprogram/node_modules/semver/classes/index.js new file mode 100644 index 00000000..91c24ec4 --- /dev/null +++ b/miniprogram/node_modules/semver/classes/index.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = { + SemVer: require('./semver.js'), + Range: require('./range.js'), + Comparator: require('./comparator.js'), +} diff --git a/miniprogram/node_modules/semver/classes/range.js b/miniprogram/node_modules/semver/classes/range.js new file mode 100644 index 00000000..94629ce6 --- /dev/null +++ b/miniprogram/node_modules/semver/classes/range.js @@ -0,0 +1,557 @@ +'use strict' + +const SPACE_CHARACTERS = /\s+/g + +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.formatted = undefined + 
return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range.trim().replace(SPACE_CHARACTERS, ' ') + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.formatted = undefined + } + + get range () { + if (this.formatted === undefined) { + this.formatted = '' + for (let i = 0; i < this.set.length; i++) { + if (i > 0) { + this.formatted += '||' + } + const comps = this.set[i] + for (let k = 0; k < comps.length; k++) { + if (k > 0) { + this.formatted += ' ' + } + this.formatted += comps[k].toString().trim() + } + } + } + return this.formatted + } + + format () { + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
+ + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} + +module.exports = Range + +const LRU = require('../internal/lrucache') +const cache = new LRU() + +const parseOptions = require('../internal/parse-options') +const Comparator = require('./comparator') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = require('../internal/re') +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require('../internal/constants') + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+const parseComparator = (comp, options) => { + comp = comp.replace(re[t.BUILD], '') + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} + +const replaceCaret = (comp, options) => { + debug('caret', comp, options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +// TODO build? +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return `${from} ${to}`.trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} diff --git a/miniprogram/node_modules/semver/classes/semver.js b/miniprogram/node_modules/semver/classes/semver.js new file mode 100644 index 00000000..92254be1 --- /dev/null +++ b/miniprogram/node_modules/semver/classes/semver.js @@ -0,0 +1,333 @@ +'use strict' + +const debug = require('../internal/debug') +const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') +const { safeRe: re, t } = require('../internal/re') + +const parseOptions = require('../internal/parse-options') +const { compareIdentifiers } = require('../internal/identifiers') +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? 
m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + if (this.major < other.major) { + return -1 + } + if (this.major > other.major) { + return 1 + } + if (this.minor < other.minor) { + return -1 + } + if (this.minor > other.minor) { + return 1 + } + if (this.patch < other.patch) { + return -1 + } + if (this.patch > other.patch) { + return 1 + } + return 0 + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('build compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + if (release.startsWith('pre')) { + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + // Avoid an invalid semver results + if (identifier) { + const match = `-${identifier}`.match(this.options.loose ? re[t.PRERELEASELOOSE] : re[t.PRERELEASE]) + if (!match || match[1] !== identifier) { + throw new Error(`invalid identifier: ${identifier}`) + } + } + } + + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. 
+ this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break + case 'release': + if (this.prerelease.length === 0) { + throw new Error(`version ${this.raw} is not a prerelease`) + } + this.prerelease.length = 0 + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 1 : 0 + + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } +} + +module.exports = SemVer diff --git a/miniprogram/node_modules/semver/functions/clean.js b/miniprogram/node_modules/semver/functions/clean.js new file mode 100644 index 00000000..79703d63 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/clean.js @@ -0,0 +1,8 @@ +'use strict' + +const parse = require('./parse') +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean diff --git a/miniprogram/node_modules/semver/functions/cmp.js b/miniprogram/node_modules/semver/functions/cmp.js new file mode 100644 index 00000000..77487dca --- /dev/null +++ b/miniprogram/node_modules/semver/functions/cmp.js @@ -0,0 +1,54 @@ +'use strict' + +const eq = require('./eq') +const neq = require('./neq') +const gt = require('./gt') +const gte = require('./gte') +const lt = require('./lt') +const lte = require('./lte') + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp diff --git a/miniprogram/node_modules/semver/functions/coerce.js b/miniprogram/node_modules/semver/functions/coerce.js new file mode 100644 index 00000000..cfe02759 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/coerce.js @@ -0,0 +1,62 @@ +'use strict' + +const SemVer = require('../classes/semver') +const parse = require('./parse') +const { safeRe: re, t } = require('../internal/re') + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] + let next + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1 + } + + if (match === null) { + return null + } + + const major = match[2] + const minor = match[3] || '0' + const patch = match[4] || '0' + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' + const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : '' + + return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) +} +module.exports = coerce diff --git a/miniprogram/node_modules/semver/functions/compare-build.js b/miniprogram/node_modules/semver/functions/compare-build.js new file mode 100644 index 00000000..99157cf3 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/compare-build.js @@ -0,0 +1,9 @@ +'use strict' + +const SemVer = require('../classes/semver') +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild diff --git a/miniprogram/node_modules/semver/functions/compare-loose.js b/miniprogram/node_modules/semver/functions/compare-loose.js new file mode 100644 index 00000000..75316346 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/compare-loose.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose diff --git a/miniprogram/node_modules/semver/functions/compare.js b/miniprogram/node_modules/semver/functions/compare.js new file mode 100644 index 00000000..63d8090c --- /dev/null +++ b/miniprogram/node_modules/semver/functions/compare.js @@ -0,0 +1,7 @@ +'use strict' + +const SemVer = require('../classes/semver') +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare diff --git a/miniprogram/node_modules/semver/functions/diff.js b/miniprogram/node_modules/semver/functions/diff.js new file mode 100644 index 00000000..04e064e9 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/diff.js @@ -0,0 +1,60 @@ +'use strict' + +const parse = require('./parse.js') + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // If the main part has no difference + if (lowVersion.compareMain(highVersion) === 0) { + if (lowVersion.minor && !lowVersion.patch) { + return 'minor' + } + return 'patch' + } + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 
'pre' : '' + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +} + +module.exports = diff diff --git a/miniprogram/node_modules/semver/functions/eq.js b/miniprogram/node_modules/semver/functions/eq.js new file mode 100644 index 00000000..5f0eead1 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/eq.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq diff --git a/miniprogram/node_modules/semver/functions/gt.js b/miniprogram/node_modules/semver/functions/gt.js new file mode 100644 index 00000000..84a57ddf --- /dev/null +++ b/miniprogram/node_modules/semver/functions/gt.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt diff --git a/miniprogram/node_modules/semver/functions/gte.js b/miniprogram/node_modules/semver/functions/gte.js new file mode 100644 index 00000000..7c52bdf2 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/gte.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte diff --git a/miniprogram/node_modules/semver/functions/inc.js b/miniprogram/node_modules/semver/functions/inc.js new file mode 100644 index 00000000..ff999e9d --- /dev/null +++ b/miniprogram/node_modules/semver/functions/inc.js @@ -0,0 +1,21 @@ +'use strict' + +const SemVer = require('../classes/semver') + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? 
version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +} +module.exports = inc diff --git a/miniprogram/node_modules/semver/functions/lt.js b/miniprogram/node_modules/semver/functions/lt.js new file mode 100644 index 00000000..2fb32a0e --- /dev/null +++ b/miniprogram/node_modules/semver/functions/lt.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt diff --git a/miniprogram/node_modules/semver/functions/lte.js b/miniprogram/node_modules/semver/functions/lte.js new file mode 100644 index 00000000..da9ee8f4 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/lte.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte diff --git a/miniprogram/node_modules/semver/functions/major.js b/miniprogram/node_modules/semver/functions/major.js new file mode 100644 index 00000000..e6d08dc2 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/major.js @@ -0,0 +1,5 @@ +'use strict' + +const SemVer = require('../classes/semver') +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major diff --git a/miniprogram/node_modules/semver/functions/minor.js b/miniprogram/node_modules/semver/functions/minor.js new file mode 100644 index 00000000..9e70ffda --- /dev/null +++ b/miniprogram/node_modules/semver/functions/minor.js @@ -0,0 +1,5 @@ +'use strict' + +const SemVer = require('../classes/semver') +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor diff --git a/miniprogram/node_modules/semver/functions/neq.js b/miniprogram/node_modules/semver/functions/neq.js new file mode 100644 index 00000000..84326b77 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/neq.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq diff --git a/miniprogram/node_modules/semver/functions/parse.js b/miniprogram/node_modules/semver/functions/parse.js new file mode 100644 index 00000000..d544d33a --- /dev/null +++ b/miniprogram/node_modules/semver/functions/parse.js @@ -0,0 +1,18 @@ +'use strict' + +const SemVer = require('../classes/semver') +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +} + +module.exports = parse diff --git a/miniprogram/node_modules/semver/functions/patch.js b/miniprogram/node_modules/semver/functions/patch.js new file mode 100644 index 00000000..7675162f --- /dev/null +++ b/miniprogram/node_modules/semver/functions/patch.js @@ -0,0 +1,5 @@ +'use strict' + +const SemVer = require('../classes/semver') +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch diff --git a/miniprogram/node_modules/semver/functions/prerelease.js b/miniprogram/node_modules/semver/functions/prerelease.js new file mode 100644 index 00000000..b8fe1db5 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/prerelease.js @@ -0,0 +1,8 @@ +'use strict' + +const parse = require('./parse') +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +} +module.exports = prerelease diff --git a/miniprogram/node_modules/semver/functions/rcompare.js b/miniprogram/node_modules/semver/functions/rcompare.js new file mode 100644 index 00000000..8e1c222b --- /dev/null +++ b/miniprogram/node_modules/semver/functions/rcompare.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare diff --git a/miniprogram/node_modules/semver/functions/rsort.js b/miniprogram/node_modules/semver/functions/rsort.js new file mode 100644 index 00000000..5d3d2009 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/rsort.js @@ -0,0 +1,5 @@ +'use strict' + +const compareBuild = require('./compare-build') +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort diff --git a/miniprogram/node_modules/semver/functions/satisfies.js b/miniprogram/node_modules/semver/functions/satisfies.js new file mode 100644 index 00000000..a0264a22 --- /dev/null +++ b/miniprogram/node_modules/semver/functions/satisfies.js @@ -0,0 +1,12 @@ +'use strict' + +const Range = require('../classes/range') +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies diff --git a/miniprogram/node_modules/semver/functions/sort.js b/miniprogram/node_modules/semver/functions/sort.js new file mode 100644 index 00000000..edb24b1d --- /dev/null +++ b/miniprogram/node_modules/semver/functions/sort.js @@ -0,0 +1,5 @@ +'use strict' + +const compareBuild = require('./compare-build') +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort diff --git a/miniprogram/node_modules/semver/functions/valid.js b/miniprogram/node_modules/semver/functions/valid.js new file mode 100644 index 00000000..0db67edc --- /dev/null +++ b/miniprogram/node_modules/semver/functions/valid.js @@ -0,0 +1,8 @@ +'use strict' + +const parse = require('./parse') +const valid = (version, options) => { + const v = parse(version, options) + return v ? 
v.version : null +} +module.exports = valid diff --git a/miniprogram/node_modules/semver/index.js b/miniprogram/node_modules/semver/index.js new file mode 100644 index 00000000..285662ac --- /dev/null +++ b/miniprogram/node_modules/semver/index.js @@ -0,0 +1,91 @@ +'use strict' + +// just pre-load all the stuff that index.js lazily exports +const internalRe = require('./internal/re') +const constants = require('./internal/constants') +const SemVer = require('./classes/semver') +const identifiers = require('./internal/identifiers') +const parse = require('./functions/parse') +const valid = require('./functions/valid') +const clean = require('./functions/clean') +const inc = require('./functions/inc') +const diff = require('./functions/diff') +const major = require('./functions/major') +const minor = require('./functions/minor') +const patch = require('./functions/patch') +const prerelease = require('./functions/prerelease') +const compare = require('./functions/compare') +const rcompare = require('./functions/rcompare') +const compareLoose = require('./functions/compare-loose') +const compareBuild = require('./functions/compare-build') +const sort = require('./functions/sort') +const rsort = require('./functions/rsort') +const gt = require('./functions/gt') +const lt = require('./functions/lt') +const eq = require('./functions/eq') +const neq = require('./functions/neq') +const gte = require('./functions/gte') +const lte = require('./functions/lte') +const cmp = require('./functions/cmp') +const coerce = require('./functions/coerce') +const Comparator = require('./classes/comparator') +const Range = require('./classes/range') +const satisfies = require('./functions/satisfies') +const toComparators = require('./ranges/to-comparators') +const maxSatisfying = require('./ranges/max-satisfying') +const minSatisfying = require('./ranges/min-satisfying') +const minVersion = require('./ranges/min-version') +const validRange = require('./ranges/valid') +const outside = require('./ranges/outside') +const gtr = require('./ranges/gtr') +const ltr = require('./ranges/ltr') +const intersects = require('./ranges/intersects') +const simplifyRange = require('./ranges/simplify') +const subset = require('./ranges/subset') +module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} diff --git a/miniprogram/node_modules/semver/internal/constants.js b/miniprogram/node_modules/semver/internal/constants.js new file mode 100644 index 00000000..6d1db915 --- /dev/null +++ b/miniprogram/node_modules/semver/internal/constants.js @@ -0,0 +1,37 @@ +'use strict' + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. 
+const MAX_SAFE_COMPONENT_LENGTH = 16 + +// Max safe length for a build identifier. The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. +const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] + +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} diff --git a/miniprogram/node_modules/semver/internal/debug.js b/miniprogram/node_modules/semver/internal/debug.js new file mode 100644 index 00000000..20d1e9dc --- /dev/null +++ b/miniprogram/node_modules/semver/internal/debug.js @@ -0,0 +1,11 @@ +'use strict' + +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug diff --git a/miniprogram/node_modules/semver/internal/identifiers.js b/miniprogram/node_modules/semver/internal/identifiers.js new file mode 100644 index 00000000..d053472d --- /dev/null +++ b/miniprogram/node_modules/semver/internal/identifiers.js @@ -0,0 +1,29 @@ +'use strict' + +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + if (typeof a === 'number' && typeof b === 'number') { + return a === b ? 0 : a < b ? -1 : 1 + } + + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} diff --git a/miniprogram/node_modules/semver/internal/lrucache.js b/miniprogram/node_modules/semver/internal/lrucache.js new file mode 100644 index 00000000..b8bf5262 --- /dev/null +++ b/miniprogram/node_modules/semver/internal/lrucache.js @@ -0,0 +1,42 @@ +'use strict' + +class LRUCache { + constructor () { + this.max = 1000 + this.map = new Map() + } + + get (key) { + const value = this.map.get(key) + if (value === undefined) { + return undefined + } else { + // Remove the key from the map and add it to the end + this.map.delete(key) + this.map.set(key, value) + return value + } + } + + delete (key) { + return this.map.delete(key) + } + + set (key, value) { + const deleted = this.delete(key) + + if (!deleted && value !== undefined) { + // If cache is full, delete the least recently used item + if (this.map.size >= this.max) { + const firstKey = this.map.keys().next().value + this.delete(firstKey) + } + + this.map.set(key, value) + } + + return this + } +} + +module.exports = LRUCache diff --git a/miniprogram/node_modules/semver/internal/parse-options.js b/miniprogram/node_modules/semver/internal/parse-options.js new file mode 100644 index 00000000..52954541 --- /dev/null +++ b/miniprogram/node_modules/semver/internal/parse-options.js @@ -0,0 +1,17 @@ +'use strict' + +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions diff --git a/miniprogram/node_modules/semver/internal/re.js 
b/miniprogram/node_modules/semver/internal/re.js new file mode 100644 index 00000000..4758c58d --- /dev/null +++ b/miniprogram/node_modules/semver/internal/re.js @@ -0,0 +1,223 @@ +'use strict' + +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = require('./constants') +const debug = require('./debug') +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = [] +const safeSrc = exports.safeSrc = [] +const t = exports.t = {} +let R = 0 + +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} + +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + safeSrc[index] = safe + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. +// Non-numberic identifiers include numberic identifiers but can be longer. +// Therefore non-numberic identifiers must go first. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NONNUMERICIDENTIFIER] +}|${src[t.NUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NONNUMERICIDENTIFIER] +}|${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. 
+ +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) +createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) +createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) +createToken('COERCERTLFULL', src[t.COERCEFULL], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') diff --git a/miniprogram/node_modules/semver/package.json b/miniprogram/node_modules/semver/package.json new file mode 100644 index 00000000..2b8cadaa --- /dev/null +++ b/miniprogram/node_modules/semver/package.json @@ -0,0 +1,78 @@ +{ + "name": "semver", + "version": "7.7.3", + "description": "The semantic version parser used by npm.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.25.1", + "benchmark": "^2.1.4", + "tap": "^16.0.0" + }, + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/node-semver.git" + }, + "bin": { + "semver": "bin/semver.js" + }, + "files": [ + "bin/", + "lib/", + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "tap": { + "timeout": 30, + "coverage-map": "map.js", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": ">=10" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.25.1", + "engines": ">=10", + "distPaths": [ + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "allowPaths": [ + "/classes/", + "/functions/", + "/internal/", + "/ranges/", + "/index.js", + "/preload.js", + "/range.bnf", + "/benchmarks" + ], + "publish": "true" + } +} diff --git a/miniprogram/node_modules/semver/preload.js b/miniprogram/node_modules/semver/preload.js new file mode 100644 index 00000000..e6c47b9b --- /dev/null +++ b/miniprogram/node_modules/semver/preload.js @@ -0,0 +1,4 @@ +'use strict' + +// XXX remove in v8 or beyond +module.exports = require('./index.js') diff --git a/miniprogram/node_modules/semver/range.bnf b/miniprogram/node_modules/semver/range.bnf new file mode 100644 index 00000000..d4c6ae0d --- /dev/null +++ b/miniprogram/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/miniprogram/node_modules/semver/ranges/gtr.js b/miniprogram/node_modules/semver/ranges/gtr.js new file mode 100644 index 00000000..0e7601f6 --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/gtr.js @@ -0,0 +1,6 @@ +'use strict' + +// Determine if version is greater than all the versions possible in the range. 
+const outside = require('./outside') +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr diff --git a/miniprogram/node_modules/semver/ranges/intersects.js b/miniprogram/node_modules/semver/ranges/intersects.js new file mode 100644 index 00000000..917be7e4 --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/intersects.js @@ -0,0 +1,9 @@ +'use strict' + +const Range = require('../classes/range') +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects diff --git a/miniprogram/node_modules/semver/ranges/ltr.js b/miniprogram/node_modules/semver/ranges/ltr.js new file mode 100644 index 00000000..aa5e568e --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/ltr.js @@ -0,0 +1,6 @@ +'use strict' + +const outside = require('./outside') +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr diff --git a/miniprogram/node_modules/semver/ranges/max-satisfying.js b/miniprogram/node_modules/semver/ranges/max-satisfying.js new file mode 100644 index 00000000..01fe5ae3 --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/max-satisfying.js @@ -0,0 +1,27 @@ +'use strict' + +const SemVer = require('../classes/semver') +const Range = require('../classes/range') + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying diff --git a/miniprogram/node_modules/semver/ranges/min-satisfying.js b/miniprogram/node_modules/semver/ranges/min-satisfying.js new file mode 100644 index 00000000..af89c8ef --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/min-satisfying.js @@ -0,0 +1,26 @@ +'use strict' + +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying diff --git a/miniprogram/node_modules/semver/ranges/min-version.js b/miniprogram/node_modules/semver/ranges/min-version.js new file mode 100644 index 00000000..09a65aa3 --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/min-version.js @@ -0,0 +1,63 @@ +'use strict' + +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const gt = require('../functions/gt') + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + 
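+    // setMin ends up as the highest lower bound in this comparator set,
+    // i.e. the smallest version that could possibly satisfy it.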
comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. + const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion diff --git a/miniprogram/node_modules/semver/ranges/outside.js b/miniprogram/node_modules/semver/ranges/outside.js new file mode 100644 index 00000000..ca744212 --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/outside.js @@ -0,0 +1,82 @@ +'use strict' + +const SemVer = require('../classes/semver') +const Comparator = require('../classes/comparator') +const { ANY } = Comparator +const Range = require('../classes/range') +const satisfies = require('../functions/satisfies') +const gt = require('../functions/gt') +const lt = require('../functions/lt') +const lte = require('../functions/lte') +const gte = require('../functions/gte') + +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. 
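+  // (e.g. when hilo is '<', gtfn is really lt, so "high" below tracks the
+  // lowest comparator and "low" the highest.)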
+ + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside diff --git a/miniprogram/node_modules/semver/ranges/simplify.js b/miniprogram/node_modules/semver/ranges/simplify.js new file mode 100644 index 00000000..262732e6 --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/simplify.js @@ -0,0 +1,49 @@ +'use strict' + +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? 
simplified : range +} diff --git a/miniprogram/node_modules/semver/ranges/subset.js b/miniprogram/node_modules/semver/ranges/subset.js new file mode 100644 index 00000000..2c49aef1 --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/subset.js @@ -0,0 +1,249 @@ +'use strict' + +const Range = require('../classes/range.js') +const Comparator = require('../classes/comparator.js') +const { ANY } = Comparator +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. 
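+    // Reaching this point means simpleSub did not fit inside any simpleDom.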
+ if (sawNonNull) { + return false + } + } + return true +} + +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. 
+ // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset diff --git a/miniprogram/node_modules/semver/ranges/to-comparators.js b/miniprogram/node_modules/semver/ranges/to-comparators.js new file mode 100644 index 00000000..5be25196 --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/to-comparators.js @@ -0,0 +1,10 @@ +'use strict' + +const Range = require('../classes/range') + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators diff --git a/miniprogram/node_modules/semver/ranges/valid.js b/miniprogram/node_modules/semver/ranges/valid.js new file mode 100644 index 00000000..cc6b0e9f --- /dev/null +++ b/miniprogram/node_modules/semver/ranges/valid.js @@ -0,0 +1,13 @@ +'use strict' + +const Range = require('../classes/range') +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange diff --git a/miniprogram/node_modules/simple-concat/.travis.yml b/miniprogram/node_modules/simple-concat/.travis.yml new file mode 100644 index 00000000..c159f6ac --- /dev/null +++ b/miniprogram/node_modules/simple-concat/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - lts/* diff --git a/miniprogram/node_modules/simple-concat/LICENSE b/miniprogram/node_modules/simple-concat/LICENSE new file mode 100644 index 00000000..c7e68527 --- /dev/null +++ b/miniprogram/node_modules/simple-concat/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/miniprogram/node_modules/simple-concat/README.md b/miniprogram/node_modules/simple-concat/README.md new file mode 100644 index 00000000..b7d39bde --- /dev/null +++ b/miniprogram/node_modules/simple-concat/README.md @@ -0,0 +1,44 @@ +# simple-concat [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/simple-concat/master.svg +[travis-url]: https://travis-ci.org/feross/simple-concat +[npm-image]: https://img.shields.io/npm/v/simple-concat.svg +[npm-url]: https://npmjs.org/package/simple-concat +[downloads-image]: https://img.shields.io/npm/dm/simple-concat.svg +[downloads-url]: https://npmjs.org/package/simple-concat +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +### Super-minimalist version of [`concat-stream`](https://github.com/maxogden/concat-stream). Less than 15 lines! + +## install + +``` +npm install simple-concat +``` + +## usage + +This example is longer than the implementation. + +```js +var s = new stream.PassThrough() +concat(s, function (err, buf) { + if (err) throw err + console.error(buf) +}) +s.write('abc') +setTimeout(function () { + s.write('123') +}, 10) +setTimeout(function () { + s.write('456') +}, 20) +setTimeout(function () { + s.end('789') +}, 30) +``` + +## license + +MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org). diff --git a/miniprogram/node_modules/simple-concat/index.js b/miniprogram/node_modules/simple-concat/index.js new file mode 100644 index 00000000..59237fc6 --- /dev/null +++ b/miniprogram/node_modules/simple-concat/index.js @@ -0,0 +1,15 @@ +/*! simple-concat. MIT License. Feross Aboukhadijeh */ +module.exports = function (stream, cb) { + var chunks = [] + stream.on('data', function (chunk) { + chunks.push(chunk) + }) + stream.once('end', function () { + if (cb) cb(null, Buffer.concat(chunks)) + cb = null + }) + stream.once('error', function (err) { + if (cb) cb(err) + cb = null + }) +} diff --git a/miniprogram/node_modules/simple-concat/package.json b/miniprogram/node_modules/simple-concat/package.json new file mode 100644 index 00000000..2bb2c60a --- /dev/null +++ b/miniprogram/node_modules/simple-concat/package.json @@ -0,0 +1,47 @@ +{ + "name": "simple-concat", + "description": "Super-minimalist version of `concat-stream`. 
Less than 15 lines!", + "version": "1.0.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/simple-concat/issues" + }, + "dependencies": {}, + "devDependencies": { + "standard": "*", + "tape": "^5.0.1" + }, + "homepage": "https://github.com/feross/simple-concat", + "keywords": [ + "concat", + "concat-stream", + "concat stream" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/simple-concat.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/miniprogram/node_modules/simple-concat/test/basic.js b/miniprogram/node_modules/simple-concat/test/basic.js new file mode 100644 index 00000000..4bf6f9c2 --- /dev/null +++ b/miniprogram/node_modules/simple-concat/test/basic.js @@ -0,0 +1,41 @@ +var concat = require('../') +var stream = require('stream') +var test = require('tape') + +test('basic', function (t) { + t.plan(2) + var s = new stream.PassThrough() + concat(s, function (err, buf) { + t.error(err) + t.deepEqual(buf, Buffer.from('abc123456789')) + }) + s.write('abc') + setTimeout(function () { + s.write('123') + }, 10) + setTimeout(function () { + s.write('456') + }, 20) + setTimeout(function () { + s.end('789') + }, 30) +}) + +test('error', function (t) { + t.plan(2) + var s = new stream.PassThrough() + concat(s, function (err, buf) { + t.ok(err, 'got expected error') + t.ok(!buf) + }) + s.write('abc') + setTimeout(function () { + s.write('123') + }, 10) + setTimeout(function () { + s.write('456') + }, 20) + setTimeout(function () { + s.emit('error', new Error('error')) + }, 30) +}) diff --git a/miniprogram/node_modules/simple-get/.github/dependabot.yml b/miniprogram/node_modules/simple-get/.github/dependabot.yml new file mode 100644 index 00000000..0221fbcb --- /dev/null +++ b/miniprogram/node_modules/simple-get/.github/dependabot.yml @@ -0,0 +1,15 @@ +version: 2 +updates: + - package-ecosystem: npm + directory: / + schedule: + interval: daily + labels: + - dependency + versioning-strategy: increase-if-necessary + - package-ecosystem: github-actions + directory: / + schedule: + interval: daily + labels: + - dependency diff --git a/miniprogram/node_modules/simple-get/.github/workflows/ci.yml b/miniprogram/node_modules/simple-get/.github/workflows/ci.yml new file mode 100644 index 00000000..822d21cc --- /dev/null +++ b/miniprogram/node_modules/simple-get/.github/workflows/ci.yml @@ -0,0 +1,23 @@ +name: ci +'on': + - push + - pull_request +jobs: + test: + name: Node ${{ matrix.node }} / ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + node: + - '14' + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 + with: + node-version: ${{ matrix.node }} + - run: npm install + - run: npm run build --if-present + - run: npm test diff --git a/miniprogram/node_modules/simple-get/LICENSE b/miniprogram/node_modules/simple-get/LICENSE new file mode 100644 index 00000000..c7e68527 --- /dev/null +++ b/miniprogram/node_modules/simple-get/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a 
copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/miniprogram/node_modules/simple-get/README.md b/miniprogram/node_modules/simple-get/README.md new file mode 100644 index 00000000..63c6a6ba --- /dev/null +++ b/miniprogram/node_modules/simple-get/README.md @@ -0,0 +1,333 @@ +# simple-get [![ci][ci-image]][ci-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[ci-image]: https://img.shields.io/github/workflow/status/feross/simple-get/ci/master +[ci-url]: https://github.com/feross/simple-get/actions +[npm-image]: https://img.shields.io/npm/v/simple-get.svg +[npm-url]: https://npmjs.org/package/simple-get +[downloads-image]: https://img.shields.io/npm/dm/simple-get.svg +[downloads-url]: https://npmjs.org/package/simple-get +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +### Simplest way to make http get requests + +## features + +This module is the lightest possible wrapper on top of node.js `http`, but supporting these essential features: + +- follows redirects +- automatically handles gzip/deflate responses +- supports HTTPS +- supports specifying a timeout +- supports convenience `url` key so there's no need to use `url.parse` on the url when specifying options +- composes well with npm packages for features like cookies, proxies, form data, & OAuth + +All this in < 100 lines of code. + +## install + +``` +npm install simple-get +``` + +## usage + +Note, all these examples also work in the browser with [browserify](http://browserify.org/). + +### simple GET request + +Doesn't get easier than this: + +```js +const get = require('simple-get') + +get('http://example.com', function (err, res) { + if (err) throw err + console.log(res.statusCode) // 200 + res.pipe(process.stdout) // `res` is a stream +}) +``` + +### even simpler GET request + +If you just want the data, and don't want to deal with streams: + +```js +const get = require('simple-get') + +get.concat('http://example.com', function (err, res, data) { + if (err) throw err + console.log(res.statusCode) // 200 + console.log(data) // Buffer('this is the server response') +}) +``` + +### POST, PUT, PATCH, HEAD, DELETE support + +For `POST`, call `get.post` or use option `{ method: 'POST' }`. 
+ +```js +const get = require('simple-get') + +const opts = { + url: 'http://example.com', + body: 'this is the POST body' +} +get.post(opts, function (err, res) { + if (err) throw err + res.pipe(process.stdout) // `res` is a stream +}) +``` + +#### A more complex example: + +```js +const get = require('simple-get') + +get({ + url: 'http://example.com', + method: 'POST', + body: 'this is the POST body', + + // simple-get accepts all options that node.js `http` accepts + // See: http://nodejs.org/api/http.html#http_http_request_options_callback + headers: { + 'user-agent': 'my cool app' + } +}, function (err, res) { + if (err) throw err + + // All properties/methods from http.IncomingResponse are available, + // even if a gunzip/inflate transform stream was returned. + // See: http://nodejs.org/api/http.html#http_http_incomingmessage + res.setTimeout(10000) + console.log(res.headers) + + res.on('data', function (chunk) { + // `chunk` is the decoded response, after it's been gunzipped or inflated + // (if applicable) + console.log('got a chunk of the response: ' + chunk) + })) + +}) +``` + +### JSON + +You can serialize/deserialize request and response with JSON: + +```js +const get = require('simple-get') + +const opts = { + method: 'POST', + url: 'http://example.com', + body: { + key: 'value' + }, + json: true +} +get.concat(opts, function (err, res, data) { + if (err) throw err + console.log(data.key) // `data` is an object +}) +``` + +### Timeout + +You can set a timeout (in milliseconds) on the request with the `timeout` option. +If the request takes longer than `timeout` to complete, then the entire request +will fail with an `Error`. + +```js +const get = require('simple-get') + +const opts = { + url: 'http://example.com', + timeout: 2000 // 2 second timeout +} + +get(opts, function (err, res) {}) +``` + +### One Quick Tip + +It's a good idea to set the `'user-agent'` header so the provider can more easily +see how their resource is used. 
+ +```js +const get = require('simple-get') +const pkg = require('./package.json') + +get('http://example.com', { + headers: { + 'user-agent': `my-module/${pkg.version} (https://github.com/username/my-module)` + } +}) +``` + +### Proxies + +You can use the [`tunnel`](https://github.com/koichik/node-tunnel) module with the +`agent` option to work with proxies: + +```js +const get = require('simple-get') +const tunnel = require('tunnel') + +const opts = { + url: 'http://example.com', + agent: tunnel.httpOverHttp({ + proxy: { + host: 'localhost' + } + }) +} + +get(opts, function (err, res) {}) +``` + +### Cookies + +You can use the [`cookie`](https://github.com/jshttp/cookie) module to include +cookies in a request: + +```js +const get = require('simple-get') +const cookie = require('cookie') + +const opts = { + url: 'http://example.com', + headers: { + cookie: cookie.serialize('foo', 'bar') + } +} + +get(opts, function (err, res) {}) +``` + +### Form data + +You can use the [`form-data`](https://github.com/form-data/form-data) module to +create POST request with form data: + +```js +const fs = require('fs') +const get = require('simple-get') +const FormData = require('form-data') +const form = new FormData() + +form.append('my_file', fs.createReadStream('/foo/bar.jpg')) + +const opts = { + url: 'http://example.com', + body: form +} + +get.post(opts, function (err, res) {}) +``` + +#### Or, include `application/x-www-form-urlencoded` form data manually: + +```js +const get = require('simple-get') + +const opts = { + url: 'http://example.com', + form: { + key: 'value' + } +} +get.post(opts, function (err, res) {}) +``` + +### Specifically disallowing redirects + +```js +const get = require('simple-get') + +const opts = { + url: 'http://example.com/will-redirect-elsewhere', + followRedirects: false +} +// res.statusCode will be 301, no error thrown +get(opts, function (err, res) {}) +``` + +### Basic Auth + +```js +const user = 'someuser' +const pass = 'pa$$word' +const encodedAuth = Buffer.from(`${user}:${pass}`).toString('base64') + +get('http://example.com', { + headers: { + authorization: `Basic ${encodedAuth}` + } +}) +``` + +### OAuth + +You can use the [`oauth-1.0a`](https://github.com/ddo/oauth-1.0a) module to create +a signed OAuth request: + +```js +const get = require('simple-get') +const crypto = require('crypto') +const OAuth = require('oauth-1.0a') + +const oauth = OAuth({ + consumer: { + key: process.env.CONSUMER_KEY, + secret: process.env.CONSUMER_SECRET + }, + signature_method: 'HMAC-SHA1', + hash_function: (baseString, key) => crypto.createHmac('sha1', key).update(baseString).digest('base64') +}) + +const token = { + key: process.env.ACCESS_TOKEN, + secret: process.env.ACCESS_TOKEN_SECRET +} + +const url = 'https://api.twitter.com/1.1/statuses/home_timeline.json' + +const opts = { + url: url, + headers: oauth.toHeader(oauth.authorize({url, method: 'GET'}, token)), + json: true +} + +get(opts, function (err, res) {}) +``` + +### Throttle requests + +You can use [limiter](https://github.com/jhurliman/node-rate-limiter) to throttle requests. This is useful when calling an API that is rate limited. 
+ +```js +const simpleGet = require('simple-get') +const RateLimiter = require('limiter').RateLimiter +const limiter = new RateLimiter(1, 'second') + +const get = (opts, cb) => limiter.removeTokens(1, () => simpleGet(opts, cb)) +get.concat = (opts, cb) => limiter.removeTokens(1, () => simpleGet.concat(opts, cb)) + +var opts = { + url: 'http://example.com' +} + +get.concat(opts, processResult) +get.concat(opts, processResult) + +function processResult (err, res, data) { + if (err) throw err + console.log(data.toString()) +} +``` + +## license + +MIT. Copyright (c) [Feross Aboukhadijeh](http://feross.org). diff --git a/miniprogram/node_modules/simple-get/index.js b/miniprogram/node_modules/simple-get/index.js new file mode 100644 index 00000000..80e52e8c --- /dev/null +++ b/miniprogram/node_modules/simple-get/index.js @@ -0,0 +1,108 @@ +/*! simple-get. MIT License. Feross Aboukhadijeh */ +module.exports = simpleGet + +const concat = require('simple-concat') +const decompressResponse = require('decompress-response') // excluded from browser build +const http = require('http') +const https = require('https') +const once = require('once') +const querystring = require('querystring') +const url = require('url') + +const isStream = o => o !== null && typeof o === 'object' && typeof o.pipe === 'function' + +function simpleGet (opts, cb) { + opts = Object.assign({ maxRedirects: 10 }, typeof opts === 'string' ? { url: opts } : opts) + cb = once(cb) + + if (opts.url) { + const { hostname, port, protocol, auth, path } = url.parse(opts.url) // eslint-disable-line node/no-deprecated-api + delete opts.url + if (!hostname && !port && !protocol && !auth) opts.path = path // Relative redirect + else Object.assign(opts, { hostname, port, protocol, auth, path }) // Absolute redirect + } + + const headers = { 'accept-encoding': 'gzip, deflate' } + if (opts.headers) Object.keys(opts.headers).forEach(k => (headers[k.toLowerCase()] = opts.headers[k])) + opts.headers = headers + + let body + if (opts.body) { + body = opts.json && !isStream(opts.body) ? JSON.stringify(opts.body) : opts.body + } else if (opts.form) { + body = typeof opts.form === 'string' ? opts.form : querystring.stringify(opts.form) + opts.headers['content-type'] = 'application/x-www-form-urlencoded' + } + + if (body) { + if (!opts.method) opts.method = 'POST' + if (!isStream(body)) opts.headers['content-length'] = Buffer.byteLength(body) + if (opts.json && !opts.form) opts.headers['content-type'] = 'application/json' + } + delete opts.body; delete opts.form + + if (opts.json) opts.headers.accept = 'application/json' + if (opts.method) opts.method = opts.method.toUpperCase() + + const originalHost = opts.hostname // hostname before potential redirect + const protocol = opts.protocol === 'https:' ? 
https : http // Support http/https urls + const req = protocol.request(opts, res => { + if (opts.followRedirects !== false && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) { + opts.url = res.headers.location // Follow 3xx redirects + delete opts.headers.host // Discard `host` header on redirect (see #32) + res.resume() // Discard response + + const redirectHost = url.parse(opts.url).hostname // eslint-disable-line node/no-deprecated-api + // If redirected host is different than original host, drop headers to prevent cookie leak (#73) + if (redirectHost !== null && redirectHost !== originalHost) { + delete opts.headers.cookie + delete opts.headers.authorization + } + + if (opts.method === 'POST' && [301, 302].includes(res.statusCode)) { + opts.method = 'GET' // On 301/302 redirect, change POST to GET (see #35) + delete opts.headers['content-length']; delete opts.headers['content-type'] + } + + if (opts.maxRedirects-- === 0) return cb(new Error('too many redirects')) + else return simpleGet(opts, cb) + } + + const tryUnzip = typeof decompressResponse === 'function' && opts.method !== 'HEAD' + cb(null, tryUnzip ? decompressResponse(res) : res) + }) + req.on('timeout', () => { + req.abort() + cb(new Error('Request timed out')) + }) + req.on('error', cb) + + if (isStream(body)) body.on('error', cb).pipe(req) + else req.end(body) + + return req +} + +simpleGet.concat = (opts, cb) => { + return simpleGet(opts, (err, res) => { + if (err) return cb(err) + concat(res, (err, data) => { + if (err) return cb(err) + if (opts.json) { + try { + data = JSON.parse(data.toString()) + } catch (err) { + return cb(err, res, data) + } + } + cb(null, res, data) + }) + }) +} + +;['get', 'post', 'put', 'patch', 'head', 'delete'].forEach(method => { + simpleGet[method] = (opts, cb) => { + if (typeof opts === 'string') opts = { url: opts } + return simpleGet(Object.assign({ method: method.toUpperCase() }, opts), cb) + } +}) diff --git a/miniprogram/node_modules/simple-get/package.json b/miniprogram/node_modules/simple-get/package.json new file mode 100644 index 00000000..e80fc5eb --- /dev/null +++ b/miniprogram/node_modules/simple-get/package.json @@ -0,0 +1,67 @@ +{ + "name": "simple-get", + "description": "Simplest way to make http get requests. 
Supports HTTPS, redirects, gzip/deflate, streams in < 100 lines.", + "version": "4.0.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "browser": { + "decompress-response": false + }, + "bugs": { + "url": "https://github.com/feross/simple-get/issues" + }, + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + }, + "devDependencies": { + "self-signed-https": "^1.0.5", + "standard": "*", + "string-to-stream": "^3.0.0", + "tape": "^5.0.0" + }, + "homepage": "https://github.com/feross/simple-get", + "keywords": [ + "request", + "http", + "GET", + "get request", + "http.get", + "redirects", + "follow redirects", + "gzip", + "deflate", + "https", + "http-https", + "stream", + "simple request", + "simple get" + ], + "license": "MIT", + "main": "index.js", + "repository": { + "type": "git", + "url": "git://github.com/feross/simple-get.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/miniprogram/node_modules/string_decoder/LICENSE b/miniprogram/node_modules/string_decoder/LICENSE new file mode 100644 index 00000000..778edb20 --- /dev/null +++ b/miniprogram/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/miniprogram/node_modules/string_decoder/README.md b/miniprogram/node_modules/string_decoder/README.md new file mode 100644 index 00000000..5fd58315 --- /dev/null +++ b/miniprogram/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/miniprogram/node_modules/string_decoder/lib/string_decoder.js b/miniprogram/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 00000000..2e89e63f --- /dev/null +++ b/miniprogram/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. 
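+// For example: 0x41 ('A') => 0, 0xC3 => 2, 0xE2 => 3, 0xF0 => 4, and any
+// 10xxxxxx continuation byte (0x80-0xBF) => -1.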
+function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
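+// For example, a chunk ending in the two bytes 0x3D 0xD8 (the lead surrogate
+// U+D83D, little-endian) is buffered until its trailing surrogate arrives.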
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/miniprogram/node_modules/string_decoder/package.json b/miniprogram/node_modules/string_decoder/package.json new file mode 100644 index 00000000..b2bb1411 --- /dev/null +++ b/miniprogram/node_modules/string_decoder/package.json @@ -0,0 +1,34 @@ +{ + "name": "string_decoder", + "version": "1.3.0", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "files": [ + "lib" + ], + "dependencies": { + "safe-buffer": "~5.2.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/miniprogram/node_modules/strip-json-comments/index.js b/miniprogram/node_modules/strip-json-comments/index.js new file mode 100644 index 00000000..4e6576e6 --- /dev/null +++ b/miniprogram/node_modules/strip-json-comments/index.js @@ -0,0 +1,70 @@ +'use strict'; +var singleComment = 1; +var multiComment = 2; + +function stripWithoutWhitespace() { + return ''; +} + +function stripWithWhitespace(str, start, end) { + return str.slice(start, end).replace(/\S/g, ' '); +} + +module.exports = function (str, opts) { + opts = opts || {}; + + var currentChar; + var nextChar; + var insideString = false; + var insideComment = false; + var offset = 0; + var ret = ''; + var strip = opts.whitespace === false ? 
stripWithoutWhitespace : stripWithWhitespace; + + for (var i = 0; i < str.length; i++) { + currentChar = str[i]; + nextChar = str[i + 1]; + + if (!insideComment && currentChar === '"') { + var escaped = str[i - 1] === '\\' && str[i - 2] !== '\\'; + if (!escaped) { + insideString = !insideString; + } + } + + if (insideString) { + continue; + } + + if (!insideComment && currentChar + nextChar === '//') { + ret += str.slice(offset, i); + offset = i; + insideComment = singleComment; + i++; + } else if (insideComment === singleComment && currentChar + nextChar === '\r\n') { + i++; + insideComment = false; + ret += strip(str, offset, i); + offset = i; + continue; + } else if (insideComment === singleComment && currentChar === '\n') { + insideComment = false; + ret += strip(str, offset, i); + offset = i; + } else if (!insideComment && currentChar + nextChar === '/*') { + ret += str.slice(offset, i); + offset = i; + insideComment = multiComment; + i++; + continue; + } else if (insideComment === multiComment && currentChar + nextChar === '*/') { + i++; + insideComment = false; + ret += strip(str, offset, i + 1); + offset = i + 1; + continue; + } + } + + return ret + (insideComment ? strip(str.substr(offset)) : str.substr(offset)); +}; diff --git a/miniprogram/node_modules/strip-json-comments/license b/miniprogram/node_modules/strip-json-comments/license new file mode 100644 index 00000000..654d0bfe --- /dev/null +++ b/miniprogram/node_modules/strip-json-comments/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/miniprogram/node_modules/strip-json-comments/package.json b/miniprogram/node_modules/strip-json-comments/package.json new file mode 100644 index 00000000..288ecc77 --- /dev/null +++ b/miniprogram/node_modules/strip-json-comments/package.json @@ -0,0 +1,42 @@ +{ + "name": "strip-json-comments", + "version": "2.0.1", + "description": "Strip comments from JSON. 
Lets you use comments in your JSON files!", + "license": "MIT", + "repository": "sindresorhus/strip-json-comments", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "json", + "strip", + "remove", + "delete", + "trim", + "comments", + "multiline", + "parse", + "config", + "configuration", + "conf", + "settings", + "util", + "env", + "environment" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/miniprogram/node_modules/strip-json-comments/readme.md b/miniprogram/node_modules/strip-json-comments/readme.md new file mode 100644 index 00000000..0ee58dfe --- /dev/null +++ b/miniprogram/node_modules/strip-json-comments/readme.md @@ -0,0 +1,64 @@ +# strip-json-comments [![Build Status](https://travis-ci.org/sindresorhus/strip-json-comments.svg?branch=master)](https://travis-ci.org/sindresorhus/strip-json-comments) + +> Strip comments from JSON. Lets you use comments in your JSON files! + +This is now possible: + +```js +{ + // rainbows + "unicorn": /* ❤ */ "cake" +} +``` + +It will replace single-line comments `//` and multi-line comments `/**/` with whitespace. This allows JSON error positions to remain as close as possible to the original source. + +Also available as a [gulp](https://github.com/sindresorhus/gulp-strip-json-comments)/[grunt](https://github.com/sindresorhus/grunt-strip-json-comments)/[broccoli](https://github.com/sindresorhus/broccoli-strip-json-comments) plugin. + + +## Install + +``` +$ npm install --save strip-json-comments +``` + + +## Usage + +```js +const json = '{/*rainbows*/"unicorn":"cake"}'; + +JSON.parse(stripJsonComments(json)); +//=> {unicorn: 'cake'} +``` + + +## API + +### stripJsonComments(input, [options]) + +#### input + +Type: `string` + +Accepts a string with JSON and returns a string without comments. + +#### options + +##### whitespace + +Type: `boolean` +Default: `true` + +Replace comments with whitespace instead of stripping them entirely. + + +## Related + +- [strip-json-comments-cli](https://github.com/sindresorhus/strip-json-comments-cli) - CLI for this module +- [strip-css-comments](https://github.com/sindresorhus/strip-css-comments) - Strip comments from CSS + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/miniprogram/node_modules/tar-fs/.travis.yml b/miniprogram/node_modules/tar-fs/.travis.yml new file mode 100644 index 00000000..977f7a61 --- /dev/null +++ b/miniprogram/node_modules/tar-fs/.travis.yml @@ -0,0 +1,6 @@ +language: node_js +node_js: + - 8 + - 10 + - 12 + - 14 diff --git a/miniprogram/node_modules/tar-fs/LICENSE b/miniprogram/node_modules/tar-fs/LICENSE new file mode 100644 index 00000000..757562ec --- /dev/null +++ b/miniprogram/node_modules/tar-fs/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/miniprogram/node_modules/tar-fs/README.md b/miniprogram/node_modules/tar-fs/README.md new file mode 100644 index 00000000..c6d35cfa --- /dev/null +++ b/miniprogram/node_modules/tar-fs/README.md @@ -0,0 +1,165 @@ +# tar-fs + +filesystem bindings for [tar-stream](https://github.com/mafintosh/tar-stream). + +``` +npm install tar-fs +``` + +[![build status](https://secure.travis-ci.org/mafintosh/tar-fs.png)](http://travis-ci.org/mafintosh/tar-fs) + +## Usage + +tar-fs allows you to pack directories into tarballs and extract tarballs into directories. + +It doesn't gunzip for you, so if you want to extract a `.tar.gz` with this you'll need to use something like [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in addition to this. + +``` js +var tar = require('tar-fs') +var fs = require('fs') + +// packing a directory +tar.pack('./my-directory').pipe(fs.createWriteStream('my-tarball.tar')) + +// extracting a directory +fs.createReadStream('my-other-tarball.tar').pipe(tar.extract('./my-other-directory')) +``` + +To ignore various files when packing or extracting, add an ignore function to the options. `ignore` +is also an alias for `filter`. Additionally you get `header` if you use ignore while extracting. +That way you could also filter by metadata. + +``` js +var pack = tar.pack('./my-directory', { + ignore: function(name) { + return path.extname(name) === '.bin' // ignore .bin files when packing + } +}) + +var extract = tar.extract('./my-other-directory', { + ignore: function(name) { + return path.extname(name) === '.bin' // ignore .bin files inside the tarball when extracting + } +}) + +var extractFilesDirs = tar.extract('./my-other-other-directory', { + ignore: function(_, header) { + // pass files & directories, ignore e.g. symlinks + return header.type !== 'file' && header.type !== 'directory' + } +}) +``` + +You can also specify which entries to pack using the `entries` option + +```js +var pack = tar.pack('./my-directory', { + entries: ['file1', 'subdir/file2'] // only the specific entries will be packed +}) +``` + +If you want to modify the headers when packing/extracting, add a map function to the options + +``` js +var pack = tar.pack('./my-directory', { + map: function(header) { + header.name = 'prefixed/'+header.name + return header + } +}) + +var extract = tar.extract('./my-directory', { + map: function(header) { + header.name = 'another-prefix/'+header.name + return header + } +}) + +``` + +Similarly, you can use `mapStream` in case you want to modify the input/output file streams + +``` js +var pack = tar.pack('./my-directory', { + mapStream: function(fileStream, header) { + // NOTE: the returned stream HAS to have the same length as the input stream. + // If not make sure to update the size in the header passed in here.
+ if (path.extname(header.name) === '.js') { + return fileStream.pipe(someTransform) + } + return fileStream; + } +}) + +var extract = tar.extract('./my-directory', { + mapStream: function(fileStream, header) { + if (path.extname(header.name) === '.js') { + return fileStream.pipe(someTransform) + } + return fileStream; + } +}) +``` + +Set `options.fmode` and `options.dmode` to ensure that files/directories extracted have the corresponding modes + +``` js +var extract = tar.extract('./my-directory', { + dmode: parseInt(555, 8), // all dirs should be readable + fmode: parseInt(444, 8) // all files should be readable +}) +``` + +It can be useful to use `dmode` and `fmode` if you are packing/unpacking tarballs between *nix/windows to ensure that all files/directories unpacked are readable. + +Alternatively you can set `options.readable` and/or `options.writable` to set the dmode and fmode to readable/writable. + +``` js +var extract = tar.extract('./my-directory', { + readable: true, // all dirs and files should be readable + writable: true, // all dirs and files should be writable +}) +``` + +Set `options.strict` to `false` if you want to ignore errors due to unsupported entry types (like device files) + +To dereference symlinks (pack the contents of the symlink instead of the link itself) set `options.dereference` to `true`. + +## Copy a directory + +Copying a directory with permissions and mtime intact is as simple as + +``` js +tar.pack('source-directory').pipe(tar.extract('dest-directory')) +``` + +## Interaction with [`tar-stream`](https://github.com/mafintosh/tar-stream) + +Use `finalize: false` and the `finish` hook to +leave the pack stream open for further entries (see +[`tar-stream#pack`](https://github.com/mafintosh/tar-stream#packing)), +and use `pack` to pass an existing pack stream. + +``` js +var mypack = tar.pack('./my-directory', { + finalize: false, + finish: function(sameAsMypack) { + mypack.entry({name: 'generated-file.txt'}, "hello") + tar.pack('./other-directory', { + pack: sameAsMypack + }) + } +}) +``` + + +## Performance + +Packing and extracting a 6.1 GB with 2496 directories and 2398 files yields the following results on my Macbook Air. +[See the benchmark here](https://gist.github.com/mafintosh/8102201) + +* tar-fs: 34.261 seconds +* [node-tar](https://github.com/isaacs/node-tar): 366.123 seconds (or 10x slower) + +## License + +MIT diff --git a/miniprogram/node_modules/tar-fs/index.js b/miniprogram/node_modules/tar-fs/index.js new file mode 100644 index 00000000..4797bc93 --- /dev/null +++ b/miniprogram/node_modules/tar-fs/index.js @@ -0,0 +1,363 @@ +var chownr = require('chownr') +var tar = require('tar-stream') +var pump = require('pump') +var mkdirp = require('mkdirp-classic') +var fs = require('fs') +var path = require('path') +var os = require('os') + +var win32 = os.platform() === 'win32' + +var noop = function () {} + +var echo = function (name) { + return name +} + +var normalize = !win32 ? 
echo : function (name) { + return name.replace(/\\/g, '/').replace(/[:?<>|]/g, '_') +} + +var statAll = function (fs, stat, cwd, ignore, entries, sort) { + var queue = entries || ['.'] + + return function loop (callback) { + if (!queue.length) return callback() + var next = queue.shift() + var nextAbs = path.join(cwd, next) + + stat.call(fs, nextAbs, function (err, stat) { + if (err) return callback(err) + + if (!stat.isDirectory()) return callback(null, next, stat) + + fs.readdir(nextAbs, function (err, files) { + if (err) return callback(err) + + if (sort) files.sort() + for (var i = 0; i < files.length; i++) { + if (!ignore(path.join(cwd, next, files[i]))) queue.push(path.join(next, files[i])) + } + + callback(null, next, stat) + }) + }) + } +} + +var strip = function (map, level) { + return function (header) { + header.name = header.name.split('/').slice(level).join('/') + + var linkname = header.linkname + if (linkname && (header.type === 'link' || path.isAbsolute(linkname))) { + header.linkname = linkname.split('/').slice(level).join('/') + } + + return map(header) + } +} + +exports.pack = function (cwd, opts) { + if (!cwd) cwd = '.' + if (!opts) opts = {} + + var xfs = opts.fs || fs + var ignore = opts.ignore || opts.filter || noop + var map = opts.map || noop + var mapStream = opts.mapStream || echo + var statNext = statAll(xfs, opts.dereference ? xfs.stat : xfs.lstat, cwd, ignore, opts.entries, opts.sort) + var strict = opts.strict !== false + var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask() + var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0 + var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0 + var pack = opts.pack || tar.pack() + var finish = opts.finish || noop + + if (opts.strip) map = strip(map, opts.strip) + + if (opts.readable) { + dmode |= parseInt(555, 8) + fmode |= parseInt(444, 8) + } + if (opts.writable) { + dmode |= parseInt(333, 8) + fmode |= parseInt(222, 8) + } + + var onsymlink = function (filename, header) { + xfs.readlink(path.join(cwd, filename), function (err, linkname) { + if (err) return pack.destroy(err) + header.linkname = normalize(linkname) + pack.entry(header, onnextentry) + }) + } + + var onstat = function (err, filename, stat) { + if (err) return pack.destroy(err) + if (!filename) { + if (opts.finalize !== false) pack.finalize() + return finish(pack) + } + + if (stat.isSocket()) return onnextentry() // tar does not support sockets... + + var header = { + name: normalize(filename), + mode: (stat.mode | (stat.isDirectory() ? dmode : fmode)) & umask, + mtime: stat.mtime, + size: stat.size, + type: 'file', + uid: stat.uid, + gid: stat.gid + } + + if (stat.isDirectory()) { + header.size = 0 + header.type = 'directory' + header = map(header) || header + return pack.entry(header, onnextentry) + } + + if (stat.isSymbolicLink()) { + header.size = 0 + header.type = 'symlink' + header = map(header) || header + return onsymlink(filename, header) + } + + // TODO: add fifo etc... + + header = map(header) || header + + if (!stat.isFile()) { + if (strict) return pack.destroy(new Error('unsupported type for ' + filename)) + return onnextentry() + } + + var entry = pack.entry(header, onnextentry) + if (!entry) return + + var rs = mapStream(xfs.createReadStream(path.join(cwd, filename), { start: 0, end: header.size > 0 ? 
header.size - 1 : header.size }), header) + + rs.on('error', function (err) { // always forward errors on destroy + entry.destroy(err) + }) + + pump(rs, entry) + } + + var onnextentry = function (err) { + if (err) return pack.destroy(err) + statNext(onstat) + } + + onnextentry() + + return pack +} + +var head = function (list) { + return list.length ? list[list.length - 1] : null +} + +var processGetuid = function () { + return process.getuid ? process.getuid() : -1 +} + +var processUmask = function () { + return process.umask ? process.umask() : 0 +} + +exports.extract = function (cwd, opts) { + if (!cwd) cwd = '.' + if (!opts) opts = {} + + var xfs = opts.fs || fs + var ignore = opts.ignore || opts.filter || noop + var map = opts.map || noop + var mapStream = opts.mapStream || echo + var own = opts.chown !== false && !win32 && processGetuid() === 0 + var extract = opts.extract || tar.extract() + var stack = [] + var now = new Date() + var umask = typeof opts.umask === 'number' ? ~opts.umask : ~processUmask() + var dmode = typeof opts.dmode === 'number' ? opts.dmode : 0 + var fmode = typeof opts.fmode === 'number' ? opts.fmode : 0 + var strict = opts.strict !== false + + if (opts.strip) map = strip(map, opts.strip) + + if (opts.readable) { + dmode |= parseInt(555, 8) + fmode |= parseInt(444, 8) + } + if (opts.writable) { + dmode |= parseInt(333, 8) + fmode |= parseInt(222, 8) + } + + var utimesParent = function (name, cb) { // we just set the mtime on the parent dir again everytime we write an entry + var top + while ((top = head(stack)) && name.slice(0, top[0].length) !== top[0]) stack.pop() + if (!top) return cb() + xfs.utimes(top[0], now, top[1], cb) + } + + var utimes = function (name, header, cb) { + if (opts.utimes === false) return cb() + + if (header.type === 'directory') return xfs.utimes(name, now, header.mtime, cb) + if (header.type === 'symlink') return utimesParent(name, cb) // TODO: how to set mtime on link? + + xfs.utimes(name, now, header.mtime, function (err) { + if (err) return cb(err) + utimesParent(name, cb) + }) + } + + var chperm = function (name, header, cb) { + var link = header.type === 'symlink' + + /* eslint-disable node/no-deprecated-api */ + var chmod = link ? xfs.lchmod : xfs.chmod + var chown = link ? xfs.lchown : xfs.chown + /* eslint-enable node/no-deprecated-api */ + + if (!chmod) return cb() + + var mode = (header.mode | (header.type === 'directory' ? 
dmode : fmode)) & umask + + if (chown && own) chown.call(xfs, name, header.uid, header.gid, onchown) + else onchown(null) + + function onchown (err) { + if (err) return cb(err) + if (!chmod) return cb() + chmod.call(xfs, name, mode, cb) + } + } + + extract.on('entry', function (header, stream, next) { + header = map(header) || header + header.name = normalize(header.name) + var name = path.join(cwd, path.join('/', header.name)) + + if (ignore(name, header)) { + stream.resume() + return next() + } + + var stat = function (err) { + if (err) return next(err) + utimes(name, header, function (err) { + if (err) return next(err) + if (win32) return next() + chperm(name, header, next) + }) + } + + var onsymlink = function () { + if (win32) return next() // skip symlinks on win for now before it can be tested + xfs.unlink(name, function () { + var dst = path.resolve(path.dirname(name), header.linkname) + if (!inCwd(dst, cwd)) return next(new Error(name + ' is not a valid symlink')) + + xfs.symlink(header.linkname, name, stat) + }) + } + + var onlink = function () { + if (win32) return next() // skip links on win for now before it can be tested + xfs.unlink(name, function () { + var srcpath = path.join(cwd, path.join('/', header.linkname)) + + xfs.realpath(srcpath, function (err, dst) { + if (err || !inCwd(dst, cwd)) return next(new Error(name + ' is not a valid hardlink')) + + xfs.link(dst, name, function (err) { + if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) { + stream = xfs.createReadStream(srcpath) + return onfile() + } + + stat(err) + }) + }) + }) + } + + var onfile = function () { + var ws = xfs.createWriteStream(name) + var rs = mapStream(stream, header) + + ws.on('error', function (err) { // always forward errors on destroy + rs.destroy(err) + }) + + pump(rs, ws, function (err) { + if (err) return next(err) + ws.on('close', stat) + }) + } + + if (header.type === 'directory') { + stack.push([name, header.mtime]) + return mkdirfix(name, { + fs: xfs, own: own, uid: header.uid, gid: header.gid + }, stat) + } + + var dir = path.dirname(name) + + validate(xfs, dir, path.join(cwd, '.'), function (err, valid) { + if (err) return next(err) + if (!valid) return next(new Error(dir + ' is not a valid path')) + + mkdirfix(dir, { + fs: xfs, own: own, uid: header.uid, gid: header.gid + }, function (err) { + if (err) return next(err) + + switch (header.type) { + case 'file': return onfile() + case 'link': return onlink() + case 'symlink': return onsymlink() + } + + if (strict) return next(new Error('unsupported type for ' + name + ' (' + header.type + ')')) + + stream.resume() + next() + }) + }) + }) + + if (opts.finish) extract.on('finish', opts.finish) + + return extract +} + +function validate (fs, name, root, cb) { + if (name === root) return cb(null, true) + fs.lstat(name, function (err, st) { + if (err && err.code !== 'ENOENT') return cb(err) + if (err || st.isDirectory()) return validate(fs, path.join(name, '..'), root, cb) + cb(null, false) + }) +} + +function mkdirfix (name, opts, cb) { + mkdirp(name, { fs: opts.fs }, function (err, made) { + if (!err && made && opts.own) { + chownr(made, opts.uid, opts.gid, cb) + } else { + cb(err) + } + }) +} + +function inCwd (dst, cwd) { + cwd = path.resolve(cwd) + return cwd === dst || dst.startsWith(cwd + path.sep) +} diff --git a/miniprogram/node_modules/tar-fs/package.json b/miniprogram/node_modules/tar-fs/package.json new file mode 100644 index 00000000..61365778 --- /dev/null +++ b/miniprogram/node_modules/tar-fs/package.json @@ -0,0 
+1,41 @@ +{ + "name": "tar-fs", + "version": "2.1.4", + "description": "filesystem bindings for tar-stream", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + }, + "keywords": [ + "tar", + "fs", + "file", + "tarball", + "directory", + "stream" + ], + "devDependencies": { + "rimraf": "^2.6.3", + "standard": "^13.0.1", + "tape": "^4.9.2" + }, + "scripts": { + "test": "standard && tape test/index.js" + }, + "bugs": { + "url": "https://github.com/mafintosh/tar-fs/issues" + }, + "homepage": "https://github.com/mafintosh/tar-fs", + "main": "index.js", + "directories": { + "test": "test" + }, + "author": "Mathias Buus", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/mafintosh/tar-fs.git" + } +} diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/a/hello.txt b/miniprogram/node_modules/tar-fs/test/fixtures/a/hello.txt new file mode 100644 index 00000000..3b18e512 --- /dev/null +++ b/miniprogram/node_modules/tar-fs/test/fixtures/a/hello.txt @@ -0,0 +1 @@ +hello world diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/b/a/test.txt b/miniprogram/node_modules/tar-fs/test/fixtures/b/a/test.txt new file mode 100644 index 00000000..9daeafb9 --- /dev/null +++ b/miniprogram/node_modules/tar-fs/test/fixtures/b/a/test.txt @@ -0,0 +1 @@ +test diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/d/file1 b/miniprogram/node_modules/tar-fs/test/fixtures/d/file1 new file mode 100644 index 00000000..e69de29b diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/d/file2 b/miniprogram/node_modules/tar-fs/test/fixtures/d/file2 new file mode 100644 index 00000000..e69de29b diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/d/sub-dir/file5 b/miniprogram/node_modules/tar-fs/test/fixtures/d/sub-dir/file5 new file mode 100644 index 00000000..e69de29b diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/d/sub-files/file3 b/miniprogram/node_modules/tar-fs/test/fixtures/d/sub-files/file3 new file mode 100644 index 00000000..e69de29b diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/d/sub-files/file4 b/miniprogram/node_modules/tar-fs/test/fixtures/d/sub-files/file4 new file mode 100644 index 00000000..e69de29b diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/e/directory/.ignore b/miniprogram/node_modules/tar-fs/test/fixtures/e/directory/.ignore new file mode 100644 index 00000000..e69de29b diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/e/file b/miniprogram/node_modules/tar-fs/test/fixtures/e/file new file mode 100644 index 00000000..e69de29b diff --git a/miniprogram/node_modules/tar-fs/test/fixtures/invalid.tar b/miniprogram/node_modules/tar-fs/test/fixtures/invalid.tar new file mode 100644 index 00000000..a645e9ce Binary files /dev/null and b/miniprogram/node_modules/tar-fs/test/fixtures/invalid.tar differ diff --git a/miniprogram/node_modules/tar-fs/test/index.js b/miniprogram/node_modules/tar-fs/test/index.js new file mode 100644 index 00000000..f983b4cd --- /dev/null +++ b/miniprogram/node_modules/tar-fs/test/index.js @@ -0,0 +1,346 @@ +var test = require('tape') +var rimraf = require('rimraf') +var tar = require('../index') +var tarStream = require('tar-stream') +var path = require('path') +var fs = require('fs') +var os = require('os') + +var win32 = os.platform() === 'win32' + +var mtime = function (st) { + return Math.floor(st.mtime.getTime() / 1000) +} + +test('copy a -> copy/a', function (t) { + t.plan(5) + + var a = path.join(__dirname, 
'fixtures', 'a') + var b = path.join(__dirname, 'fixtures', 'copy', 'a') + + rimraf.sync(b) + tar.pack(a) + .pipe(tar.extract(b)) + .on('finish', function () { + var files = fs.readdirSync(b) + t.same(files.length, 1) + t.same(files[0], 'hello.txt') + var fileB = path.join(b, files[0]) + var fileA = path.join(a, files[0]) + t.same(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8')) + t.same(fs.statSync(fileB).mode, fs.statSync(fileA).mode) + t.same(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA))) + }) +}) + +test('copy b -> copy/b', function (t) { + t.plan(8) + + var a = path.join(__dirname, 'fixtures', 'b') + var b = path.join(__dirname, 'fixtures', 'copy', 'b') + + rimraf.sync(b) + tar.pack(a) + .pipe(tar.extract(b)) + .on('finish', function () { + var files = fs.readdirSync(b) + t.same(files.length, 1) + t.same(files[0], 'a') + var dirB = path.join(b, files[0]) + var dirA = path.join(a, files[0]) + t.same(fs.statSync(dirB).mode, fs.statSync(dirA).mode) + t.same(mtime(fs.statSync(dirB)), mtime(fs.statSync(dirA))) + t.ok(fs.statSync(dirB).isDirectory()) + var fileB = path.join(dirB, 'test.txt') + var fileA = path.join(dirA, 'test.txt') + t.same(fs.readFileSync(fileB, 'utf-8'), fs.readFileSync(fileA, 'utf-8')) + t.same(fs.statSync(fileB).mode, fs.statSync(fileA).mode) + t.same(mtime(fs.statSync(fileB)), mtime(fs.statSync(fileA))) + }) +}) + +test('symlink', function (t) { + if (win32) { // no symlink support on win32 currently. TODO: test if this can be enabled somehow + t.plan(1) + t.ok(true) + return + } + + t.plan(5) + + var a = path.join(__dirname, 'fixtures', 'c') + + rimraf.sync(path.join(a, 'link')) + fs.symlinkSync('.gitignore', path.join(a, 'link')) + + var b = path.join(__dirname, 'fixtures', 'copy', 'c') + + rimraf.sync(b) + tar.pack(a) + .pipe(tar.extract(b)) + .on('finish', function () { + var files = fs.readdirSync(b).sort() + t.same(files.length, 2) + t.same(files[0], '.gitignore') + t.same(files[1], 'link') + + var linkA = path.join(a, 'link') + var linkB = path.join(b, 'link') + + t.same(mtime(fs.lstatSync(linkB)), mtime(fs.lstatSync(linkA))) + t.same(fs.readlinkSync(linkB), fs.readlinkSync(linkA)) + }) +}) + +test('follow symlinks', function (t) { + if (win32) { // no symlink support on win32 currently. 
TODO: test if this can be enabled somehow + t.plan(1) + t.ok(true) + return + } + + t.plan(5) + + var a = path.join(__dirname, 'fixtures', 'c') + + rimraf.sync(path.join(a, 'link')) + fs.symlinkSync('.gitignore', path.join(a, 'link')) + + var b = path.join(__dirname, 'fixtures', 'copy', 'c-dereference') + + rimraf.sync(b) + tar.pack(a, { dereference: true }) + .pipe(tar.extract(b)) + .on('finish', function () { + var files = fs.readdirSync(b).sort() + t.same(files.length, 2) + t.same(files[0], '.gitignore') + t.same(files[1], 'link') + + var file1 = path.join(b, '.gitignore') + var file2 = path.join(b, 'link') + + t.same(mtime(fs.lstatSync(file1)), mtime(fs.lstatSync(file2))) + t.same(fs.readFileSync(file1), fs.readFileSync(file2)) + }) +}) + +test('strip', function (t) { + t.plan(2) + + var a = path.join(__dirname, 'fixtures', 'b') + var b = path.join(__dirname, 'fixtures', 'copy', 'b-strip') + + rimraf.sync(b) + + tar.pack(a) + .pipe(tar.extract(b, { strip: 1 })) + .on('finish', function () { + var files = fs.readdirSync(b).sort() + t.same(files.length, 1) + t.same(files[0], 'test.txt') + }) +}) + +test('strip + map', function (t) { + t.plan(2) + + var a = path.join(__dirname, 'fixtures', 'b') + var b = path.join(__dirname, 'fixtures', 'copy', 'b-strip') + + rimraf.sync(b) + + var uppercase = function (header) { + header.name = header.name.toUpperCase() + return header + } + + tar.pack(a) + .pipe(tar.extract(b, { strip: 1, map: uppercase })) + .on('finish', function () { + var files = fs.readdirSync(b).sort() + t.same(files.length, 1) + t.same(files[0], 'TEST.TXT') + }) +}) + +test('map + dir + permissions', function (t) { + t.plan(win32 ? 1 : 2) // skip chmod test, it's not working like unix + + var a = path.join(__dirname, 'fixtures', 'b') + var b = path.join(__dirname, 'fixtures', 'copy', 'a-perms') + + rimraf.sync(b) + + var aWithMode = function (header) { + if (header.name === 'a') { + header.mode = parseInt(700, 8) + } + return header + } + + tar.pack(a) + .pipe(tar.extract(b, { map: aWithMode })) + .on('finish', function () { + var files = fs.readdirSync(b).sort() + var stat = fs.statSync(path.join(b, 'a')) + t.same(files.length, 1) + if (!win32) { + t.same(stat.mode & parseInt(777, 8), parseInt(700, 8)) + } + }) +}) + +test('specific entries', function (t) { + t.plan(6) + + var a = path.join(__dirname, 'fixtures', 'd') + var b = path.join(__dirname, 'fixtures', 'copy', 'd-entries') + + var entries = ['file1', 'sub-files/file3', 'sub-dir'] + + rimraf.sync(b) + tar.pack(a, { entries: entries }) + .pipe(tar.extract(b)) + .on('finish', function () { + var files = fs.readdirSync(b) + t.same(files.length, 3) + t.notSame(files.indexOf('file1'), -1) + t.notSame(files.indexOf('sub-files'), -1) + t.notSame(files.indexOf('sub-dir'), -1) + var subFiles = fs.readdirSync(path.join(b, 'sub-files')) + t.same(subFiles, ['file3']) + var subDir = fs.readdirSync(path.join(b, 'sub-dir')) + t.same(subDir, ['file5']) + }) +}) + +test('check type while mapping header on packing', function (t) { + t.plan(3) + + var e = path.join(__dirname, 'fixtures', 'e') + + var checkHeaderType = function (header) { + if (header.name.indexOf('.') === -1) t.same(header.type, header.name) + } + + tar.pack(e, { map: checkHeaderType }) +}) + +test('finish callbacks', function (t) { + t.plan(3) + + var a = path.join(__dirname, 'fixtures', 'a') + var b = path.join(__dirname, 'fixtures', 'copy', 'a') + + rimraf.sync(b) + + var packEntries = 0 + var extractEntries = 0 + + var countPackEntry = function (header) { packEntries++ 
} + var countExtractEntry = function (header) { extractEntries++ } + + var pack + var onPackFinish = function (passedPack) { + t.equal(packEntries, 2, 'All entries have been packed') // 2 entries - the file and base directory + t.equal(passedPack, pack, 'The finish hook passes the pack') + } + + var onExtractFinish = function () { t.equal(extractEntries, 2) } + + pack = tar.pack(a, { map: countPackEntry, finish: onPackFinish }) + + pack.pipe(tar.extract(b, { map: countExtractEntry, finish: onExtractFinish })) + .on('finish', function () { + t.end() + }) +}) + +test('not finalizing the pack', function (t) { + t.plan(2) + + var a = path.join(__dirname, 'fixtures', 'a') + var b = path.join(__dirname, 'fixtures', 'b') + + var out = path.join(__dirname, 'fixtures', 'copy', 'merged-packs') + + rimraf.sync(out) + + var prefixer = function (prefix) { + return function (header) { + header.name = path.join(prefix, header.name) + return header + } + } + + tar.pack(a, { + map: prefixer('a-files'), + finalize: false, + finish: packB + }) + + function packB (pack) { + tar.pack(b, { pack: pack, map: prefixer('b-files') }) + .pipe(tar.extract(out)) + .on('finish', assertResults) + } + + function assertResults () { + var containers = fs.readdirSync(out) + t.deepEqual(containers, ['a-files', 'b-files']) + var aFiles = fs.readdirSync(path.join(out, 'a-files')) + t.deepEqual(aFiles, ['hello.txt']) + } +}) + +test('do not extract invalid tar', function (t) { + var a = path.join(__dirname, 'fixtures', 'invalid.tar') + + var out = path.join(__dirname, 'fixtures', 'invalid') + + rimraf.sync(out) + + fs.createReadStream(a) + .pipe(tar.extract(out)) + .on('error', function (err) { + t.ok(/is not a valid symlink/i.test(err.message)) + fs.stat(path.join(out, '../bar'), function (err) { + t.ok(err) + t.end() + }) + }) +}) + +test('no abs hardlink targets', function (t) { + var out = path.join(__dirname, 'fixtures', 'invalid') + var outside = path.join(__dirname, 'fixtures', 'outside') + + rimraf.sync(out) + + var s = tarStream.pack() + + fs.writeFileSync(outside, 'something') + + s.entry({ + type: 'link', + name: 'link', + linkname: outside + }) + + s.entry({ + name: 'link' + }, 'overwrite') + + s.finalize() + + s.pipe(tar.extract(out)) + .on('error', function (err) { + t.ok(err, 'had error') + fs.readFile(outside, 'utf-8', function (err, str) { + t.error(err, 'no error') + t.same(str, 'something') + t.end() + }) + }) +}) diff --git a/miniprogram/node_modules/tar-stream/LICENSE b/miniprogram/node_modules/tar-stream/LICENSE new file mode 100644 index 00000000..757562ec --- /dev/null +++ b/miniprogram/node_modules/tar-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Mathias Buus + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/miniprogram/node_modules/tar-stream/README.md b/miniprogram/node_modules/tar-stream/README.md new file mode 100644 index 00000000..2679d9d0 --- /dev/null +++ b/miniprogram/node_modules/tar-stream/README.md @@ -0,0 +1,168 @@ +# tar-stream + +tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system. + +Note that you still need to gunzip your data if you have a `.tar.gz`. We recommend using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in conjunction with this. + +``` +npm install tar-stream +``` + +[![build status](https://secure.travis-ci.org/mafintosh/tar-stream.png)](http://travis-ci.org/mafintosh/tar-stream) +[![License](https://img.shields.io/badge/license-MIT-blue.svg)](http://opensource.org/licenses/MIT) + +## Usage + +tar-stream exposes two streams, [pack](https://github.com/mafintosh/tar-stream#packing) which creates tarballs and [extract](https://github.com/mafintosh/tar-stream#extracting) which extracts tarballs. To [modify an existing tarball](https://github.com/mafintosh/tar-stream#modifying-existing-tarballs) use both. + + +It implementes USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar etc) + +## Related + +If you want to pack/unpack directories on the file system check out [tar-fs](https://github.com/mafintosh/tar-fs) which provides file system bindings to this module. + +## Packing + +To create a pack stream use `tar.pack()` and call `pack.entry(header, [callback])` to add tar entries. + +``` js +var tar = require('tar-stream') +var pack = tar.pack() // pack is a streams2 stream + +// add a file called my-test.txt with the content "Hello World!" +pack.entry({ name: 'my-test.txt' }, 'Hello World!') + +// add a file called my-stream-test.txt from a stream +var entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) { + // the stream was added + // no more entries + pack.finalize() +}) + +entry.write('hello') +entry.write(' ') +entry.write('world') +entry.end() + +// pipe the pack stream somewhere +pack.pipe(process.stdout) +``` + +## Extracting + +To extract a stream use `tar.extract()` and listen for `extract.on('entry', (header, stream, next) )` + +``` js +var extract = tar.extract() + +extract.on('entry', function(header, stream, next) { + // header is the tar header + // stream is the content body (might be an empty stream) + // call next when you are done with this entry + + stream.on('end', function() { + next() // ready for next entry + }) + + stream.resume() // just auto drain the stream +}) + +extract.on('finish', function() { + // all entries read +}) + +pack.pipe(extract) +``` + +The tar archive is streamed sequentially, meaning you **must** drain each entry's stream as you get them or else the main extract stream will receive backpressure and stop reading. + +## Headers + +The header object using in `entry` should contain the following properties. +Most of these values can be found by stat'ing a file. + +``` js +{ + name: 'path/to/this/entry.txt', + size: 1314, // entry size. 
defaults to 0 + mode: 0o644, // entry mode. defaults to to 0o755 for dirs and 0o644 otherwise + mtime: new Date(), // last modified date for entry. defaults to now. + type: 'file', // type of entry. defaults to file. can be: + // file | link | symlink | directory | block-device + // character-device | fifo | contiguous-file + linkname: 'path', // linked file name + uid: 0, // uid of entry owner. defaults to 0 + gid: 0, // gid of entry owner. defaults to 0 + uname: 'maf', // uname of entry owner. defaults to null + gname: 'staff', // gname of entry owner. defaults to null + devmajor: 0, // device major version. defaults to 0 + devminor: 0 // device minor version. defaults to 0 +} +``` + +## Modifying existing tarballs + +Using tar-stream it is easy to rewrite paths / change modes etc in an existing tarball. + +``` js +var extract = tar.extract() +var pack = tar.pack() +var path = require('path') + +extract.on('entry', function(header, stream, callback) { + // let's prefix all names with 'tmp' + header.name = path.join('tmp', header.name) + // write the new entry to the pack stream + stream.pipe(pack.entry(header, callback)) +}) + +extract.on('finish', function() { + // all entries done - lets finalize it + pack.finalize() +}) + +// pipe the old tarball to the extractor +oldTarballStream.pipe(extract) + +// pipe the new tarball the another stream +pack.pipe(newTarballStream) +``` + +## Saving tarball to fs + + +``` js +var fs = require('fs') +var tar = require('tar-stream') + +var pack = tar.pack() // pack is a streams2 stream +var path = 'YourTarBall.tar' +var yourTarball = fs.createWriteStream(path) + +// add a file called YourFile.txt with the content "Hello World!" +pack.entry({name: 'YourFile.txt'}, 'Hello World!', function (err) { + if (err) throw err + pack.finalize() +}) + +// pipe the pack stream to your file +pack.pipe(yourTarball) + +yourTarball.on('close', function () { + console.log(path + ' has been written') + fs.stat(path, function(err, stats) { + if (err) throw err + console.log(stats) + console.log('Got file info successfully!') + }) +}) +``` + +## Performance + +[See tar-fs for a performance comparison with node-tar](https://github.com/mafintosh/tar-fs/blob/master/README.md#performance) + +# License + +MIT diff --git a/miniprogram/node_modules/tar-stream/extract.js b/miniprogram/node_modules/tar-stream/extract.js new file mode 100644 index 00000000..11b13b7c --- /dev/null +++ b/miniprogram/node_modules/tar-stream/extract.js @@ -0,0 +1,257 @@ +var util = require('util') +var bl = require('bl') +var headers = require('./headers') + +var Writable = require('readable-stream').Writable +var PassThrough = require('readable-stream').PassThrough + +var noop = function () {} + +var overflow = function (size) { + size &= 511 + return size && 512 - size +} + +var emptyStream = function (self, offset) { + var s = new Source(self, offset) + s.end() + return s +} + +var mixinPax = function (header, pax) { + if (pax.path) header.name = pax.path + if (pax.linkpath) header.linkname = pax.linkpath + if (pax.size) header.size = parseInt(pax.size, 10) + header.pax = pax + return header +} + +var Source = function (self, offset) { + this._parent = self + this.offset = offset + PassThrough.call(this, { autoDestroy: false }) +} + +util.inherits(Source, PassThrough) + +Source.prototype.destroy = function (err) { + this._parent.destroy(err) +} + +var Extract = function (opts) { + if (!(this instanceof Extract)) return new Extract(opts) + Writable.call(this, opts) + + opts = opts || {} + + 
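+  // Parser state for the streaming extractor: byte offset into the archive, buffered input chunks, and how many bytes the pending parse step still needs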
this._offset = 0 + this._buffer = bl() + this._missing = 0 + this._partial = false + this._onparse = noop + this._header = null + this._stream = null + this._overflow = null + this._cb = null + this._locked = false + this._destroyed = false + this._pax = null + this._paxGlobal = null + this._gnuLongPath = null + this._gnuLongLinkPath = null + + var self = this + var b = self._buffer + + var oncontinue = function () { + self._continue() + } + + var onunlock = function (err) { + self._locked = false + if (err) return self.destroy(err) + if (!self._stream) oncontinue() + } + + var onstreamend = function () { + self._stream = null + var drain = overflow(self._header.size) + if (drain) self._parse(drain, ondrain) + else self._parse(512, onheader) + if (!self._locked) oncontinue() + } + + var ondrain = function () { + self._buffer.consume(overflow(self._header.size)) + self._parse(512, onheader) + oncontinue() + } + + var onpaxglobalheader = function () { + var size = self._header.size + self._paxGlobal = headers.decodePax(b.slice(0, size)) + b.consume(size) + onstreamend() + } + + var onpaxheader = function () { + var size = self._header.size + self._pax = headers.decodePax(b.slice(0, size)) + if (self._paxGlobal) self._pax = Object.assign({}, self._paxGlobal, self._pax) + b.consume(size) + onstreamend() + } + + var ongnulongpath = function () { + var size = self._header.size + this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding) + b.consume(size) + onstreamend() + } + + var ongnulonglinkpath = function () { + var size = self._header.size + this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding) + b.consume(size) + onstreamend() + } + + var onheader = function () { + var offset = self._offset + var header + try { + header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding, opts.allowUnknownFormat) + } catch (err) { + self.emit('error', err) + } + b.consume(512) + + if (!header) { + self._parse(512, onheader) + oncontinue() + return + } + if (header.type === 'gnu-long-path') { + self._parse(header.size, ongnulongpath) + oncontinue() + return + } + if (header.type === 'gnu-long-link-path') { + self._parse(header.size, ongnulonglinkpath) + oncontinue() + return + } + if (header.type === 'pax-global-header') { + self._parse(header.size, onpaxglobalheader) + oncontinue() + return + } + if (header.type === 'pax-header') { + self._parse(header.size, onpaxheader) + oncontinue() + return + } + + if (self._gnuLongPath) { + header.name = self._gnuLongPath + self._gnuLongPath = null + } + + if (self._gnuLongLinkPath) { + header.linkname = self._gnuLongLinkPath + self._gnuLongLinkPath = null + } + + if (self._pax) { + self._header = header = mixinPax(header, self._pax) + self._pax = null + } + + self._locked = true + + if (!header.size || header.type === 'directory') { + self._parse(512, onheader) + self.emit('entry', header, emptyStream(self, offset), onunlock) + return + } + + self._stream = new Source(self, offset) + + self.emit('entry', header, self._stream, onunlock) + self._parse(header.size, onstreamend) + oncontinue() + } + + this._onheader = onheader + this._parse(512, onheader) +} + +util.inherits(Extract, Writable) + +Extract.prototype.destroy = function (err) { + if (this._destroyed) return + this._destroyed = true + + if (err) this.emit('error', err) + this.emit('close') + if (this._stream) this._stream.emit('close') +} + +Extract.prototype._parse = function (size, onparse) { + if (this._destroyed) return + 
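+  // Schedule the next parse step: `size` more bytes must be buffered before `onparse` runs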
this._offset += size + this._missing = size + if (onparse === this._onheader) this._partial = false + this._onparse = onparse +} + +Extract.prototype._continue = function () { + if (this._destroyed) return + var cb = this._cb + this._cb = noop + if (this._overflow) this._write(this._overflow, undefined, cb) + else cb() +} + +Extract.prototype._write = function (data, enc, cb) { + if (this._destroyed) return + + var s = this._stream + var b = this._buffer + var missing = this._missing + if (data.length) this._partial = true + + // we do not reach end-of-chunk now. just forward it + + if (data.length < missing) { + this._missing -= data.length + this._overflow = null + if (s) return s.write(data, cb) + b.append(data) + return cb() + } + + // end-of-chunk. the parser should call cb. + + this._cb = cb + this._missing = 0 + + var overflow = null + if (data.length > missing) { + overflow = data.slice(missing) + data = data.slice(0, missing) + } + + if (s) s.end(data) + else b.append(data) + + this._overflow = overflow + this._onparse() +} + +Extract.prototype._final = function (cb) { + if (this._partial) return this.destroy(new Error('Unexpected end of data')) + cb() +} + +module.exports = Extract diff --git a/miniprogram/node_modules/tar-stream/headers.js b/miniprogram/node_modules/tar-stream/headers.js new file mode 100644 index 00000000..aba4ca49 --- /dev/null +++ b/miniprogram/node_modules/tar-stream/headers.js @@ -0,0 +1,295 @@ +var alloc = Buffer.alloc + +var ZEROS = '0000000000000000000' +var SEVENS = '7777777777777777777' +var ZERO_OFFSET = '0'.charCodeAt(0) +var USTAR_MAGIC = Buffer.from('ustar\x00', 'binary') +var USTAR_VER = Buffer.from('00', 'binary') +var GNU_MAGIC = Buffer.from('ustar\x20', 'binary') +var GNU_VER = Buffer.from('\x20\x00', 'binary') +var MASK = parseInt('7777', 8) +var MAGIC_OFFSET = 257 +var VERSION_OFFSET = 263 + +var clamp = function (index, len, defaultValue) { + if (typeof index !== 'number') return defaultValue + index = ~~index // Coerce to integer. 
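+  // Clamp the index into [0, len]; negative indexes count back from `len`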
+ if (index >= len) return len + if (index >= 0) return index + index += len + if (index >= 0) return index + return 0 +} + +var toType = function (flag) { + switch (flag) { + case 0: + return 'file' + case 1: + return 'link' + case 2: + return 'symlink' + case 3: + return 'character-device' + case 4: + return 'block-device' + case 5: + return 'directory' + case 6: + return 'fifo' + case 7: + return 'contiguous-file' + case 72: + return 'pax-header' + case 55: + return 'pax-global-header' + case 27: + return 'gnu-long-link-path' + case 28: + case 30: + return 'gnu-long-path' + } + + return null +} + +var toTypeflag = function (flag) { + switch (flag) { + case 'file': + return 0 + case 'link': + return 1 + case 'symlink': + return 2 + case 'character-device': + return 3 + case 'block-device': + return 4 + case 'directory': + return 5 + case 'fifo': + return 6 + case 'contiguous-file': + return 7 + case 'pax-header': + return 72 + } + + return 0 +} + +var indexOf = function (block, num, offset, end) { + for (; offset < end; offset++) { + if (block[offset] === num) return offset + } + return end +} + +var cksum = function (block) { + var sum = 8 * 32 + for (var i = 0; i < 148; i++) sum += block[i] + for (var j = 156; j < 512; j++) sum += block[j] + return sum +} + +var encodeOct = function (val, n) { + val = val.toString(8) + if (val.length > n) return SEVENS.slice(0, n) + ' ' + else return ZEROS.slice(0, n - val.length) + val + ' ' +} + +/* Copied from the node-tar repo and modified to meet + * tar-stream coding standard. + * + * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349 + */ +function parse256 (buf) { + // first byte MUST be either 80 or FF + // 80 for positive, FF for 2's comp + var positive + if (buf[0] === 0x80) positive = true + else if (buf[0] === 0xFF) positive = false + else return null + + // build up a base-256 tuple from the least sig to the highest + var tuple = [] + for (var i = buf.length - 1; i > 0; i--) { + var byte = buf[i] + if (positive) tuple.push(byte) + else tuple.push(0xFF - byte) + } + + var sum = 0 + var l = tuple.length + for (i = 0; i < l; i++) { + sum += tuple[i] * Math.pow(256, i) + } + + return positive ? 
sum : -1 * sum +} + +var decodeOct = function (val, offset, length) { + val = val.slice(offset, offset + length) + offset = 0 + + // If prefixed with 0x80 then parse as a base-256 integer + if (val[offset] & 0x80) { + return parse256(val) + } else { + // Older versions of tar can prefix with spaces + while (offset < val.length && val[offset] === 32) offset++ + var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length) + while (offset < end && val[offset] === 0) offset++ + if (end === offset) return 0 + return parseInt(val.slice(offset, end).toString(), 8) + } +} + +var decodeStr = function (val, offset, length, encoding) { + return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString(encoding) +} + +var addLength = function (str) { + var len = Buffer.byteLength(str) + var digits = Math.floor(Math.log(len) / Math.log(10)) + 1 + if (len + digits >= Math.pow(10, digits)) digits++ + + return (len + digits) + str +} + +exports.decodeLongPath = function (buf, encoding) { + return decodeStr(buf, 0, buf.length, encoding) +} + +exports.encodePax = function (opts) { // TODO: encode more stuff in pax + var result = '' + if (opts.name) result += addLength(' path=' + opts.name + '\n') + if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n') + var pax = opts.pax + if (pax) { + for (var key in pax) { + result += addLength(' ' + key + '=' + pax[key] + '\n') + } + } + return Buffer.from(result) +} + +exports.decodePax = function (buf) { + var result = {} + + while (buf.length) { + var i = 0 + while (i < buf.length && buf[i] !== 32) i++ + var len = parseInt(buf.slice(0, i).toString(), 10) + if (!len) return result + + var b = buf.slice(i + 1, len - 1).toString() + var keyIndex = b.indexOf('=') + if (keyIndex === -1) return result + result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1) + + buf = buf.slice(len) + } + + return result +} + +exports.encode = function (opts) { + var buf = alloc(512) + var name = opts.name + var prefix = '' + + if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/' + if (Buffer.byteLength(name) !== name.length) return null // utf-8 + + while (Buffer.byteLength(name) > 100) { + var i = name.indexOf('/') + if (i === -1) return null + prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i) + name = name.slice(i + 1) + } + + if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null + if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null + + buf.write(name) + buf.write(encodeOct(opts.mode & MASK, 6), 100) + buf.write(encodeOct(opts.uid, 6), 108) + buf.write(encodeOct(opts.gid, 6), 116) + buf.write(encodeOct(opts.size, 11), 124) + buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136) + + buf[156] = ZERO_OFFSET + toTypeflag(opts.type) + + if (opts.linkname) buf.write(opts.linkname, 157) + + USTAR_MAGIC.copy(buf, MAGIC_OFFSET) + USTAR_VER.copy(buf, VERSION_OFFSET) + if (opts.uname) buf.write(opts.uname, 265) + if (opts.gname) buf.write(opts.gname, 297) + buf.write(encodeOct(opts.devmajor || 0, 6), 329) + buf.write(encodeOct(opts.devminor || 0, 6), 337) + + if (prefix) buf.write(prefix, 345) + + buf.write(encodeOct(cksum(buf), 6), 148) + + return buf +} + +exports.decode = function (buf, filenameEncoding, allowUnknownFormat) { + var typeflag = buf[156] === 0 ? 
0 : buf[156] - ZERO_OFFSET + + var name = decodeStr(buf, 0, 100, filenameEncoding) + var mode = decodeOct(buf, 100, 8) + var uid = decodeOct(buf, 108, 8) + var gid = decodeOct(buf, 116, 8) + var size = decodeOct(buf, 124, 12) + var mtime = decodeOct(buf, 136, 12) + var type = toType(typeflag) + var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding) + var uname = decodeStr(buf, 265, 32) + var gname = decodeStr(buf, 297, 32) + var devmajor = decodeOct(buf, 329, 8) + var devminor = decodeOct(buf, 337, 8) + + var c = cksum(buf) + + // checksum is still initial value if header was null. + if (c === 8 * 32) return null + + // valid checksum + if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?') + + if (USTAR_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0) { + // ustar (posix) format. + // prepend prefix, if present. + if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name + } else if (GNU_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0 && + GNU_VER.compare(buf, VERSION_OFFSET, VERSION_OFFSET + 2) === 0) { + // 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and + // multi-volume tarballs. + } else { + if (!allowUnknownFormat) { + throw new Error('Invalid tar header: unknown format.') + } + } + + // to support old tar versions that use trailing / to indicate dirs + if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5 + + return { + name, + mode, + uid, + gid, + size, + mtime: new Date(1000 * mtime), + type, + linkname, + uname, + gname, + devmajor, + devminor + } +} diff --git a/miniprogram/node_modules/tar-stream/index.js b/miniprogram/node_modules/tar-stream/index.js new file mode 100644 index 00000000..64817048 --- /dev/null +++ b/miniprogram/node_modules/tar-stream/index.js @@ -0,0 +1,2 @@ +exports.extract = require('./extract') +exports.pack = require('./pack') diff --git a/miniprogram/node_modules/tar-stream/pack.js b/miniprogram/node_modules/tar-stream/pack.js new file mode 100644 index 00000000..f1da3b73 --- /dev/null +++ b/miniprogram/node_modules/tar-stream/pack.js @@ -0,0 +1,255 @@ +var constants = require('fs-constants') +var eos = require('end-of-stream') +var inherits = require('inherits') +var alloc = Buffer.alloc + +var Readable = require('readable-stream').Readable +var Writable = require('readable-stream').Writable +var StringDecoder = require('string_decoder').StringDecoder + +var headers = require('./headers') + +var DMODE = parseInt('755', 8) +var FMODE = parseInt('644', 8) + +var END_OF_TAR = alloc(1024) + +var noop = function () {} + +var overflow = function (self, size) { + size &= 511 + if (size) self.push(END_OF_TAR.slice(0, 512 - size)) +} + +function modeToType (mode) { + switch (mode & constants.S_IFMT) { + case constants.S_IFBLK: return 'block-device' + case constants.S_IFCHR: return 'character-device' + case constants.S_IFDIR: return 'directory' + case constants.S_IFIFO: return 'fifo' + case constants.S_IFLNK: return 'symlink' + } + + return 'file' +} + +var Sink = function (to) { + Writable.call(this) + this.written = 0 + this._to = to + this._destroyed = false +} + +inherits(Sink, Writable) + +Sink.prototype._write = function (data, enc, cb) { + this.written += data.length + if (this._to.push(data)) return cb() + this._to._drain = cb +} + +Sink.prototype.destroy = function () { + if (this._destroyed) return + this._destroyed = true + this.emit('close') +} + +var LinkSink = 
function () { + Writable.call(this) + this.linkname = '' + this._decoder = new StringDecoder('utf-8') + this._destroyed = false +} + +inherits(LinkSink, Writable) + +LinkSink.prototype._write = function (data, enc, cb) { + this.linkname += this._decoder.write(data) + cb() +} + +LinkSink.prototype.destroy = function () { + if (this._destroyed) return + this._destroyed = true + this.emit('close') +} + +var Void = function () { + Writable.call(this) + this._destroyed = false +} + +inherits(Void, Writable) + +Void.prototype._write = function (data, enc, cb) { + cb(new Error('No body allowed for this entry')) +} + +Void.prototype.destroy = function () { + if (this._destroyed) return + this._destroyed = true + this.emit('close') +} + +var Pack = function (opts) { + if (!(this instanceof Pack)) return new Pack(opts) + Readable.call(this, opts) + + this._drain = noop + this._finalized = false + this._finalizing = false + this._destroyed = false + this._stream = null +} + +inherits(Pack, Readable) + +Pack.prototype.entry = function (header, buffer, callback) { + if (this._stream) throw new Error('already piping an entry') + if (this._finalized || this._destroyed) return + + if (typeof buffer === 'function') { + callback = buffer + buffer = null + } + + if (!callback) callback = noop + + var self = this + + if (!header.size || header.type === 'symlink') header.size = 0 + if (!header.type) header.type = modeToType(header.mode) + if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE + if (!header.uid) header.uid = 0 + if (!header.gid) header.gid = 0 + if (!header.mtime) header.mtime = new Date() + + if (typeof buffer === 'string') buffer = Buffer.from(buffer) + if (Buffer.isBuffer(buffer)) { + header.size = buffer.length + this._encode(header) + var ok = this.push(buffer) + overflow(self, header.size) + if (ok) process.nextTick(callback) + else this._drain = callback + return new Void() + } + + if (header.type === 'symlink' && !header.linkname) { + var linkSink = new LinkSink() + eos(linkSink, function (err) { + if (err) { // stream was closed + self.destroy() + return callback(err) + } + + header.linkname = linkSink.linkname + self._encode(header) + callback() + }) + + return linkSink + } + + this._encode(header) + + if (header.type !== 'file' && header.type !== 'contiguous-file') { + process.nextTick(callback) + return new Void() + } + + var sink = new Sink(this) + + this._stream = sink + + eos(sink, function (err) { + self._stream = null + + if (err) { // stream was closed + self.destroy() + return callback(err) + } + + if (sink.written !== header.size) { // corrupting tar + self.destroy() + return callback(new Error('size mismatch')) + } + + overflow(self, header.size) + if (self._finalizing) self.finalize() + callback() + }) + + return sink +} + +Pack.prototype.finalize = function () { + if (this._stream) { + this._finalizing = true + return + } + + if (this._finalized) return + this._finalized = true + this.push(END_OF_TAR) + this.push(null) +} + +Pack.prototype.destroy = function (err) { + if (this._destroyed) return + this._destroyed = true + + if (err) this.emit('error', err) + this.emit('close') + if (this._stream && this._stream.destroy) this._stream.destroy() +} + +Pack.prototype._encode = function (header) { + if (!header.pax) { + var buf = headers.encode(header) + if (buf) { + this.push(buf) + return + } + } + this._encodePax(header) +} + +Pack.prototype._encodePax = function (header) { + var paxHeader = headers.encodePax({ + name: header.name, + linkname: 
header.linkname, + pax: header.pax + }) + + var newHeader = { + name: 'PaxHeader', + mode: header.mode, + uid: header.uid, + gid: header.gid, + size: paxHeader.length, + mtime: header.mtime, + type: 'pax-header', + linkname: header.linkname && 'PaxHeader', + uname: header.uname, + gname: header.gname, + devmajor: header.devmajor, + devminor: header.devminor + } + + this.push(headers.encode(newHeader)) + this.push(paxHeader) + overflow(this, paxHeader.length) + + newHeader.size = header.size + newHeader.type = header.type + this.push(headers.encode(newHeader)) +} + +Pack.prototype._read = function (n) { + var drain = this._drain + this._drain = noop + drain() +} + +module.exports = Pack diff --git a/miniprogram/node_modules/tar-stream/package.json b/miniprogram/node_modules/tar-stream/package.json new file mode 100644 index 00000000..d717dfcc --- /dev/null +++ b/miniprogram/node_modules/tar-stream/package.json @@ -0,0 +1,58 @@ +{ + "name": "tar-stream", + "version": "2.2.0", + "description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.", + "author": "Mathias Buus ", + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "devDependencies": { + "concat-stream": "^2.0.0", + "standard": "^12.0.1", + "tape": "^4.9.2" + }, + "scripts": { + "test": "standard && tape test/extract.js test/pack.js", + "test-all": "standard && tape test/*.js" + }, + "keywords": [ + "tar", + "tarball", + "parse", + "parser", + "generate", + "generator", + "stream", + "stream2", + "streams", + "streams2", + "streaming", + "pack", + "extract", + "modify" + ], + "bugs": { + "url": "https://github.com/mafintosh/tar-stream/issues" + }, + "homepage": "https://github.com/mafintosh/tar-stream", + "main": "index.js", + "files": [ + "*.js", + "LICENSE" + ], + "directories": { + "test": "test" + }, + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/mafintosh/tar-stream.git" + }, + "engines": { + "node": ">=6" + } +} diff --git a/miniprogram/node_modules/tar-stream/sandbox.js b/miniprogram/node_modules/tar-stream/sandbox.js new file mode 100644 index 00000000..9b82d401 --- /dev/null +++ b/miniprogram/node_modules/tar-stream/sandbox.js @@ -0,0 +1,11 @@ +const tar = require('tar-stream') +const fs = require('fs') +const path = require('path') +const pipeline = require('pump') // eequire('stream').pipeline + +fs.createReadStream('test.tar') + .pipe(tar.extract()) + .on('entry', function (header, stream, done) { + console.log(header.name) + pipeline(stream, fs.createWriteStream(path.join('/tmp', header.name)), done) + }) diff --git a/miniprogram/node_modules/tunnel-agent/LICENSE b/miniprogram/node_modules/tunnel-agent/LICENSE new file mode 100644 index 00000000..a4a9aee0 --- /dev/null +++ b/miniprogram/node_modules/tunnel-agent/LICENSE @@ -0,0 +1,55 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
+ +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/miniprogram/node_modules/tunnel-agent/README.md b/miniprogram/node_modules/tunnel-agent/README.md new file mode 100644 index 00000000..bb533d56 --- /dev/null +++ b/miniprogram/node_modules/tunnel-agent/README.md @@ -0,0 +1,4 @@ +tunnel-agent +============ + +HTTP proxy tunneling agent. Formerly part of mikeal/request, now a standalone module. 
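The tunnel-agent index.js that follows exposes four factory functions (httpOverHttp, httpsOverHttp, httpOverHttps, httpsOverHttps), each returning an agent that tunnels traffic through a CONNECT request to the configured proxy. As a minimal usage sketch (not part of this diff; the host names and port below are placeholders):

```javascript
// Sketch only: send an HTTPS request through an HTTP proxy using tunnel-agent.
var https = require('https')
var tunnel = require('tunnel-agent')

// TunnelingAgent reads the proxy host/port from options.proxy.
var agent = tunnel.httpsOverHttp({
  proxy: { host: 'proxy.example.com', port: 8080 } // placeholder proxy
})

// The agent is passed like any other Node.js http(s) agent.
https.get({ host: 'example.com', path: '/', agent: agent }, function (res) {
  console.log('status code:', res.statusCode)
})
```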
diff --git a/miniprogram/node_modules/tunnel-agent/index.js b/miniprogram/node_modules/tunnel-agent/index.js new file mode 100644 index 00000000..3ee9abc5 --- /dev/null +++ b/miniprogram/node_modules/tunnel-agent/index.js @@ -0,0 +1,244 @@ +'use strict' + +var net = require('net') + , tls = require('tls') + , http = require('http') + , https = require('https') + , events = require('events') + , assert = require('assert') + , util = require('util') + , Buffer = require('safe-buffer').Buffer + ; + +exports.httpOverHttp = httpOverHttp +exports.httpsOverHttp = httpsOverHttp +exports.httpOverHttps = httpOverHttps +exports.httpsOverHttps = httpsOverHttps + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options) + agent.request = http.request + return agent +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options) + agent.request = http.request + agent.createSocket = createSecureSocket + agent.defaultPort = 443 + return agent +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options) + agent.request = https.request + return agent +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options) + agent.request = https.request + agent.createSocket = createSecureSocket + agent.defaultPort = 443 + return agent +} + + +function TunnelingAgent(options) { + var self = this + self.options = options || {} + self.proxyOptions = self.options.proxy || {} + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets + self.requests = [] + self.sockets = [] + + self.on('free', function onFree(socket, host, port) { + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i] + if (pending.host === host && pending.port === port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1) + pending.request.onSocket(socket) + return + } + } + socket.destroy() + self.removeSocket(socket) + }) +} +util.inherits(TunnelingAgent, events.EventEmitter) + +TunnelingAgent.prototype.addRequest = function addRequest(req, options) { + var self = this + + // Legacy API: addRequest(req, host, port, path) + if (typeof options === 'string') { + options = { + host: options, + port: arguments[2], + path: arguments[3] + }; + } + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push({host: options.host, port: options.port, request: req}) + return + } + + // If we are under maxSockets create a new one. 
+ self.createConnection({host: options.host, port: options.port, request: req}) +} + +TunnelingAgent.prototype.createConnection = function createConnection(pending) { + var self = this + + self.createSocket(pending, function(socket) { + socket.on('free', onFree) + socket.on('close', onCloseOrRemove) + socket.on('agentRemove', onCloseOrRemove) + pending.request.onSocket(socket) + + function onFree() { + self.emit('free', socket, pending.host, pending.port) + } + + function onCloseOrRemove(err) { + self.removeSocket(socket) + socket.removeListener('free', onFree) + socket.removeListener('close', onCloseOrRemove) + socket.removeListener('agentRemove', onCloseOrRemove) + } + }) +} + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this + var placeholder = {} + self.sockets.push(placeholder) + + var connectOptions = mergeOptions({}, self.proxyOptions, + { method: 'CONNECT' + , path: options.host + ':' + options.port + , agent: false + } + ) + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {} + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + Buffer.from(connectOptions.proxyAuth).toString('base64') + } + + debug('making CONNECT request') + var connectReq = self.request(connectOptions) + connectReq.useChunkedEncodingByDefault = false // for v0.6 + connectReq.once('response', onResponse) // for v0.6 + connectReq.once('upgrade', onUpgrade) // for v0.6 + connectReq.once('connect', onConnect) // for v0.7 or later + connectReq.once('error', onError) + connectReq.end() + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head) + }) + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners() + socket.removeAllListeners() + + if (res.statusCode === 200) { + assert.equal(head.length, 0) + debug('tunneling connection has established') + self.sockets[self.sockets.indexOf(placeholder)] = socket + cb(socket) + } else { + debug('tunneling socket could not be established, statusCode=%d', res.statusCode) + var error = new Error('tunneling socket could not be established, ' + 'statusCode=' + res.statusCode) + error.code = 'ECONNRESET' + options.request.emit('error', error) + self.removeSocket(placeholder) + } + } + + function onError(cause) { + connectReq.removeAllListeners() + + debug('tunneling socket could not be established, cause=%s\n', cause.message, cause.stack) + var error = new Error('tunneling socket could not be established, ' + 'cause=' + cause.message) + error.code = 'ECONNRESET' + options.request.emit('error', error) + self.removeSocket(placeholder) + } +} + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) return + + this.sockets.splice(pos, 1) + + var pending = this.requests.shift() + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createConnection(pending) + } +} + +function createSecureSocket(options, cb) { + var self = this + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, mergeOptions({}, self.options, + { servername: options.host + , socket: socket + } + )) + self.sockets[self.sockets.indexOf(socket)] = secureSocket + cb(secureSocket) + }) +} + + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i] + if (typeof overrides === 'object') { + var keys = Object.keys(overrides) + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j] + if (overrides[k] !== undefined) { + target[k] = overrides[k] + } + } + } + } + return target +} + + +var debug +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments) + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0] + } else { + args.unshift('TUNNEL:') + } + console.error.apply(console, args) + } +} else { + debug = function() {} +} +exports.debug = debug // for test diff --git a/miniprogram/node_modules/tunnel-agent/package.json b/miniprogram/node_modules/tunnel-agent/package.json new file mode 100644 index 00000000..a271fda9 --- /dev/null +++ b/miniprogram/node_modules/tunnel-agent/package.json @@ -0,0 +1,22 @@ +{ + "author": "Mikeal Rogers (http://www.futurealoof.com)", + "name": "tunnel-agent", + "license": "Apache-2.0", + "description": "HTTP proxy tunneling agent. Formerly part of mikeal/request, now a standalone module.", + "version": "0.6.0", + "repository": { + "url": "https://github.com/mikeal/tunnel-agent" + }, + "main": "index.js", + "files": [ + "index.js" + ], + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "devDependencies": {}, + "optionalDependencies": {}, + "engines": { + "node": "*" + } +} diff --git a/miniprogram/node_modules/util-deprecate/History.md b/miniprogram/node_modules/util-deprecate/History.md new file mode 100644 index 00000000..acc86753 --- /dev/null +++ b/miniprogram/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/miniprogram/node_modules/util-deprecate/LICENSE b/miniprogram/node_modules/util-deprecate/LICENSE new file mode 100644 index 00000000..6a60e8c2 --- /dev/null +++ b/miniprogram/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/miniprogram/node_modules/util-deprecate/README.md b/miniprogram/node_modules/util-deprecate/README.md new file mode 100644 index 00000000..75622fa7 --- /dev/null +++ b/miniprogram/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/miniprogram/node_modules/util-deprecate/browser.js b/miniprogram/node_modules/util-deprecate/browser.js new file mode 100644 index 00000000..549ae2f0 --- /dev/null +++ b/miniprogram/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. 
+ * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. + * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/miniprogram/node_modules/util-deprecate/node.js b/miniprogram/node_modules/util-deprecate/node.js new file mode 100644 index 00000000..5e6fcff5 --- /dev/null +++ b/miniprogram/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = require('util').deprecate; diff --git a/miniprogram/node_modules/util-deprecate/package.json b/miniprogram/node_modules/util-deprecate/package.json new file mode 100644 index 00000000..2e79f89a --- /dev/null +++ b/miniprogram/node_modules/util-deprecate/package.json @@ -0,0 +1,27 @@ +{ + "name": "util-deprecate", + "version": "1.0.2", + "description": "The Node.js `util.deprecate()` function with browser support", + "main": "node.js", + "browser": "browser.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "homepage": "https://github.com/TooTallNate/util-deprecate" +} diff --git a/miniprogram/node_modules/wrappy/LICENSE b/miniprogram/node_modules/wrappy/LICENSE new file mode 100644 index 00000000..19129e31 --- /dev/null +++ b/miniprogram/node_modules/wrappy/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/miniprogram/node_modules/wrappy/README.md b/miniprogram/node_modules/wrappy/README.md new file mode 100644 index 00000000..98eab252 --- /dev/null +++ b/miniprogram/node_modules/wrappy/README.md @@ -0,0 +1,36 @@ +# wrappy + +Callback wrapping utility + +## USAGE + +```javascript +var wrappy = require("wrappy") + +// var wrapper = wrappy(wrapperFunction) + +// make sure a cb is called only once +// See also: http://npm.im/once for this specific use case +var once = wrappy(function (cb) { + var called = false + return function () { + if (called) return + called = true + return cb.apply(this, arguments) + } +}) + +function printBoo () { + console.log('boo') +} +// has some rando property +printBoo.iAmBooPrinter = true + +var onlyPrintOnce = once(printBoo) + +onlyPrintOnce() // prints 'boo' +onlyPrintOnce() // does nothing + +// random property is retained! +assert.equal(onlyPrintOnce.iAmBooPrinter, true) +``` diff --git a/miniprogram/node_modules/wrappy/package.json b/miniprogram/node_modules/wrappy/package.json new file mode 100644 index 00000000..13075204 --- /dev/null +++ b/miniprogram/node_modules/wrappy/package.json @@ -0,0 +1,29 @@ +{ + "name": "wrappy", + "version": "1.0.2", + "description": "Callback wrapping utility", + "main": "wrappy.js", + "files": [ + "wrappy.js" + ], + "directories": { + "test": "test" + }, + "dependencies": {}, + "devDependencies": { + "tap": "^2.3.1" + }, + "scripts": { + "test": "tap --coverage test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/wrappy" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/wrappy/issues" + }, + "homepage": "https://github.com/npm/wrappy" +} diff --git a/miniprogram/node_modules/wrappy/wrappy.js b/miniprogram/node_modules/wrappy/wrappy.js new file mode 100644 index 00000000..bb7e7d6f --- /dev/null +++ b/miniprogram/node_modules/wrappy/wrappy.js @@ -0,0 +1,33 @@ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. 
+module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} diff --git a/miniprogram/package-lock.json b/miniprogram/package-lock.json new file mode 100644 index 00000000..3502f6de --- /dev/null +++ b/miniprogram/package-lock.json @@ -0,0 +1,454 @@ +{ + "name": "miniprogram", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "canvas": "^3.2.1" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/canvas": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/canvas/-/canvas-3.2.1.tgz", + "integrity": "sha512-ej1sPFR5+0YWtaVp6S1N1FVz69TQCqmrkGeRvQxZeAB1nAIcjNTHVwrZtYtWFFBmQsF40/uDLehsW5KuYC99mg==", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "node-addon-api": "^7.0.0", + "prebuild-install": "^7.1.3" + }, + "engines": { + "node": "^18.12.0 || >= 20.9.0" + } + }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "license": "ISC" + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "license": "MIT", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + 
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "license": "MIT", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", + "license": "(MIT OR WTFPL)", + "engines": { + "node": ">=6" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "license": "MIT" + }, + "node_modules/github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", + "license": "MIT" + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "license": "ISC" + }, + "node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", + "license": "MIT" + }, + "node_modules/napi-build-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz", + "integrity": "sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==", + "license": "MIT" + }, + "node_modules/node-abi": { + "version": "3.86.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.86.0.tgz", + "integrity": "sha512-sn9Et4N3ynsetj3spsZR729DVlGH6iBG4RiDMV7HEp3guyOW6W3S0unGpLDxT50mXortGUMax/ykUNQXdqc/Xg==", + "license": "MIT", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-addon-api": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", + "integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==", + "license": "MIT" + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/prebuild-install": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.3.tgz", + "integrity": "sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==", + "license": "MIT", + "dependencies": { + "detect-libc": "^2.0.0", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.3", + "mkdirp-classic": "^0.5.3", + "napi-build-utils": "^2.0.0", + "node-abi": "^3.3.0", + "pump": "^3.0.0", + "rc": "^1.2.7", + "simple-get": "^4.0.0", + "tar-fs": "^2.0.0", + "tunnel-agent": "^0.6.0" + }, + "bin": { + "prebuild-install": "bin.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "license": "(BSD-2-Clause OR MIT OR Apache-2.0)", + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": 
"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tar-fs": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz", + "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==", + "license": "MIT", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "license": "MIT", + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": 
"sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + } + } +} diff --git a/miniprogram/package.json b/miniprogram/package.json new file mode 100644 index 00000000..fd16cda9 --- /dev/null +++ b/miniprogram/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "canvas": "^3.2.1" + } +} diff --git a/miniprogram/scripts/generateIcons.js b/miniprogram/scripts/generateIcons.js new file mode 100644 index 00000000..269a81b9 --- /dev/null +++ b/miniprogram/scripts/generateIcons.js @@ -0,0 +1,181 @@ +/** + * TabBar 图标生成脚本 + * 运行: node scripts/generateIcons.js + * 需要先安装依赖: npm install canvas + */ + +const fs = require('fs'); +const path = require('path'); + +// 尝试使用 canvas,如果没有安装则使用简单的 PNG 生成 +let useCanvas = false; +let createCanvas; + +try { + const canvas = require('canvas'); + createCanvas = canvas.createCanvas; + useCanvas = true; +} catch (e) { + console.log('canvas 未安装,将生成简单的占位图标'); +} + +const imagesDir = path.join(__dirname, '..', 'images'); + +// 确保 images 目录存在 +if (!fs.existsSync(imagesDir)) { + fs.mkdirSync(imagesDir, { recursive: true }); +} + +// 图标配置 +const icons = [ + { name: 'tab-rank', icon: '排', desc: '排名' }, + { name: 'tab-match', icon: '赛', desc: '比赛' }, + { name: 'tab-points', icon: '分', desc: '积分' }, + { name: 'tab-user', icon: '我', desc: '我的' } +]; + +const normalColor = '#999999'; +const activeColor = '#FF6B35'; +const size = 81; // 微信推荐 81x81 + +function generateIconWithCanvas(iconConfig, isActive) { + const canvas = createCanvas(size, size); + const ctx = canvas.getContext('2d'); + + // 清空背景(透明) + ctx.clearRect(0, 0, size, size); + + // 绘制圆形背景 + ctx.beginPath(); + ctx.arc(size / 2, size / 2, size / 2 - 4, 0, Math.PI * 2); + ctx.fillStyle = isActive ? activeColor : '#f5f5f5'; + ctx.fill(); + + // 绘制文字 + ctx.font = 'bold 32px sans-serif'; + ctx.textAlign = 'center'; + ctx.textBaseline = 'middle'; + ctx.fillStyle = isActive ? '#ffffff' : normalColor; + ctx.fillText(iconConfig.icon, size / 2, size / 2); + + return canvas.toBuffer('image/png'); +} + +// 简单的 PNG 生成(不依赖 canvas) +function generateSimplePNG(iconConfig, isActive) { + // 创建一个简单的 1x1 像素的 PNG 作为占位符 + // 实际项目中应该使用真实的图标文件 + const color = isActive ? 
[255, 107, 53] : [153, 153, 153]; // RGB + + // 最简单的 PNG 文件头 + IHDR + IDAT + IEND + // 这是一个 8x8 的纯色 PNG + const width = 8; + const height = 8; + + // PNG 签名 + const signature = Buffer.from([137, 80, 78, 71, 13, 10, 26, 10]); + + // IHDR chunk + const ihdr = createChunk('IHDR', Buffer.from([ + 0, 0, 0, width, // width + 0, 0, 0, height, // height + 8, // bit depth + 2, // color type (RGB) + 0, // compression + 0, // filter + 0 // interlace + ])); + + // IDAT chunk - 简化的图像数据 + const zlib = require('zlib'); + const rawData = Buffer.alloc((width * 3 + 1) * height); + for (let y = 0; y < height; y++) { + rawData[y * (width * 3 + 1)] = 0; // filter byte + for (let x = 0; x < width; x++) { + const offset = y * (width * 3 + 1) + 1 + x * 3; + rawData[offset] = color[0]; + rawData[offset + 1] = color[1]; + rawData[offset + 2] = color[2]; + } + } + const compressed = zlib.deflateSync(rawData); + const idat = createChunk('IDAT', compressed); + + // IEND chunk + const iend = createChunk('IEND', Buffer.alloc(0)); + + return Buffer.concat([signature, ihdr, idat, iend]); +} + +function createChunk(type, data) { + const length = Buffer.alloc(4); + length.writeUInt32BE(data.length); + + const typeBuffer = Buffer.from(type); + const crcData = Buffer.concat([typeBuffer, data]); + const crc = crc32(crcData); + + const crcBuffer = Buffer.alloc(4); + crcBuffer.writeUInt32BE(crc >>> 0); + + return Buffer.concat([length, typeBuffer, data, crcBuffer]); +} + +// CRC32 计算 +function crc32(data) { + let crc = 0xffffffff; + const table = getCRC32Table(); + + for (let i = 0; i < data.length; i++) { + crc = table[(crc ^ data[i]) & 0xff] ^ (crc >>> 8); + } + + return crc ^ 0xffffffff; +} + +let crc32Table = null; +function getCRC32Table() { + if (crc32Table) return crc32Table; + + crc32Table = new Uint32Array(256); + for (let i = 0; i < 256; i++) { + let c = i; + for (let j = 0; j < 8; j++) { + c = (c & 1) ? (0xedb88320 ^ (c >>> 1)) : (c >>> 1); + } + crc32Table[i] = c; + } + return crc32Table; +} + +// 生成图标 +console.log('开始生成 TabBar 图标...\n'); + +icons.forEach(iconConfig => { + // 生成普通状态图标 + const normalPath = path.join(imagesDir, `${iconConfig.name}.png`); + const normalBuffer = useCanvas + ? generateIconWithCanvas(iconConfig, false) + : generateSimplePNG(iconConfig, false); + fs.writeFileSync(normalPath, normalBuffer); + console.log(`✓ 已生成: ${iconConfig.name}.png (${iconConfig.desc})`); + + // 生成选中状态图标 + const activePath = path.join(imagesDir, `${iconConfig.name}-active.png`); + const activeBuffer = useCanvas + ? generateIconWithCanvas(iconConfig, true) + : generateSimplePNG(iconConfig, true); + fs.writeFileSync(activePath, activeBuffer); + console.log(`✓ 已生成: ${iconConfig.name}-active.png (${iconConfig.desc} - 选中)`); +}); + +console.log('\n图标生成完成!'); +console.log(`图标目录: ${imagesDir}`); + +if (!useCanvas) { + console.log('\n提示: 当前生成的是简单占位图标。'); + console.log('如需更好的图标效果,请:'); + console.log('1. 安装 canvas: npm install canvas'); + console.log('2. 重新运行此脚本'); + console.log('或者从 iconfont.cn 下载合适的图标替换'); +}
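Running `node scripts/generateIcons.js` writes eight PNGs into miniprogram/images (tab-rank, tab-match, tab-points, tab-user, each with an -active variant). A quick sanity-check sketch, assuming it is saved alongside generateIcons.js under scripts/ (hypothetical helper, not part of this change):

```javascript
// Sketch only: confirm the eight generated tab icons exist and report their sizes.
const fs = require('fs');
const path = require('path');

const imagesDir = path.join(__dirname, '..', 'images');
const names = ['tab-rank', 'tab-match', 'tab-points', 'tab-user'];

names.forEach((name) => {
  [`${name}.png`, `${name}-active.png`].forEach((file) => {
    const full = path.join(imagesDir, file);
    if (fs.existsSync(full)) {
      console.log(`${file}: ${fs.statSync(full).size} bytes`);
    } else {
      console.log(`${file}: missing`);
    }
  });
});
```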